Skip to content

Commit

Permalink
Merge branch 'feature/case-merge' into develop
Browse files Browse the repository at this point in the history
  • Loading branch information
nadouani committed Nov 21, 2016
2 parents dbd4a19 + e994a8b commit 80edbbd
Show file tree
Hide file tree
Showing 20 changed files with 633 additions and 76 deletions.
2 changes: 1 addition & 1 deletion project/Dependencies.scala
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ object Dependencies {
val reflections = "org.reflections" % "reflections" % "0.9.10"
val zip4j = "net.lingala.zip4j" % "zip4j" % "1.3.2"
val akkaTest = "com.typesafe.akka" %% "akka-stream-testkit" % "2.4.4"
val elastic4play = "org.cert-bdf" %% "elastic4play" % "1.1.0"
val elastic4play = "org.cert-bdf" %% "elastic4play" % "1.1.1-AIV-SNAPSHOT"

object Elastic4s {
private val version = "2.3.0"
Expand Down
59 changes: 38 additions & 21 deletions thehive-backend/app/controllers/Case.scala
Original file line number Diff line number Diff line change
Expand Up @@ -24,10 +24,13 @@ import org.elastic4play.services.JsonFormat.{ aggReads, queryReads }

import models.{ Case, CaseStatus }
import services.{ CaseSrv, TaskSrv }
import services.CaseMergeSrv
import scala.util.Try

@Singleton
class CaseCtrl @Inject() (
caseSrv: CaseSrv,
caseMergeSrv: CaseMergeSrv,
taskSrv: TaskSrv,
auxSrv: AuxSrv,
authenticated: Authenticated,
Expand All @@ -39,46 +42,53 @@ class CaseCtrl @Inject() (
val log = Logger(getClass)

@Timed
def create() = authenticated(Role.write).async(fieldsBodyParser) { implicit request =>
def create() = authenticated(Role.write).async(fieldsBodyParser) { implicit request ⇒
caseSrv.create(request.body)
.map(caze => renderer.toOutput(CREATED, caze))
.map(caze ⇒ renderer.toOutput(CREATED, caze))
}

@Timed
def get(id: String) = authenticated(Role.read).async { implicit request =>
caseSrv.get(id)
.map(caze => renderer.toOutput(OK, caze))
def get(id: String) = authenticated(Role.read).async { implicit request ⇒
val withStats = for {
statsValues <- request.queryString.get("nstats")
firstValue <- statsValues.headOption
} yield Try(firstValue.toBoolean).getOrElse(firstValue == "1")

for {
caze ← caseSrv.get(id)
casesWithStats ← auxSrv.apply(caze, 0, withStats.getOrElse(false))
} yield renderer.toOutput(OK, casesWithStats)
}

@Timed
def update(id: String) = authenticated(Role.write).async(fieldsBodyParser) { implicit request =>
def update(id: String) = authenticated(Role.write).async(fieldsBodyParser) { implicit request ⇒
val isCaseClosing = request.body.getString("status").filter(_ == CaseStatus.Resolved.toString).isDefined

for {
// Closing the case, so lets close the open tasks
caze <- caseSrv.update(id, request.body)
closedTasks <- if (isCaseClosing) taskSrv.closeTasksOfCase(id) else Future.successful(Nil) // FIXME log warning if closedTasks contains errors
caze ← caseSrv.update(id, request.body)
closedTasks ← if (isCaseClosing) taskSrv.closeTasksOfCase(id) else Future.successful(Nil) // FIXME log warning if closedTasks contains errors
} yield renderer.toOutput(OK, caze)
}

@Timed
def bulkUpdate() = authenticated(Role.write).async(fieldsBodyParser) { implicit request =>
def bulkUpdate() = authenticated(Role.write).async(fieldsBodyParser) { implicit request ⇒
val isCaseClosing = request.body.getString("status").filter(_ == CaseStatus.Resolved.toString).isDefined

request.body.getStrings("ids").fold(Future.successful(Ok(JsArray()))) { ids ⇒
if (isCaseClosing) taskSrv.closeTasksOfCase(ids: _*) // FIXME log warning if closedTasks contains errors
caseSrv.bulkUpdate(ids, request.body.unset("ids")).map(multiResult => renderer.toMultiOutput(OK, multiResult))
caseSrv.bulkUpdate(ids, request.body.unset("ids")).map(multiResult ⇒ renderer.toMultiOutput(OK, multiResult))
}
}

@Timed
def delete(id: String) = authenticated(Role.write).async { implicit request =>
def delete(id: String) = authenticated(Role.write).async { implicit request ⇒
caseSrv.delete(id)
.map(_ => NoContent)
.map(_ ⇒ NoContent)
}

@Timed
def find() = authenticated(Role.read).async(fieldsBodyParser) { implicit request =>
def find() = authenticated(Role.read).async(fieldsBodyParser) { implicit request ⇒
val query = request.body.getValue("query").fold[QueryDef](QueryDSL.any)(_.as[QueryDef])
val range = request.body.getString("range")
val sort = request.body.getStrings("sort").getOrElse(Nil)
Expand All @@ -91,26 +101,33 @@ class CaseCtrl @Inject() (
}

@Timed
def stats() = authenticated(Role.read).async(fieldsBodyParser) { implicit request =>
def stats() = authenticated(Role.read).async(fieldsBodyParser) { implicit request ⇒
val query = request.body.getValue("query").fold[QueryDef](QueryDSL.any)(_.as[QueryDef])
val aggs = request.body.getValue("stats").getOrElse(throw BadRequestError("Parameter \"stats\" is missing")).as[Seq[Agg]]
caseSrv.stats(query, aggs).map(s => Ok(s))
caseSrv.stats(query, aggs).map(s ⇒ Ok(s))
}

@Timed
def linkedCases(id: String) = authenticated(Role.read).async { implicit request =>
def linkedCases(id: String) = authenticated(Role.read).async { implicit request ⇒
caseSrv.linkedCases(id)
.runWith(Sink.seq)
.map { cases ⇒
val casesList = cases.sortWith {
case ((c1, _), (c2, _)) => c1.startDate().after(c2.startDate())
case ((c1, _), (c2, _)) ⇒ c1.startDate().after(c2.startDate())
}.map {
case (caze, artifacts) =>
case (caze, artifacts) ⇒
Json.toJson(caze).as[JsObject] - "description" +
("linkedWith" -> Json.toJson(artifacts)) +
("linksCount" -> Json.toJson(artifacts.size))
}
renderer.toOutput(OK, casesList)
}
}

@Timed
def merge(caseId1: String, caseId2: String) = authenticated(Role.read).async { implicit request ⇒
caseMergeSrv.merge(caseId1, caseId2).map { caze ⇒
renderer.toOutput(OK, caze)
}
}
}
113 changes: 85 additions & 28 deletions thehive-backend/app/models/Case.scala
Original file line number Diff line number Diff line change
Expand Up @@ -13,28 +13,30 @@ import play.api.libs.json.Json
import play.api.libs.json.Json.toJsFieldJsValueWrapper

import org.elastic4play.JsonFormat.dateFormat
import org.elastic4play.models.{ AttributeDef, AttributeFormat => F, AttributeOption => O, BaseEntity, EntityDef, HiveEnumeration, ModelDef }
import org.elastic4play.models.{ AttributeDef, AttributeFormat ⇒ F, AttributeOption ⇒ O, BaseEntity, EntityDef, HiveEnumeration, ModelDef }
import org.elastic4play.services.{ FindSrv, SequenceSrv }

import JsonFormat.{ caseImpactStatusFormat, caseResolutionStatusFormat, caseStatusFormat }
import services.AuditedModel
import services.CaseSrv
import play.api.Logger

object CaseStatus extends Enumeration with HiveEnumeration {
type Type = Value
val Ephemeral, Open, FalsePositive, TruePositive, Resolved, Deleted = Value
val Open, Resolved, Deleted = Value
}

object CaseResolutionStatus extends Enumeration with HiveEnumeration {
type Type = Value
val Indeterminate, FalsePositive, TruePositive, Other = Value
val Indeterminate, FalsePositive, TruePositive, Other, Duplicated = Value
}

object CaseImpactStatus extends Enumeration with HiveEnumeration {
type Type = Value
val NoImpact, WithImpact, NotApplicable = Value
}

trait CaseAttributes { _: AttributeDef =>
trait CaseAttributes { _: AttributeDef ⇒
val caseId = attribute("caseId", F.numberFmt, "Id of the case (auto-generated)", O.model)
val title = attribute("title", F.textFmt, "Title of the case")
val description = attribute("description", F.textFmt, "Description of the case")
Expand All @@ -50,56 +52,111 @@ trait CaseAttributes { _: AttributeDef =>
val resolutionStatus = optionalAttribute("resolutionStatus", F.enumFmt(CaseResolutionStatus), "Resolution status of the case")
val impactStatus = optionalAttribute("impactStatus", F.enumFmt(CaseImpactStatus), "Impact status of the case")
val summary = optionalAttribute("summary", F.textFmt, "Summary of the case, to be provided when closing a case")
val mergeInto = optionalAttribute("mergeInto", F.stringFmt, "Id of the case created by the merge")
val mergeFrom = multiAttribute("mergeFrom", F.stringFmt, "Id of the cases merged")
}

@Singleton
class CaseModel @Inject() (
artifactModel: Provider[ArtifactModel],
taskModel: Provider[TaskModel],
caseSrv: Provider[CaseSrv],
sequenceSrv: SequenceSrv,
findSrv: FindSrv,
implicit val ec: ExecutionContext) extends ModelDef[CaseModel, Case]("case") with CaseAttributes with AuditedModel { caseModel =>
implicit val ec: ExecutionContext) extends ModelDef[CaseModel, Case]("case") with CaseAttributes with AuditedModel { caseModel ⇒

lazy val logger = Logger(getClass)
override val defaultSortBy = Seq("-startDate")
override val removeAttribute = Json.obj("status" -> CaseStatus.Deleted)

override def creationHook(parent: Option[BaseEntity], attrs: JsObject) = {
sequenceSrv("case").map { caseId =>
sequenceSrv("case").map { caseId ⇒
attrs + ("caseId" -> JsNumber(caseId))
}
}

override def updateHook(entity: BaseEntity, updateAttrs: JsObject): Future[JsObject] = Future.successful {
(updateAttrs \ "status").asOpt[CaseStatus.Type] match {
case Some(CaseStatus.Resolved) if !updateAttrs.keys.contains("endDate") =>
case Some(CaseStatus.Resolved) if !updateAttrs.keys.contains("endDate") ⇒
updateAttrs + ("endDate" -> Json.toJson(new Date))
case Some(CaseStatus.Open) =>
case Some(CaseStatus.Open) ⇒
updateAttrs + ("endDate" -> JsArray(Nil))
case _ =>
case _ ⇒
updateAttrs
}
}

override def getStats(entity: BaseEntity): Future[JsObject] = {
private[models] def buildArtifactStats(caze: Case): Future[JsObject] = {
import org.elastic4play.services.QueryDSL._
for {
taskStatsJson <- findSrv(
taskModel.get,
and(
"_parent" ~= entity.id,
"status" in ("Waiting", "InProgress", "Completed")),
groupByField("status", selectCount))
(taskCount, taskStats) = taskStatsJson.value.foldLeft((0L, JsObject(Nil))) {
case ((total, s), (key, value)) =>
val count = (value \ "count").as[Long]
(total + count, s + (key -> JsNumber(count)))
findSrv(
artifactModel.get,
and(
parent("case", withId(caze.id)),
"status" ~= "Ok"),
selectCount)
.map { artifactStats ⇒
Json.obj("artifacts" -> artifactStats)
}
}

private[models] def buildTaskStats(caze: Case): Future[JsObject] = {
import org.elastic4play.services.QueryDSL._
findSrv(
taskModel.get,
and(
parent("case", withId(caze.id)),
"status" in ("Waiting", "InProgress", "Completed")),
groupByField("status", selectCount))
.map { taskStatsJson ⇒
val (taskCount, taskStats) = taskStatsJson.value.foldLeft((0L, JsObject(Nil))) {
case ((total, s), (key, value)) ⇒
val count = (value \ "count").as[Long]
(total + count, s + (key -> JsNumber(count)))
}
Json.obj("tasks" -> (taskStats + ("total" -> JsNumber(taskCount))))
}
}

private[models] def buildMergeIntoStats(caze: Case): Future[JsObject] = {
caze.mergeInto()
.fold(Future.successful(Json.obj())) { mergeCaseId ⇒
caseSrv.get.get(mergeCaseId).map { c ⇒
Json.obj("mergeInto" -> Json.obj(
"caseId" -> c.caseId(),
"title" -> c.title()))
}
}
artifactStats <- findSrv(
artifactModel.get,
and(
"_parent" ~= entity.id,
"status" ~= "Ok"),
selectCount)
} yield Json.obj("tasks" -> (taskStats + ("total" -> JsNumber(taskCount))), "artifacts" -> artifactStats)
}

private[models] def buildMergeFromStats(caze: Case): Future[JsObject] = {
Future
.traverse(caze.mergeFrom()) { id ⇒
caseSrv.get.get(id).map { c ⇒
Json.obj(
"caseId" -> c.caseId(),
"title" -> c.title())
}
}
.map {
case mf if !mf.isEmpty ⇒ Json.obj("mergeFrom" -> mf)
case _ ⇒ Json.obj()
}
}
override def getStats(entity: BaseEntity): Future[JsObject] = {


entity match {
case caze: Case ⇒
for {
taskStats <- buildTaskStats(caze)
artifactStats <- buildArtifactStats(caze)
mergeIntoStats <- buildMergeIntoStats(caze)
mergeFromStats <- buildMergeFromStats(caze)
} yield taskStats ++ artifactStats ++ mergeIntoStats ++ mergeFromStats
case other ⇒
logger.warn(s"Request caseStats from a non-case entity ?! ${other.getClass}:$other")
Future.successful(Json.obj())
}
}

override val computedMetrics = Map(
Expand Down
2 changes: 1 addition & 1 deletion thehive-backend/app/models/Job.scala
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ trait JobAttributes { _: AttributeDef =>
val analyzerId = attribute("analyzerId", F.stringFmt, "Analyzer", O.readonly)
val status = attribute("status", F.enumFmt(JobStatus), "Status of the job", JobStatus.InProgress)
val artifactId = attribute("artifactId", F.stringFmt, "Original artifact on which this job was executed", O.readonly)
val startDate = attribute("startDate", F.dateFmt, "Timestamp of the job start", O.model)
val startDate = attribute("startDate", F.dateFmt, "Timestamp of the job start") // , O.model)
val endDate = optionalAttribute("endDate", F.dateFmt, "Timestamp of the job completion (or fail)")
val report = optionalAttribute("report", F.textFmt, "Analysis result", O.unaudited)

Expand Down
10 changes: 6 additions & 4 deletions thehive-backend/app/services/ArtifactSrv.scala
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,8 @@ import org.elastic4play.services.{ Agg, AuthContext, CreateSrv, DeleteSrv, Field

import models.{ Artifact, ArtifactModel, ArtifactStatus, Case, CaseModel, JobModel }
import org.elastic4play.utils.{ RichFuture, RichOr }
import models.CaseStatus
import models.CaseResolutionStatus

@Singleton
class ArtifactSrv @Inject() (
Expand Down Expand Up @@ -102,14 +104,14 @@ class ArtifactSrv @Inject() (
def findSimilar(artifact: Artifact, range: Option[String], sortBy: Seq[String]) =
find(similarArtifactFilter(artifact), range, sortBy)

private def similarArtifactFilter(artifact: Artifact): QueryDef = {
private[services] def similarArtifactFilter(artifact: Artifact): QueryDef = {
import org.elastic4play.services.QueryDSL._
val dataType = artifact.dataType()
artifact.data() match {
// artifact is an hash
case Some(d) if dataType == "hash" =>
and(
not(parent("case", "_id" ~= artifact.parentId.get)),
parent("case", and(not(withId(artifact.parentId.get)), "status" ~!= CaseStatus.Deleted, "resolutionStatus" ~!= CaseResolutionStatus.Duplicated)),
"status" ~= "Ok",
or(
and(
Expand All @@ -119,7 +121,7 @@ class ArtifactSrv @Inject() (
// artifact contains data but not an hash
case Some(d) =>
and(
not(parent("case", "_id" ~= artifact.parentId.get)),
parent("case", and(not(withId(artifact.parentId.get)), "status" ~!= CaseStatus.Deleted, "resolutionStatus" ~!= CaseResolutionStatus.Duplicated)),
"status" ~= "Ok",
"data" ~= d,
"dataType" ~= dataType)
Expand All @@ -128,7 +130,7 @@ class ArtifactSrv @Inject() (
val hashes = artifact.attachment().toSeq.flatMap(_.hashes).map(_.toString)
val hashFilter = hashes.map { h => "attachment.hashes" ~= h }
and(
not(parent("case", "_id" ~= artifact.parentId.get)),
parent("case", and(not(withId(artifact.parentId.get)), "status" ~!= CaseStatus.Deleted, "resolutionStatus" ~!= CaseResolutionStatus.Duplicated)),
"status" ~= "Ok",
or(
hashFilter :+
Expand Down
Loading

0 comments on commit 80edbbd

Please sign in to comment.