From 4c0df9716b5e68172fbc7edde7d3e52dbc3ab4f1 Mon Sep 17 00:00:00 2001
From: To-om
Date: Wed, 15 May 2019 15:31:45 +0200
Subject: [PATCH] #623 #913 Update Elastic4play to add support for ElasticSearch 6

---
 project/Dependencies.scala                 |   4 +-
 .../app/controllers/ArtifactCtrl.scala     | 157 +++++++++---------
 .../app/controllers/StatusCtrl.scala       |   4 +-
 thehive-backend/app/models/Migration.scala |  66 +++++---
 thehive-backend/app/models/Roles.scala     |   6 +-
 thehive-backend/app/models/package.scala   |   6 +-
 .../app/services/CaseMergeSrv.scala        |   2 +-
 thehive-backend/app/services/CaseSrv.scala |   4 +-
 8 files changed, 133 insertions(+), 116 deletions(-)

diff --git a/project/Dependencies.scala b/project/Dependencies.scala
index bb22b979d0..5af8e99cb1 100644
--- a/project/Dependencies.scala
+++ b/project/Dependencies.scala
@@ -16,11 +16,11 @@ object Dependencies {
     val guice = "com.typesafe.play" %% "play-guice" % version
   }
 
-  val scalaGuice = "net.codingwell" %% "scala-guice" % "4.2.1"
+  val scalaGuice = "net.codingwell" %% "scala-guice" % "4.2.3"
 
   val reflections = "org.reflections" % "reflections" % "0.9.11"
   val zip4j = "net.lingala.zip4j" % "zip4j" % "1.3.2"
-  val elastic4play = "org.thehive-project" %% "elastic4play" % "1.10.0"
+  val elastic4play = "org.thehive-project" %% "elastic4play" % "1.11.1"
   val akkaCluster = "com.typesafe.akka" %% "akka-cluster" % "2.5.19"
   val akkaClusterTools = "com.typesafe.akka" %% "akka-cluster-tools" % "2.5.19"
 }
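Note: the elastic4play bump is the change that actually brings in ElasticSearch 6 support; the elastic4s HTTP client used by the controllers below arrives transitively through it. If a module ever needed to compile against that DSL directly, it could declare the client itself — a sketch, with the version assumed to match what elastic4play 1.11.1 was built against (check elastic4play's own Dependencies.scala before copying):

    // Hypothetical explicit dependency; the 6.5.x version is an assumption.
    val elastic4s = "com.sksamuel.elastic4s" %% "elastic4s-http" % "6.5.1"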
diff --git a/thehive-backend/app/controllers/ArtifactCtrl.scala b/thehive-backend/app/controllers/ArtifactCtrl.scala
index 1655fe3a97..9d9520a060 100644
--- a/thehive-backend/app/controllers/ArtifactCtrl.scala
+++ b/thehive-backend/app/controllers/ArtifactCtrl.scala
@@ -4,14 +4,14 @@ import java.io.FilterInputStream
 import java.nio.file.Files
 
 import scala.collection.JavaConverters._
-import scala.concurrent.{ ExecutionContext, Future }
+import scala.concurrent.{ExecutionContext, Future}
 
-import play.api.{ Configuration, Logger }
+import play.api.{Configuration, Logger}
 import play.api.http.Status
 import play.api.libs.json.JsArray
 import play.api.mvc._
 
-import javax.inject.{ Inject, Singleton }
+import javax.inject.{Inject, Singleton}
 import models.Roles
 import net.lingala.zip4j.core.ZipFile
 import net.lingala.zip4j.model.FileHeader
@@ -19,12 +19,12 @@ import services.ArtifactSrv
 
 import org.elastic4play.controllers._
 import org.elastic4play.models.JsonFormat.baseModelEntityWrites
-import org.elastic4play.services.JsonFormat.{ aggReads, queryReads }
+import org.elastic4play.services.JsonFormat.{aggReads, queryReads}
 import org.elastic4play.services._
-import org.elastic4play.{ BadRequestError, InternalError, Timed }
+import org.elastic4play.{BadRequestError, InternalError, Timed}
 
 @Singleton
-class ArtifactCtrl @Inject() (
+class ArtifactCtrl @Inject()(
     artifactSrv: ArtifactSrv,
     auxSrv: AuxSrv,
     tempSrv: TempSrv,
@@ -33,7 +33,9 @@ class ArtifactCtrl @Inject() (
     components: ControllerComponents,
     fieldsBodyParser: FieldsBodyParser,
     configuration: Configuration,
-    implicit val ec: ExecutionContext) extends AbstractController(components) with Status {
+    implicit val ec: ExecutionContext
+) extends AbstractController(components)
+    with Status {
 
   private[ArtifactCtrl] lazy val logger = Logger(getClass)
 
@@ -45,17 +47,15 @@
       val file = tempSrv.newTemporaryFile(fileName, "-fromZipFile")
       val input = zipFile.getInputStream(header)
-      val size = header.getUncompressedSize
+      val size  = header.getUncompressedSize
       val sizedInput: FilterInputStream = new FilterInputStream(input) {
         var totalRead = 0
-        override def read(): Int = {
+        override def read(): Int =
           if (totalRead < size) {
             totalRead += 1
             super.read()
-          }
-          else throw BadRequestError("Error extracting file: output size doesn't match header")
-        }
+          } else throw BadRequestError("Error extracting file: output size doesn't match header")
       }
       Files.delete(file)
       val fileSize = Files.copy(sizedInput, file)
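Note: the anonymous FilterInputStream above caps extraction at the uncompressed size announced in the zip entry's header, so an entry that inflates beyond what its header admits (a zip bomb, or a lying header) aborts with BadRequestError instead of filling the temp directory. One caveat when reusing the idea: java.io.FilterInputStream forwards read(b, off, len) straight to the wrapped stream, bypassing a read()-only override, so a standalone guard should cover the bulk path too. A minimal sketch under that assumption (class and message are illustrative, not part of this patch):

    import java.io.{ FilterInputStream, InputStream }

    class BoundedInputStream(in: InputStream, declaredSize: Long) extends FilterInputStream(in) {
      private var totalRead = 0L

      private def count(n: Long): Unit = {
        totalRead += n
        if (totalRead > declaredSize)
          throw new IllegalStateException("output size doesn't match header")
      }

      override def read(): Int = {
        val b = super.read()              // returns a byte value, not a count
        if (b != -1) count(1)
        b
      }

      override def read(buf: Array[Byte], off: Int, len: Int): Int = {
        val n = super.read(buf, off, len) // bulk path, used by Files.copy
        if (n > 0) count(n)
        n
      }
    }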
@@ -70,67 +70,73 @@
   }
 
   @Timed
-  def create(caseId: String): Action[Fields] = authenticated(Roles.write).async(fieldsBodyParser) {
-    implicit request ⇒
-      val fields = request.body
-      val data = fields.getStrings("data")
-        .getOrElse(fields.getString("data").toSeq)
-        .map(_.trim) // most observables don't accept leading or trailing space
-        .filterNot(_.isEmpty)
-      // if data is not multivalued, use simple API (not bulk API)
-      if (data.isEmpty) {
-
-        fields.get("attachment")
-          .collect {
-            case FileInputValue(_, filepath, _) if fields.getBoolean("isZip").getOrElse(false) ⇒
-
-              val zipFile = new ZipFile(filepath.toFile)
-              val files: Seq[FileHeader] = zipFile.getFileHeaders.asScala.asInstanceOf[Seq[FileHeader]]
-
-              if (zipFile.isEncrypted) {
-                val pw = fields.getString("zipPassword")
-                  .filterNot(_.isEmpty)
-                  .getOrElse(configuration.get[String]("datastore.attachment.password"))
-                zipFile.setPassword(pw)
+  def create(caseId: String): Action[Fields] = authenticated(Roles.write).async(fieldsBodyParser) { implicit request ⇒
+    val fields = request.body
+    val data = fields
+      .getStrings("data")
+      .getOrElse(fields.getString("data").toSeq)
+      .map(_.trim) // most observables don't accept leading or trailing space
+      .filterNot(_.isEmpty)
+    // if data is not multivalued, use simple API (not bulk API)
+    if (data.isEmpty) {
+
+      fields
+        .get("attachment")
+        .collect {
+          case FileInputValue(_, filepath, _) if fields.getBoolean("isZip").getOrElse(false) ⇒
+            val zipFile = new ZipFile(filepath.toFile)
+            val files: Seq[FileHeader] = zipFile.getFileHeaders.asScala.asInstanceOf[Seq[FileHeader]]
+
+            if (zipFile.isEncrypted) {
+              val pw = fields
+                .getString("zipPassword")
+                .filterNot(_.isEmpty)
+                .getOrElse(configuration.get[String]("datastore.attachment.password"))
+              zipFile.setPassword(pw)
+            }
+
+            val multiFields = files
+              .filterNot(_.isDirectory)
+              .flatMap(extractAndCheckSize(zipFile, _))
+              .map { fiv ⇒
+                fields
+                  .unset("isZip")
+                  .unset("zipPassword")
+                  .set("dataType", "file")
+                  .set("attachment", fiv)
               }
-
-              val multiFields = files.filterNot(_.isDirectory)
-                .flatMap(extractAndCheckSize(zipFile, _))
-                .map { fiv ⇒
-                  fields
-                    .unset("isZip")
-                    .unset("zipPassword")
-                    .set("dataType", "file")
-                    .set("attachment", fiv)
-                }
-              artifactSrv.create(caseId, multiFields)
-                .map(multiResult ⇒ renderer.toMultiOutput(CREATED, multiResult))
-          }
-          .getOrElse {
-            artifactSrv.create(caseId, fields.unset("isZip").unset("zipPassword"))
-              .map(artifact ⇒ renderer.toOutput(CREATED, artifact))
-          }
-      }
-      else if (data.length == 1) {
-        artifactSrv.create(caseId, fields.set("data", data.head).unset("isZip").unset("zipPassword"))
-          .map(artifact ⇒ renderer.toOutput(CREATED, artifact))
-      }
-      else {
-        val multiFields = data.map(fields.set("data", _).unset("isZip").unset("zipPassword"))
-        artifactSrv.create(caseId, multiFields)
-          .map(multiResult ⇒ renderer.toMultiOutput(CREATED, multiResult))
-      }
+            artifactSrv
+              .create(caseId, multiFields)
+              .map(multiResult ⇒ renderer.toMultiOutput(CREATED, multiResult))
+        }
+        .getOrElse {
+          artifactSrv
+            .create(caseId, fields.unset("isZip").unset("zipPassword"))
+            .map(artifact ⇒ renderer.toOutput(CREATED, artifact))
+        }
+    } else if (data.length == 1) {
+      artifactSrv
+        .create(caseId, fields.set("data", data.head).unset("isZip").unset("zipPassword"))
+        .map(artifact ⇒ renderer.toOutput(CREATED, artifact))
+    } else {
+      val multiFields = data.map(fields.set("data", _).unset("isZip").unset("zipPassword"))
+      artifactSrv
+        .create(caseId, multiFields)
+        .map(multiResult ⇒ renderer.toMultiOutput(CREATED, multiResult))
+    }
   }
 
   @Timed
   def get(id: String): Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { implicit request ⇒
-    artifactSrv.get(id)
+    artifactSrv
+      .get(id)
       .map(artifact ⇒ renderer.toOutput(OK, artifact))
   }
 
   @Timed
   def update(id: String): Action[Fields] = authenticated(Roles.write).async(fieldsBodyParser) { implicit request ⇒
-    artifactSrv.update(id, request.body.unset("attachment"))
+    artifactSrv
+      .update(id, request.body.unset("attachment"))
       .map(artifact ⇒ renderer.toOutput(OK, artifact))
   }
 
@@ -143,7 +149,8 @@
 
   @Timed
   def delete(id: String): Action[AnyContent] = authenticated(Roles.write).async { implicit request ⇒
-    artifactSrv.delete(id)
+    artifactSrv
+      .delete(id)
       .map(_ ⇒ NoContent)
   }
 
@@ -151,9 +158,9 @@
   def findInCase(caseId: String): Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { implicit request ⇒
     import org.elastic4play.services.QueryDSL._
     val childQuery = request.body.getValue("query").fold[QueryDef](QueryDSL.any)(_.as[QueryDef])
-    val query = and(childQuery, "_parent" ~= caseId)
-    val range = request.body.getString("range")
-    val sort = request.body.getStrings("sort").getOrElse(Nil)
+    val query = and(childQuery, withParent("case", caseId))
+    val range = request.body.getString("range")
+    val sort  = request.body.getStrings("sort").getOrElse(Nil)
 
     val (artifacts, total) = artifactSrv.find(query, range, sort)
     renderer.toOutput(OK, artifacts, total)
@@ -161,14 +168,14 @@
 
   @Timed
   def find(): Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { implicit request ⇒
-    val query = request.body.getValue("query").fold[QueryDef](QueryDSL.any)(_.as[QueryDef])
-    val range = request.body.getString("range")
-    val sort = request.body.getStrings("sort").getOrElse(Nil)
-    val nparent = request.body.getLong("nparent").getOrElse(0L).toInt
+    val query     = request.body.getValue("query").fold[QueryDef](QueryDSL.any)(_.as[QueryDef])
+    val range     = request.body.getString("range")
+    val sort      = request.body.getStrings("sort").getOrElse(Nil)
+    val nparent   = request.body.getLong("nparent").getOrElse(0L).toInt
     val withStats = request.body.getBoolean("nstats").getOrElse(false)
 
     val (artifacts, total) = artifactSrv.find(query, range, sort)
-    val artifactWithCase = auxSrv(artifacts, nparent, withStats, removeUnaudited = false)
+    val artifactWithCase    = auxSrv(artifacts, nparent, withStats, removeUnaudited = false)
     renderer.toOutput(OK, artifactWithCase, total)
   }
 
@@ -176,10 +183,10 @@
   def findSimilar(artifactId: String): Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { implicit request ⇒
     artifactSrv.get(artifactId).flatMap { artifact ⇒
       val range = request.body.getString("range")
-      val sort = request.body.getStrings("sort").getOrElse(Nil)
+      val sort  = request.body.getStrings("sort").getOrElse(Nil)
 
       val (artifacts, total) = artifactSrv.findSimilar(artifact, range, sort)
-      val artifactWithCase = auxSrv(artifacts, 1, withStats = false, removeUnaudited = true)
+      val artifactWithCase    = auxSrv(artifacts, 1, withStats = false, removeUnaudited = true)
       renderer.toOutput(OK, artifactWithCase, total)
     }
   }
 
@@ -187,7 +194,7 @@
   @Timed
   def stats(): Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { implicit request ⇒
     val query = request.body.getValue("query").fold[QueryDef](QueryDSL.any)(_.as[QueryDef])
-    val aggs = request.body.getValue("stats").getOrElse(throw BadRequestError("Parameter \"stats\" is missing")).as[Seq[Agg]]
+    val aggs  = request.body.getValue("stats").getOrElse(throw BadRequestError("Parameter \"stats\" is missing")).as[Seq[Agg]]
     artifactSrv.stats(query, aggs).map(s ⇒ Ok(s))
   }
 }
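Note: the query change in findInCase is the recurring ES6 one (it reappears in CaseMergeSrv and CaseSrv at the end of this patch). ElasticSearch 6 drops the _parent meta-field in favour of join fields, so elastic4play's QueryDSL now wants the parent type spelled out:

    import org.elastic4play.services.QueryDSL._

    // elastic4play 1.10 / ES 5: match children through the _parent meta-field
    //   and(childQuery, "_parent" ~= caseId)
    // elastic4play 1.11 / ES 6: the parent type is part of the query
    val query = and(childQuery, withParent("case", caseId))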
diff --git a/thehive-backend/app/controllers/StatusCtrl.scala b/thehive-backend/app/controllers/StatusCtrl.scala
index 81b846a30b..3ca7126db2 100644
--- a/thehive-backend/app/controllers/StatusCtrl.scala
+++ b/thehive-backend/app/controllers/StatusCtrl.scala
@@ -9,7 +9,7 @@ import play.api.Configuration
 import play.api.libs.json.{ JsBoolean, JsObject, JsString, Json }
 import play.api.libs.json.Json.toJsFieldJsValueWrapper
 import play.api.mvc.{ AbstractController, Action, AnyContent, ControllerComponents }
 
-import com.sksamuel.elastic4s.ElasticDsl
+import com.sksamuel.elastic4s.http.ElasticDsl
 import connectors.Connector
 import models.HealthStatus
 import org.elastic4play.Timed
@@ -37,7 +37,7 @@ class StatusCtrl @Inject() (
         "Elastic4Play" → getVersion(classOf[Timed]),
         "Play" → getVersion(classOf[AbstractController]),
         "Elastic4s" → getVersion(classOf[ElasticDsl]),
-        "ElasticSearch" → getVersion(classOf[org.elasticsearch.Build])),
+        "ElasticSearch" → getVersion(classOf[org.elasticsearch.client.Node])),
       "connectors" → JsObject(connectors.map(c ⇒ c.name → c.status).toSeq),
       "health" → Json.obj("elasticsearch" → clusterStatusName),
       "config" → Json.obj(
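Note: two side effects of moving off the TCP transport client meet here. The DSL for elastic4s's HTTP client lives under com.sksamuel.elastic4s.http, and org.elasticsearch.Build (part of the full server jar) is no longer on the classpath, so the version probe is re-anchored on org.elasticsearch.client.Node from the low-level REST client jar. getVersion itself is not shown in this diff; presumably it reads the implementation version from the jar manifest, roughly:

    // Assumed shape of getVersion (not part of this patch): any class from
    // the right jar works, since the version comes from its MANIFEST.MF.
    private def getVersion(c: Class[_]): String =
      Option(c.getPackage.getImplementationVersion).getOrElse("SNAPSHOT")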
diff --git a/thehive-backend/app/models/Migration.scala b/thehive-backend/app/models/Migration.scala
index 6f35c01d8f..c40524c9cd 100644
--- a/thehive-backend/app/models/Migration.scala
+++ b/thehive-backend/app/models/Migration.scala
@@ -2,7 +2,6 @@ package models
 
 import java.nio.file.{ Files, Path }
 import java.util.Date
-import javax.inject.{ Inject, Singleton }
 
 import scala.collection.JavaConverters._
 import scala.concurrent.{ ExecutionContext, Future }
@@ -16,38 +15,39 @@ import play.api.{ Configuration, Environment, Logger }
 import akka.NotUsed
 import akka.stream.Materializer
 import akka.stream.scaladsl.Source
+import javax.inject.{ Inject, Singleton }
 import services.{ AlertSrv, DashboardSrv }
 
 import org.elastic4play.ConflictError
 import org.elastic4play.controllers.Fields
 import org.elastic4play.services.JsonFormat.attachmentFormat
-import org.elastic4play.services._
+import org.elastic4play.services.{ IndexType, _ }
 import org.elastic4play.utils.Hasher
 
 case class UpdateMispAlertArtifact() extends EventMessage
 
 @Singleton
 class Migration(
-    mispCaseTemplate: Option[String],
-    mainHash: String,
-    extraHashes: Seq[String],
-    datastoreName: String,
-    dblists: DBLists,
-    eventSrv: EventSrv,
-    dashboardSrv: DashboardSrv,
-    userSrv: UserSrv,
-    environment: Environment,
-    implicit val ec: ExecutionContext,
-    implicit val materializer: Materializer) extends MigrationOperations {
+  mispCaseTemplate: Option[String],
+  mainHash: String,
+  extraHashes: Seq[String],
+  datastoreName: String,
+  dblists: DBLists,
+  eventSrv: EventSrv,
+  dashboardSrv: DashboardSrv,
+  userSrv: UserSrv,
+  environment: Environment,
+  implicit val ec: ExecutionContext,
+  implicit val materializer: Materializer) extends MigrationOperations {
 
   @Inject() def this(
-      configuration: Configuration,
-      dblists: DBLists,
-      eventSrv: EventSrv,
-      dashboardSrv: DashboardSrv,
-      userSrv: UserSrv,
-      environment: Environment,
-      ec: ExecutionContext,
-      materializer: Materializer) = {
+    configuration: Configuration,
+    dblists: DBLists,
+    eventSrv: EventSrv,
+    dashboardSrv: DashboardSrv,
+    userSrv: UserSrv,
+    environment: Environment,
+    ec: ExecutionContext,
+    materializer: Materializer) = {
     this(
       configuration.getOptional[String]("misp.caseTemplate"),
       configuration.get[String]("datastore.hash.main"),
@@ -82,7 +82,7 @@
         .map(_ ⇒ ())
         .recover {
           case _: ConflictError ⇒
-          case error            ⇒ logger.error(s"Failed to add dataType $dt during migration", error)
+          case error ⇒ logger.error(s"Failed to add dataType $dt during migration", error)
         }
     }
     .map(_ ⇒ ())
@@ -122,6 +122,8 @@
 
   }
 
+  override def indexType(version: Int): IndexType.Value = if (version > 14) IndexType.indexWithoutMappingTypes else IndexType.indexWithMappingTypes
+
   override val operations: PartialFunction[DatabaseState, Seq[Operation]] = {
     case DatabaseState(version) if version < 7 ⇒ Nil
     case DatabaseState(7) ⇒
@@ -167,7 +169,7 @@
             case "tlp:white" ⇒ 0L
             case "tlp:green" ⇒ 1L
             case "tlp:amber" ⇒ 2L
-            case "tlp:red"   ⇒ 3L
+            case "tlp:red" ⇒ 3L
           }
           .getOrElse(2L)
         val source = (misp \ "serverId").asOpt[String].getOrElse("")
@@ -192,13 +194,14 @@
             "follow" → (misp \ "follow").as[JsBoolean])
         },
         removeEntity("audit")(o ⇒ (o \ "objectType").asOpt[String].contains("alert")))
-    case ds @ DatabaseState(9) ⇒
+    case ds@DatabaseState(9) ⇒
       object Base64 {
         def unapply(data: String): Option[Array[Byte]] = Try(java.util.Base64.getDecoder.decode(data)).toOption
      }
 
       // store attachment id and check to prevent document already exists error
       var dataIds = Set.empty[String]
+
       def containsOrAdd(id: String) = {
         dataIds.synchronized {
           if (dataIds.contains(id)) true
@@ -236,7 +239,7 @@
             (artifact \ "data").asOpt[String]
               .collect {
                 // get attachment encoded in data field
-                case AlertSrv.dataExtractor(filename, contentType, data @ Base64(rawData)) ⇒
+                case AlertSrv.dataExtractor(filename, contentType, data@Base64(rawData)) ⇒
                   val attachmentId = mainHasher.fromByteArray(rawData).head.toString()
                   ds.getEntity(datastoreName, s"${attachmentId}_0")
                     .map(_ ⇒ Nil)
@@ -270,7 +273,7 @@
         mapAttribute("alert", "status") {
           case JsString("Update") ⇒ JsString("Updated")
           case JsString("Ignore") ⇒ JsString("Ignored")
-          case other              ⇒ other
+          case other ⇒ other
         },
         // Fix double encode of metrics
         mapEntity("dblist") {
@@ -300,7 +303,7 @@
           caseTemplate - "metricNames" + ("metrics" → metrics)
         },
         addAttribute("case_artifact", "sighted" → JsFalse))
-    case ds @ DatabaseState(12) ⇒
+    case ds@DatabaseState(12) ⇒
       Seq(
         // Remove alert artifacts in audit trail
        mapEntity("audit") {
@@ -341,6 +344,15 @@
       addAttribute("alert", "customFields" → JsObject.empty),
       addAttribute("case_task", "group" → JsString("default")),
       addAttribute("case", "pap" → JsNumber(2)))
+    case DatabaseState(14) ⇒ Seq(
+      mapEntity("sequence") { seq =>
+        val oldId   = (seq \ "_id").as[String]
+        val counter = (seq \ "counter").as[JsNumber]
+        seq - "counter" - "_routing" +
+          ("_id" -> JsString("sequence_" + oldId)) +
+          ("sequenceCounter" -> counter)
+      }
+    )
   }
 
   private def generateAlertId(alert: JsObject): String = {
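Note: the two additions in this file carry the schema side of the upgrade. indexType selects a single-type index layout for schema versions above 14, because ES6 removes mapping types; and the DatabaseState(14) step rewrites the sequence documents, whose _id values were previously unique only within their own mapping type, so they can share one index without colliding. A before/after sketch of one such document (the sequence name "case" is illustrative):

    import play.api.libs.json.Json

    // Before (own mapping type; _id unique only within that type):
    val before = Json.obj("_id" -> "case", "counter" -> 42)
    // After migration to schema 15 (_id namespaced, counter field renamed):
    val after = Json.obj("_id" -> "sequence_case", "sequenceCounter" -> 42)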
diff --git a/thehive-backend/app/models/Roles.scala b/thehive-backend/app/models/Roles.scala
index 927b57e60f..64b1867337 100644
--- a/thehive-backend/app/models/Roles.scala
+++ b/thehive-backend/app/models/Roles.scala
@@ -2,8 +2,8 @@ package models
 
 import play.api.libs.json.{ JsString, JsValue }
 
-import com.sksamuel.elastic4s.ElasticDsl.keywordField
-import com.sksamuel.elastic4s.mappings.KeywordFieldDefinition
+import com.sksamuel.elastic4s.http.ElasticDsl.keywordField
+import com.sksamuel.elastic4s.mappings.KeywordField
 import org.scalactic.{ Every, Good, One, Or }
 
 import models.JsonFormat.roleFormat
@@ -46,5 +46,5 @@ object RoleAttributeFormat extends AttributeFormat[Role]("role") {
 
   }
 
-  override def elasticType(attributeName: String): KeywordFieldDefinition = keywordField(attributeName)
+  override def elasticType(attributeName: String): KeywordField = keywordField(attributeName)
 }
\ No newline at end of file
diff --git a/thehive-backend/app/models/package.scala b/thehive-backend/app/models/package.scala
index 9447b30217..5817120386 100644
--- a/thehive-backend/app/models/package.scala
+++ b/thehive-backend/app/models/package.scala
@@ -1,5 +1,3 @@
-
-
 package object models {
-  val modelVersion = 14
-}
\ No newline at end of file
+  val modelVersion = 15
+}
diff --git a/thehive-backend/app/services/CaseMergeSrv.scala b/thehive-backend/app/services/CaseMergeSrv.scala
index 0a7099cf76..000e4c5f82 100644
--- a/thehive-backend/app/services/CaseMergeSrv.scala
+++ b/thehive-backend/app/services/CaseMergeSrv.scala
@@ -137,7 +137,7 @@
   private[services] def baseFields(entity: BaseEntity): Fields = Fields(entity.attributes - "_id" - "_routing" - "_parent" - "_type" - "_version" - "createdBy" - "createdAt" - "updatedBy" - "updatedAt" - "user")
 
   private[services] def mergeLogs(oldTask: Task, newTask: Task)(implicit authContext: AuthContext): Future[Done] = {
-    logSrv.find("_parent" ~= oldTask.id, Some("all"), Nil)._1
+    logSrv.find(withParent("case_task", oldTask.id), Some("all"), Nil)._1
       .mapAsyncUnordered(5) { log ⇒
         logSrv.create(newTask, baseFields(log))
       }
diff --git a/thehive-backend/app/services/CaseSrv.scala b/thehive-backend/app/services/CaseSrv.scala
index b350de6815..5e3bfd7a6f 100644
--- a/thehive-backend/app/services/CaseSrv.scala
+++ b/thehive-backend/app/services/CaseSrv.scala
@@ -131,11 +131,11 @@
     import org.elastic4play.services.QueryDSL._
     for {
       taskStats ← taskSrv.stats(and(
-        "_parent" ~= id,
+        withParent("case", id),
         "status" in ("Waiting", "InProgress", "Completed")), Seq(groupByField("status", selectCount)))
       artifactStats ← findSrv(
         artifactModel,
-        and("_parent" ~= id, "status" ~= "Ok"),
+        and(withParent("case", id), "status" ~= "Ok"),
         groupByField("status", selectCount))
     } yield Json.obj(("tasks", taskStats), ("artifacts", artifactStats))
   }
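Note: for readers tracking what withParent compiles to on the wire — with mapping types gone, parent/child relations live in a join field, and a children-by-parent-id filter is expressed in the raw ES6 query DSL roughly as below. The relation name is an assumption about how elastic4play names the join; only the outer parent_id shape is standard ES6:

    // Illustrative only; "case_artifact" as the child relation name is assumed.
    val rawParentIdQuery =
      """{ "query": { "parent_id": { "type": "case_artifact", "id": "<caseId>" } } }"""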