From 584d6d4d8ec06a393d6a6bbd0b98254ef08d0018 Mon Sep 17 00:00:00 2001 From: To-om Date: Fri, 2 Dec 2016 14:08:38 +0100 Subject: [PATCH] #53 Create Cortex connector --- build.sbt | 11 +- .../app/controllers/Analyzer.scala | 51 ------ .../app/controllers/Artifact.scala | 13 +- thehive-backend/app/controllers/Job.scala | 68 -------- thehive-backend/app/models/Analyzer.scala | 142 ---------------- thehive-backend/app/models/JsonFormat.scala | 9 -- .../app/services/AnalyzerSrv.scala | 150 ----------------- .../app/services/ArtifactSrv.scala | 3 +- .../app/services/CaseMergeSrv.scala | 26 +-- thehive-backend/conf/routes | 152 ++++++++---------- thehive-cortex/.gitignore | 1 + .../connectors/cortex/CortexConnector.scala | 22 +++ .../app/connectors/cortex/CortextCtrl.scala | 98 +++++++++++ .../app/connectors/cortex/JsonFormat.scala | 11 ++ .../connectors/cortex/models/Analyzer.scala | 17 ++ .../connectors/cortex/models/Artifact.scala | 9 ++ .../app/connectors/cortex}/models/Job.scala | 102 ++++++------ .../connectors/cortex/models/JsonFormat.scala | 34 ++++ .../cortex/services/CortexClient.scala | 69 ++++++++ .../cortex/services/CortexSrv.scala | 142 ++++++++++++++++ .../connectors/cortex}/services/JobSrv.scala | 83 ++++------ thehive-cortex/build.sbt | 8 + .../app/connectors/misp/MispModule.scala | 1 + 23 files changed, 584 insertions(+), 638 deletions(-) delete mode 100644 thehive-backend/app/controllers/Analyzer.scala delete mode 100644 thehive-backend/app/controllers/Job.scala delete mode 100644 thehive-backend/app/models/Analyzer.scala delete mode 100644 thehive-backend/app/services/AnalyzerSrv.scala create mode 100644 thehive-cortex/.gitignore create mode 100644 thehive-cortex/app/connectors/cortex/CortexConnector.scala create mode 100644 thehive-cortex/app/connectors/cortex/CortextCtrl.scala create mode 100644 thehive-cortex/app/connectors/cortex/JsonFormat.scala create mode 100644 thehive-cortex/app/connectors/cortex/models/Analyzer.scala create mode 100644 
thehive-cortex/app/connectors/cortex/models/Artifact.scala rename {thehive-backend/app => thehive-cortex/app/connectors/cortex}/models/Job.scala (75%) create mode 100644 thehive-cortex/app/connectors/cortex/models/JsonFormat.scala create mode 100644 thehive-cortex/app/connectors/cortex/services/CortexClient.scala create mode 100644 thehive-cortex/app/connectors/cortex/services/CortexSrv.scala rename {thehive-backend/app => thehive-cortex/app/connectors/cortex}/services/JobSrv.scala (55%) create mode 100644 thehive-cortex/build.sbt diff --git a/build.sbt b/build.sbt index d098997ec5..1446efe936 100644 --- a/build.sbt +++ b/build.sbt @@ -11,10 +11,15 @@ lazy val thehiveMisp = (project in file("thehive-misp")) .dependsOn(thehiveBackend) .settings(publish := {}) +lazy val thehiveCortex = (project in file("thehive-cortex")) + .dependsOn(thehiveBackend) + .settings(publish := {}) + .settings(SbtScalariform.scalariformSettings: _*) + lazy val main = (project in file(".")) .enablePlugins(PlayScala) - .dependsOn(thehiveBackend, thehiveMetrics, thehiveMisp) - .aggregate(thehiveBackend, thehiveMetrics, thehiveMisp) + .dependsOn(thehiveBackend, thehiveMetrics, thehiveMisp, thehiveCortex) + .aggregate(thehiveBackend, thehiveMetrics, thehiveMisp, thehiveCortex) .settings(aggregate in Docker := false) .settings(PublishToBinTray.settings: _*) .settings(Release.settings: _*) @@ -113,8 +118,6 @@ import scalariform.formatter.preferences._ import com.typesafe.sbt.SbtScalariform import com.typesafe.sbt.SbtScalariform.ScalariformKeys -SbtScalariform.defaultScalariformSettings - ScalariformKeys.preferences in ThisBuild := ScalariformKeys.preferences.value .setPreference(AlignParameters, false) // .setPreference(FirstParameterOnNewline, Force) diff --git a/thehive-backend/app/controllers/Analyzer.scala b/thehive-backend/app/controllers/Analyzer.scala deleted file mode 100644 index 77b1361de8..0000000000 --- a/thehive-backend/app/controllers/Analyzer.scala +++ /dev/null @@ -1,51 +0,0 @@ 
-package controllers - -import javax.inject.{ Inject, Singleton } - -import scala.concurrent.ExecutionContext - -import play.api.mvc.Controller -import play.api.http.Status - -import org.elastic4play.controllers.{ Fields, Authenticated, Renderer, FieldsBodyParser } -import org.elastic4play.services.{ Role, QueryDSL, AuxSrv, QueryDef } -import org.elastic4play.services.JsonFormat._ -import org.elastic4play.models.JsonFormat._ -import services.AnalyzerSrv -import models.JsonFormat._ -import org.elastic4play.Timed - -@Singleton -class AnalyzerCtrl @Inject() ( - analyzerSrv: AnalyzerSrv, - auxSrv: AuxSrv, - authenticated: Authenticated, - renderer: Renderer, - fieldsBodyParser: FieldsBodyParser, - implicit val ec: ExecutionContext) extends Controller with Status { - - @Timed - def get(id: String) = authenticated(Role.read).async { implicit request ⇒ - analyzerSrv.get(id.replaceAll("\\.", "_")) // FIXME replace "." by "_" should not be usefull after migration - .map(analyzer ⇒ renderer.toOutput(OK, analyzer)) - } - - @Timed - def find = authenticated(Role.read).async(fieldsBodyParser) { implicit request ⇒ - val query = request.body.getValue("query").fold[QueryDef](QueryDSL.any)(_.as[QueryDef]) - val range = request.body.getString("range") - val sort = request.body.getStrings("sort").getOrElse(Nil) - val nparent = request.body.getLong("nparent").getOrElse(0L).toInt - val withStats = request.body.getBoolean("nstats").getOrElse(false) - - val (analyzers, total) = analyzerSrv.find(query, range, sort) - val analyzersWithStats = auxSrv(analyzers, nparent, withStats) - renderer.toOutput(OK, analyzersWithStats, total) - } - - @Timed - def getReport(analyzerId: String, flavor: String) = authenticated(Role.read).async { request ⇒ - analyzerSrv.getReport(analyzerId.replaceAll("\\.", "_"), flavor) // FIXME replace "." 
by "_" should not be usefull after migration - .map { reportTemplate ⇒ Ok(reportTemplate) } - } -} \ No newline at end of file diff --git a/thehive-backend/app/controllers/Artifact.scala b/thehive-backend/app/controllers/Artifact.scala index 575773ba6e..59d49732ea 100644 --- a/thehive-backend/app/controllers/Artifact.scala +++ b/thehive-backend/app/controllers/Artifact.scala @@ -5,8 +5,7 @@ import javax.inject.{ Inject, Singleton } import scala.concurrent.{ ExecutionContext, Future } import play.api.http.Status -import play.api.libs.json.{ JsArray, Json } -import play.api.libs.json.Json.toJsFieldJsValueWrapper +import play.api.libs.json.JsArray import play.api.mvc.Controller import org.elastic4play.{ BadRequestError, Timed } @@ -16,13 +15,11 @@ import org.elastic4play.services.{ Agg, AuxSrv } import org.elastic4play.services.{ QueryDSL, QueryDef, Role } import org.elastic4play.services.JsonFormat.{ aggReads, queryReads } -import models.JsonFormat.analyzerWrites -import services.{ AnalyzerSrv, ArtifactSrv } +import services.ArtifactSrv @Singleton class ArtifactCtrl @Inject() ( artifactSrv: ArtifactSrv, - analyzerSrv: AnalyzerSrv, auxSrv: AuxSrv, authenticated: Authenticated, renderer: Renderer, @@ -54,10 +51,8 @@ class ArtifactCtrl @Inject() ( @Timed def get(id: String) = authenticated(Role.read).async(fieldsBodyParser) { implicit request ⇒ - for { - artifact ← artifactSrv.get(id, request.body.getStrings("fields").map("dataType" +: _)) - analyzers ← analyzerSrv.availableFor(artifact.dataType()).map(multiResult ⇒ Json.toJson(multiResult)) - } yield renderer.toOutput(OK, Json.obj("artifact" → artifact, "analyzers" → analyzers)) + artifactSrv.get(id, request.body.getStrings("fields").map("dataType" +: _)) + .map(artifact ⇒ renderer.toOutput(OK, artifact)) } @Timed diff --git a/thehive-backend/app/controllers/Job.scala b/thehive-backend/app/controllers/Job.scala deleted file mode 100644 index f001b69ec1..0000000000 --- a/thehive-backend/app/controllers/Job.scala +++ 
/dev/null @@ -1,68 +0,0 @@ -package controllers - -import javax.inject.{ Inject, Singleton } - -import scala.concurrent.ExecutionContext -import scala.reflect.runtime.universe - -import play.api.http.Status -import play.api.mvc.Controller - -import org.elastic4play.{ BadRequestError, Timed } -import org.elastic4play.controllers.{ Authenticated, FieldsBodyParser, Renderer } -import org.elastic4play.models.JsonFormat.baseModelEntityWrites -import org.elastic4play.services.{ QueryDSL, QueryDef, Role } -import org.elastic4play.services.Agg -import org.elastic4play.services.JsonFormat.{ aggReads, queryReads } - -import services.JobSrv - -@Singleton -class JobCtrl @Inject() ( - jobSrv: JobSrv, - authenticated: Authenticated, - renderer: Renderer, - fieldsBodyParser: FieldsBodyParser, - implicit val ec: ExecutionContext) extends Controller with Status { - - @Timed - def create(artifactId: String) = authenticated(Role.write).async(fieldsBodyParser) { implicit request ⇒ - jobSrv.create(artifactId, request.body) - .map(job ⇒ renderer.toOutput(CREATED, job)) - } - - @Timed - def get(id: String) = authenticated(Role.read).async { implicit request ⇒ - jobSrv.get(id) - .map(artifact ⇒ renderer.toOutput(OK, artifact)) - } - - @Timed - def findInArtifact(artifactId: String) = authenticated(Role.read).async(fieldsBodyParser) { implicit request ⇒ - import org.elastic4play.services.QueryDSL._ - val childQuery = request.body.getValue("query").fold[QueryDef](QueryDSL.any)(_.as[QueryDef]) - val query = and(childQuery, "_parent" ~= artifactId) - val range = request.body.getString("range") - val sort = request.body.getStrings("sort").getOrElse(Nil) - - val (jobs, total) = jobSrv.find(query, range, sort) - renderer.toOutput(OK, jobs, total) - } - - @Timed - def find = authenticated(Role.read).async(fieldsBodyParser) { implicit request ⇒ - val query = request.body.getValue("query").fold[QueryDef](QueryDSL.any)(_.as[QueryDef]) - val range = request.body.getString("range") - val sort = 
request.body.getStrings("sort").getOrElse(Nil) - - val (jobs, total) = jobSrv.find(query, range, sort) - renderer.toOutput(OK, jobs, total) - } - - @Timed - def stats() = authenticated(Role.read).async(fieldsBodyParser) { implicit request ⇒ - val query = request.body.getValue("query").fold[QueryDef](QueryDSL.any)(_.as[QueryDef]) - val agg = request.body.getValue("stats").getOrElse(throw BadRequestError("Parameter \"stats\" is missing")).as[Agg] - jobSrv.stats(query, agg).map(s ⇒ Ok(s)) - } -} \ No newline at end of file diff --git a/thehive-backend/app/models/Analyzer.scala b/thehive-backend/app/models/Analyzer.scala deleted file mode 100644 index c7868efc2a..0000000000 --- a/thehive-backend/app/models/Analyzer.scala +++ /dev/null @@ -1,142 +0,0 @@ -package models - -import java.io.{ BufferedReader, InputStreamReader } -import java.nio.file.{ Files, Path } - -import scala.concurrent.{ ExecutionContext, Future, blocking } -import scala.sys.process.{ BasicIO, Process, ProcessIO } - -import akka.stream.Materializer -import akka.stream.scaladsl.FileIO - -import play.api.Logger -import play.api.libs.json.{ JsObject, JsString, Json } - -import org.elastic4play.models.{ AttributeDef, AttributeFormat ⇒ F, AttributeOption ⇒ O, EntityDef, ModelDef } -import org.elastic4play.services.{ AttachmentSrv, AuthContext, TempSrv } - -import com.fasterxml.jackson.core.JsonParseException -import com.fasterxml.jackson.databind.JsonMappingException - -/** - * New analyzers should : - * - create a file (or stdout ?) containing the main report in JSON format - * - create optional additional files - * - the main report can contain artifacts. They are materialized by a JsObject that have an attribute "_type":"artifact" and all possible artifact attributes - * - the main report should reference additional files as artifact. Filename must be in "filename" attribute. 
- * All output files must be in a unique working directory, dedicated to the current analyze - */ -abstract class AnalyzerInfo { - val name: String - val version: String - val description: String - val dataTypeList: Seq[String] - val id = (name + "_" + version).replaceAll("\\.", "_") - def getReport(flavor: String): Future[String] -} - -sealed abstract class Analyzer extends AnalyzerInfo { - def analyze(attachmentSrv: AttachmentSrv, artifact: Artifact)(implicit authContext: AuthContext): Future[(JobStatus.Type, JsObject)] -} - -case class ExternalAnalyzer( - name: String, - version: String, - description: String, - dataTypeList: Seq[String], - command: Path, - config: JsObject, - reportPath: Path, - tempSrv: TempSrv)(implicit val ec: ExecutionContext, val mat: Materializer) extends Analyzer { - val log = Logger(getClass) - private val osexec = if (System.getProperty("os.name").toLowerCase.contains("win")) - (c: String) ⇒ s"""cmd /c $c""" - else - (c: String) ⇒ s"""sh -c "./$c" """ - - private[ExternalAnalyzer] def analyzeHelper(attachmentSrv: AttachmentSrv, artifact: Artifact)(process: JsObject ⇒ (JobStatus.Type, JsObject)): Future[(JobStatus.Type, JsObject)] = { - artifact.attachment() match { - case Some(attachment) ⇒ - val artifactFile = Files.createTempFile("TheHive_", s"_$id.tmp").toAbsolutePath() - attachmentSrv.source(attachment.id).runWith(FileIO.toPath(artifactFile)) - .map { - case result if result.wasSuccessful ⇒ artifact.attributes + ("config" → config) + ("file" → JsString(artifactFile.toString)) - case result ⇒ throw result.getError - } - .map(process) - .andThen { case _ ⇒ Files.delete(artifactFile) } - case None ⇒ Future { process(artifact.attributes + ("config" → config)) } - } - } - - def analyze(attachmentSrv: AttachmentSrv, artifact: Artifact)(implicit authContext: AuthContext): Future[(JobStatus.Type, JsObject)] = { - val output = new StringBuffer - val error = new StringBuffer - - analyzeHelper(attachmentSrv, artifact) { input ⇒ - blocking { - 
log.info(s"Execute ${osexec(command.getFileName.toString)} in ${command.getParent.toFile.getAbsoluteFile.getName}") - val exitValue = Process(osexec(command.getFileName.toString), command.getParent.toFile).run( - new ProcessIO( - { stdin ⇒ - try stdin.write(input.toString.getBytes("UTF-8")) - finally stdin.close() - }, - { stdout ⇒ - val reader = new BufferedReader(new InputStreamReader(stdout, "UTF-8")) - try BasicIO.processLinesFully { line ⇒ - output.append(line).append(System.lineSeparator()) - () - }(reader.readLine) - finally reader.close() - }, - { stderr ⇒ - val reader = new BufferedReader(new InputStreamReader(stderr, "UTF-8")) - try BasicIO.processLinesFully { line ⇒ - error.append(line).append(System.lineSeparator()) - () - }(reader.readLine) - finally reader.close() - })).exitValue - val result = Json.parse(output.toString).as[JsObject] - if (exitValue == 0) - (JobStatus.Success, result) - else - (JobStatus.Failure, result) - } - } - .recover { - case _: JsonMappingException ⇒ - error.append(output) - (JobStatus.Failure, JsObject(Seq("errorMessage" → JsString(s"Error: Invalid output\n$error")))) - case _: JsonParseException ⇒ - error.append(output) - (JobStatus.Failure, JsObject(Seq("errorMessage" → JsString(s"Error: Invalid output\n$error")))) - case t: Throwable ⇒ - (JobStatus.Failure, JsObject(Seq("errorMessage" → JsString(t.getMessage + ":" + t.getStackTrace().mkString("", "\n\t", "\n"))))) - } - } - - def getReport(flavor: String): Future[String] = - Future { scala.io.Source.fromFile(reportPath.resolve(s"$flavor.html").toFile()).mkString } -} - -abstract class JavaAnalyzer extends Analyzer - -trait AnalyzerAttributes { _: AttributeDef ⇒ - val analyzerId = attribute("_id", F.stringFmt, "Analyzer ID", O.readonly) - val analyzerName = attribute("name", F.stringFmt, "Name of the analyzer", O.readonly) - val version = attribute("version", F.stringFmt, "Version", O.readonly) - val description = attribute("description", F.textFmt, "Description", 
O.readonly) - val dataTypeList = multiAttribute("dataTypeList", F.stringFmt, "List of accepted data types") -} -class AnalyzerModel extends ModelDef[AnalyzerModel, AnalyzerDesc]("analyzer") with AnalyzerAttributes -class AnalyzerDesc(model: AnalyzerModel, attributes: JsObject) extends EntityDef[AnalyzerModel, AnalyzerDesc](model, attributes) with AnalyzerAttributes { analyzer ⇒ - def info = new AnalyzerInfo { - val name = analyzer.analyzerName() - val version = analyzer.version() - val description = analyzer.description() - val dataTypeList = analyzer.dataTypeList() - def getReport(flavor: String): Future[String] = Future.successful("") - } -} \ No newline at end of file diff --git a/thehive-backend/app/models/JsonFormat.scala b/thehive-backend/app/models/JsonFormat.scala index 531923ec58..7059220b5f 100644 --- a/thehive-backend/app/models/JsonFormat.scala +++ b/thehive-backend/app/models/JsonFormat.scala @@ -14,18 +14,9 @@ object JsonFormat { implicit val caseResolutionStatusFormat = enumFormat(CaseResolutionStatus) implicit val caseImpactStatusFormat = enumFormat(CaseImpactStatus) implicit val artifactStatusFormat = enumFormat(ArtifactStatus) - implicit val jobStatusFormat = enumFormat(JobStatus) implicit val taskStatusFormat = enumFormat(TaskStatus) implicit val logStatusFormat = enumFormat(LogStatus) implicit val caseTemplateStatusFormat = enumFormat(CaseTemplateStatus) implicit val pathWrites: Writes[Path] = Writes((value: Path) ⇒ JsString(value.toString)) - - implicit val analyzerWrites: Writes[Analyzer] = Writes((analyzer: Analyzer) ⇒ - Json.obj( - "name" → analyzer.name, - "version" → analyzer.version, - "description" → analyzer.description, - "dataTypeList" → analyzer.dataTypeList, - "id" → analyzer.id)) } \ No newline at end of file diff --git a/thehive-backend/app/services/AnalyzerSrv.scala b/thehive-backend/app/services/AnalyzerSrv.scala deleted file mode 100644 index 4d55c48886..0000000000 --- a/thehive-backend/app/services/AnalyzerSrv.scala +++ 
/dev/null @@ -1,150 +0,0 @@ -package services - -import java.io.File -import java.nio.file.{ Files, Path, Paths } - -import javax.inject.{ Inject, Singleton } - -import scala.annotation.implicitNotFound -import scala.collection.JavaConversions.{ asScalaSet, iterableAsScalaIterable } -import scala.concurrent.{ ExecutionContext, Future } - -import akka.NotUsed -import akka.stream.Materializer -import akka.stream.scaladsl.Source - -import play.api.{ Configuration, Logger } -import play.api.cache.CacheApi -import play.api.libs.json.JsObject -import play.api.libs.json.JsValue.jsValueToJsLookup -import play.api.libs.json.Json - -import org.reflections.Reflections - -import org.elastic4play.NotFoundError -import org.elastic4play.controllers.Fields -import org.elastic4play.services.{ CreateSrv, FindSrv, GetSrv } -import org.elastic4play.services.{ QueryDef, TempSrv } -import org.elastic4play.services.JsonFormat.configWrites - -import models.{ Analyzer, AnalyzerDesc, AnalyzerModel, ExternalAnalyzer, JavaAnalyzer } -import models.JsonFormat.analyzerWrites - -@Singleton -class AnalyzerSrv( - analyzerPath: Path, - analyzerPackages: Seq[String], - analyzerConfig: JsObject, - analyzerDefaultReportPath: Path, - analyzerModel: AnalyzerModel, - cache: CacheApi, - getSrv: GetSrv, - createSrv: CreateSrv, - findSrv: FindSrv, - userSrv: UserSrv, - tempSrv: TempSrv, - implicit val ec: ExecutionContext, - implicit val mat: Materializer) { - - @Inject def this( - configuration: Configuration, - analyzerModel: AnalyzerModel, - cache: CacheApi, - getSrv: GetSrv, - createSrv: CreateSrv, - findSrv: FindSrv, - userSrv: UserSrv, - tempSrv: TempSrv, - ec: ExecutionContext, - mat: Materializer) = - this( - Paths.get(configuration.getString("analyzer.path").get), - configuration.getStringSeq("analyzer.packages").get, - configWrites.writes(configuration.getConfig("analyzer.config").get), - Paths.get(configuration.getString("analyzer.defaultReport").get), - analyzerModel, - cache, - getSrv, - 
createSrv, - findSrv, - userSrv, - tempSrv, - ec, - mat) - - val log = Logger(getClass) - - def get(id: String): Future[Analyzer] = - localAnalyzers - .find(_.id == id) - .fold[Future[Analyzer]](Future.failed(NotFoundError(s"analyzer $id not found")))(a ⇒ Future.successful(a)) - - def find(queryDef: QueryDef, range: Option[String], sortBy: Seq[String]): (Source[AnalyzerDesc, NotUsed], Future[Long]) = { - findSrv[AnalyzerModel, AnalyzerDesc](analyzerModel, queryDef, range, sortBy) - } - - def getDesc(id: String) = getSrv[AnalyzerModel, AnalyzerDesc](analyzerModel, id) - - private def readInfo(file: Path) = { - val source = scala.io.Source.fromFile(file.toFile()) - try { - Json.parse(source.mkString) - } - finally { source.close() } - } - - private def externalAnalyzers = for { - infoFile ← Files.newDirectoryStream(analyzerPath, "*.json").toSeq - if Files.isReadable(infoFile) - info = readInfo(infoFile) - name ← (info \ "name").asOpt[String] orElse { log.warn(s"name is missing in $infoFile"); None } - version ← (info \ "version").asOpt[String] orElse { log.warn(s"version is missing in $infoFile"); None } - description ← (info \ "description").asOpt[String] orElse { log.warn(s"description is missing in $infoFile"); None } - dataTypeList ← (info \ "dataTypeList").asOpt[Seq[String]] orElse { log.warn(s"dataTypeList is missing in $infoFile"); None } - command ← (info \ "command").asOpt[String] orElse { log.warn(s"command is missing in $infoFile"); None } - config = (info \ "config").asOpt[JsObject].getOrElse(JsObject(Nil)) - baseConfig = (info \ "baseConfig").asOpt[String].flatMap(c ⇒ (analyzerConfig \ c).asOpt[JsObject]).getOrElse(JsObject(Nil)) - reportPath ← (info \ "report").asOpt[String].map(r ⇒ analyzerPath.resolve(r)) - absoluteCommand = analyzerPath.resolve(Paths.get(command.replaceAll("[\\/]", File.separator))) - _ = log.info(s"Register analyzer $name $version (${(name + "_" + version).replaceAll("\\.", "_")})") - } yield ExternalAnalyzer(name, version, 
description, dataTypeList, absoluteCommand, baseConfig.deepMerge(config), reportPath, tempSrv) - - private lazy val javaAnalyzers = { - new Reflections(new org.reflections.util.ConfigurationBuilder() - .forPackages(analyzerPackages: _*) - .setScanners(new org.reflections.scanners.SubTypesScanner(false))) - .getSubTypesOf(classOf[JavaAnalyzer]) - .filterNot(c ⇒ java.lang.reflect.Modifier.isAbstract(c.getModifiers)) - .map(c ⇒ c.newInstance) - .toSeq - } - - lazy val localAnalyzers = { - val analyzers = (externalAnalyzers ++ javaAnalyzers) - userSrv.inInitAuthContext { implicit authContext ⇒ - analyzers.foreach { analyzer ⇒ - val fields = Fields(Json.toJson(analyzer).as[JsObject]) - .set("_id", analyzer.id) - .unset("id") - createSrv[AnalyzerModel, AnalyzerDesc](analyzerModel, fields) - .onFailure { case t ⇒ log.error(s"Analyzer registration of ${analyzer.id} failed", t) } - } - Future.successful(analyzers) - } - analyzers - - } - - def availableFor(dataType: String): Future[Seq[Analyzer]] = Future.successful { - localAnalyzers.filter { _.dataTypeList.contains(dataType) } - } - - def getReport(analyzerId: String, flavor: String) = { - get(analyzerId) - .flatMap { analyzer ⇒ - analyzer.getReport(flavor) - } - .fallbackTo(Future(scala.io.Source.fromFile(analyzerDefaultReportPath.resolve(flavor + ".html").toFile).mkString)) - .recover { case _ ⇒ "" } - } -} \ No newline at end of file diff --git a/thehive-backend/app/services/ArtifactSrv.scala b/thehive-backend/app/services/ArtifactSrv.scala index 813a8514cb..2a32adcd69 100644 --- a/thehive-backend/app/services/ArtifactSrv.scala +++ b/thehive-backend/app/services/ArtifactSrv.scala @@ -11,7 +11,7 @@ import org.elastic4play.CreateError import org.elastic4play.controllers.Fields import org.elastic4play.services.{ Agg, AuthContext, CreateSrv, DeleteSrv, FieldsSrv, FindSrv, GetSrv, QueryDSL, QueryDef, UpdateSrv } -import models.{ Artifact, ArtifactModel, ArtifactStatus, Case, CaseModel, JobModel } +import models.{ Artifact, 
ArtifactModel, ArtifactStatus, Case, CaseModel } import org.elastic4play.utils.{ RichFuture, RichOr } import models.CaseStatus import models.CaseResolutionStatus @@ -20,7 +20,6 @@ import models.CaseResolutionStatus class ArtifactSrv @Inject() ( artifactModel: ArtifactModel, caseModel: CaseModel, - jobModel: JobModel, createSrv: CreateSrv, getSrv: GetSrv, updateSrv: UpdateSrv, diff --git a/thehive-backend/app/services/CaseMergeSrv.scala b/thehive-backend/app/services/CaseMergeSrv.scala index 7172b58462..9aed96a807 100644 --- a/thehive-backend/app/services/CaseMergeSrv.scala +++ b/thehive-backend/app/services/CaseMergeSrv.scala @@ -21,12 +21,16 @@ import org.elastic4play.services.AuthContext import org.elastic4play.services.JsonFormat.log import org.elastic4play.services.QueryDSL -import models.{ Artifact, ArtifactStatus, Case, CaseImpactStatus, CaseResolutionStatus, CaseStatus, JobStatus, Task } +import models.{ Artifact, ArtifactStatus, Case, CaseImpactStatus, CaseResolutionStatus, CaseStatus, Task } import play.api.Logger import scala.util.Success import scala.util.Failure import models.TaskStatus import models.LogStatus +import org.elastic4play.services.EventMessage +import org.elastic4play.services.EventSrv + +case class MergeArtifact(newArtifact: Artifact, artifacts: Seq[Artifact], authContext: AuthContext) extends EventMessage @Singleton class CaseMergeSrv @Inject() ( @@ -34,7 +38,7 @@ class CaseMergeSrv @Inject() ( taskSrv: TaskSrv, logSrv: LogSrv, artifactSrv: ArtifactSrv, - jobSrv: JobSrv, + eventSrv: EventSrv, implicit val ec: ExecutionContext, implicit val mat: Materializer) { @@ -161,14 +165,6 @@ class CaseMergeSrv @Inject() ( JsString(status) } - private[services] def mergeJobs(newArtifact: Artifact, artifacts: Seq[Artifact])(implicit authContext: AuthContext): Future[Done] = { - jobSrv.find(and(parent("case_artifact", withId(artifacts.map(_.id): _*)), "status" ~= JobStatus.Success), Some("all"), Nil)._1 - .mapAsyncUnordered(5) { job ⇒ - 
jobSrv.create(newArtifact, baseFields(job)) - } - .runWith(Sink.ignore) - } - private[services] def mergeArtifactsAndJobs(newCase: Case, cases: Seq[Case])(implicit authContext: AuthContext): Future[Done] = { val caseMap = cases.map(c ⇒ c.id → c).toMap val caseFilter = and(parent("case", withId(cases.map(_.id): _*)), "status" ~= "Ok") @@ -217,17 +213,11 @@ class CaseMergeSrv @Inject() ( } } .mapConcat(identity) - .mapAsyncUnordered(5) { + .runForeach { case (newArtifact, sameArtifacts) ⇒ // Then jobs are imported - mergeJobs(newArtifact, sameArtifacts) - .recover { - case error ⇒ - logger.error("Log creation fail", error) - Done - } + eventSrv.publish(MergeArtifact(newArtifact, sameArtifacts, authContext)) } - .runWith(Sink.ignore) } private[services] def mergeCases(cases: Seq[Case])(implicit authContext: AuthContext): Future[Case] = { diff --git a/thehive-backend/conf/routes b/thehive-backend/conf/routes index 57a4dd57cc..ca05053d7d 100644 --- a/thehive-backend/conf/routes +++ b/thehive-backend/conf/routes @@ -2,90 +2,80 @@ # This file defines all application routes (Higher priority routes first) # ~~~~ -GET / controllers.Default.redirect(to = "/index.html") -GET /api/status controllers.StatusCtrl.get -GET /api/logout controllers.AuthenticationCtrl.logout() -POST /api/login controllers.AuthenticationCtrl.login() - -POST /api/_search controllers.SearchCtrl.find() - -GET /api/case controllers.CaseCtrl.find() -POST /api/case/_search controllers.CaseCtrl.find() -PATCH /api/case/_bulk controllers.CaseCtrl.bulkUpdate() -POST /api/case/_stats controllers.CaseCtrl.stats() -POST /api/case controllers.CaseCtrl.create() -GET /api/case/:caseId controllers.CaseCtrl.get(caseId) -PATCH /api/case/:caseId controllers.CaseCtrl.update(caseId) -DELETE /api/case/:caseId controllers.CaseCtrl.delete(caseId) -GET /api/case/:caseId/links controllers.CaseCtrl.linkedCases(caseId) -POST /api/case/:caseId1/_merge/:caseId2 controllers.CaseCtrl.merge(caseId1, caseId2) - -POST 
/api/case/template/_search controllers.CaseTemplateCtrl.find() -POST /api/case/template controllers.CaseTemplateCtrl.create() -GET /api/case/template/:caseTemplateId controllers.CaseTemplateCtrl.get(caseTemplateId) -PATCH /api/case/template/:caseTemplateId controllers.CaseTemplateCtrl.update(caseTemplateId) -DELETE /api/case/template/:caseTemplateId controllers.CaseTemplateCtrl.delete(caseTemplateId) - -POST /api/case/artifact/_search controllers.ArtifactCtrl.find() -POST /api/case/:caseId/artifact/_search controllers.ArtifactCtrl.findInCase(caseId) -POST /api/case/artifact/_stats controllers.ArtifactCtrl.stats() -POST /api/case/:caseId/artifact controllers.ArtifactCtrl.create(caseId) -GET /api/case/artifact/:artifactId controllers.ArtifactCtrl.get(artifactId) -DELETE /api/case/artifact/:artifactId controllers.ArtifactCtrl.delete(artifactId) -PATCH /api/case/artifact/:artifactId controllers.ArtifactCtrl.update(artifactId) -GET /api/case/artifact/:artifactId/similar controllers.ArtifactCtrl.findSimilar(artifactId) -PATCH /api/case/artifact/_bulk controllers.ArtifactCtrl.bulkUpdate() - -POST /api/case/artifact/:artifactId/job controllers.JobCtrl.create(artifactId) -POST /api/case/artifact/job/_stats controllers.JobCtrl.stats() -POST /api/case/artifact/job/_search controllers.JobCtrl.find() -GET /api/case/artifact/:artifactId/job controllers.JobCtrl.findInArtifact(artifactId) -GET /api/case/artifact/job/:jobId controllers.JobCtrl.get(jobId) - -POST /api/case/:caseId/task/_search controllers.TaskCtrl.findInCase(caseId) -POST /api/case/task/_search controllers.TaskCtrl.find() -POST /api/case/task/_stats controllers.TaskCtrl.stats() -GET /api/case/task/:taskId controllers.TaskCtrl.get(taskId) -PATCH /api/case/task/:taskId controllers.TaskCtrl.update(taskId) -POST /api/case/:caseId/task controllers.TaskCtrl.create(caseId) - -GET /api/case/task/:taskId/log controllers.LogCtrl.findInTask(taskId) -POST /api/case/task/log/_search controllers.LogCtrl.find() -POST 
/api/case/task/:taskId/log controllers.LogCtrl.create(taskId) -PATCH /api/case/task/log/:logId controllers.LogCtrl.update(logId) -DELETE /api/case/task/log/:logId controllers.LogCtrl.delete(logId) -GET /api/case/task/log/:logId controllers.LogCtrl.get(logId) - -GET /api/flow controllers.FlowCtrl.flow(rootId: Option[String], count: Option[Int]) - -POST /api/analyzer/_search controllers.AnalyzerCtrl.find() -GET /api/analyzer/:analyzerId controllers.AnalyzerCtrl.get(analyzerId) -GET /api/analyzer/:analyzerId/report/:flavor controllers.AnalyzerCtrl.getReport(analyzerId, flavor) - -GET /api/datastore/:hash controllers.AttachmentCtrl.download(hash, name: Option[String]) -GET /api/datastorezip/:hash controllers.AttachmentCtrl.downloadZip(hash, name: Option[String]) - -POST /api/maintenance/migrate org.elastic4play.controllers.MigrationCtrl.migrate +GET / controllers.Default.redirect(to = "/index.html") +GET /api/status controllers.StatusCtrl.get +GET /api/logout controllers.AuthenticationCtrl.logout() +POST /api/login controllers.AuthenticationCtrl.login() + +POST /api/_search controllers.SearchCtrl.find() + +GET /api/case controllers.CaseCtrl.find() +POST /api/case/_search controllers.CaseCtrl.find() +PATCH /api/case/_bulk controllers.CaseCtrl.bulkUpdate() +POST /api/case/_stats controllers.CaseCtrl.stats() +POST /api/case controllers.CaseCtrl.create() +GET /api/case/:caseId controllers.CaseCtrl.get(caseId) +PATCH /api/case/:caseId controllers.CaseCtrl.update(caseId) +DELETE /api/case/:caseId controllers.CaseCtrl.delete(caseId) +GET /api/case/:caseId/links controllers.CaseCtrl.linkedCases(caseId) +POST /api/case/:caseId1/_merge/:caseId2 controllers.CaseCtrl.merge(caseId1, caseId2) + +POST /api/case/template/_search controllers.CaseTemplateCtrl.find() +POST /api/case/template controllers.CaseTemplateCtrl.create() +GET /api/case/template/:caseTemplateId controllers.CaseTemplateCtrl.get(caseTemplateId) +PATCH /api/case/template/:caseTemplateId 
controllers.CaseTemplateCtrl.update(caseTemplateId) +DELETE /api/case/template/:caseTemplateId controllers.CaseTemplateCtrl.delete(caseTemplateId) + +POST /api/case/artifact/_search controllers.ArtifactCtrl.find() +POST /api/case/:caseId/artifact/_search controllers.ArtifactCtrl.findInCase(caseId) +POST /api/case/artifact/_stats controllers.ArtifactCtrl.stats() +POST /api/case/:caseId/artifact controllers.ArtifactCtrl.create(caseId) +GET /api/case/artifact/:artifactId controllers.ArtifactCtrl.get(artifactId) +DELETE /api/case/artifact/:artifactId controllers.ArtifactCtrl.delete(artifactId) +PATCH /api/case/artifact/:artifactId controllers.ArtifactCtrl.update(artifactId) +GET /api/case/artifact/:artifactId/similar controllers.ArtifactCtrl.findSimilar(artifactId) +PATCH /api/case/artifact/_bulk controllers.ArtifactCtrl.bulkUpdate() + +POST /api/case/:caseId/task/_search controllers.TaskCtrl.findInCase(caseId) +POST /api/case/task/_search controllers.TaskCtrl.find() +POST /api/case/task/_stats controllers.TaskCtrl.stats() +GET /api/case/task/:taskId controllers.TaskCtrl.get(taskId) +PATCH /api/case/task/:taskId controllers.TaskCtrl.update(taskId) +POST /api/case/:caseId/task controllers.TaskCtrl.create(caseId) + +GET /api/case/task/:taskId/log controllers.LogCtrl.findInTask(taskId) +POST /api/case/task/log/_search controllers.LogCtrl.find() +POST /api/case/task/:taskId/log controllers.LogCtrl.create(taskId) +PATCH /api/case/task/log/:logId controllers.LogCtrl.update(logId) +DELETE /api/case/task/log/:logId controllers.LogCtrl.delete(logId) +GET /api/case/task/log/:logId controllers.LogCtrl.get(logId) + +GET /api/flow controllers.FlowCtrl.flow(rootId: Option[String], count: Option[Int]) + +GET /api/datastore/:hash controllers.AttachmentCtrl.download(hash, name: Option[String]) +GET /api/datastorezip/:hash controllers.AttachmentCtrl.downloadZip(hash, name: Option[String]) + +POST /api/maintenance/migrate org.elastic4play.controllers.MigrationCtrl.migrate #POST 
/api/maintenance/rehash controllers.MaintenanceCtrl.reHash -GET /api/list org.elastic4play.controllers.DBListCtrl.list() -DELETE /api/list/:itemId org.elastic4play.controllers.DBListCtrl.deleteItem(itemId) -POST /api/list/:listName org.elastic4play.controllers.DBListCtrl.addItem(listName) -GET /api/list/:listName org.elastic4play.controllers.DBListCtrl.listItems(listName) +GET /api/list org.elastic4play.controllers.DBListCtrl.list() +DELETE /api/list/:itemId org.elastic4play.controllers.DBListCtrl.deleteItem(itemId) +POST /api/list/:listName org.elastic4play.controllers.DBListCtrl.addItem(listName) +GET /api/list/:listName org.elastic4play.controllers.DBListCtrl.listItems(listName) -GET /api/user/current controllers.UserCtrl.currentUser() -POST /api/user/_search controllers.UserCtrl.find() -POST /api/user controllers.UserCtrl.create() -GET /api/user/:userId controllers.UserCtrl.get(userId) -DELETE /api/user/:userId controllers.UserCtrl.delete(userId) -PATCH /api/user/:userId controllers.UserCtrl.update(userId) -POST /api/user/:userId/password/set controllers.UserCtrl.setPassword(userId) -POST /api/user/:userId/password/change controllers.UserCtrl.changePassword(userId) +GET /api/user/current controllers.UserCtrl.currentUser() +POST /api/user/_search controllers.UserCtrl.find() +POST /api/user controllers.UserCtrl.create() +GET /api/user/:userId controllers.UserCtrl.get(userId) +DELETE /api/user/:userId controllers.UserCtrl.delete(userId) +PATCH /api/user/:userId controllers.UserCtrl.update(userId) +POST /api/user/:userId/password/set controllers.UserCtrl.setPassword(userId) +POST /api/user/:userId/password/change controllers.UserCtrl.changePassword(userId) -POST /api/stream controllers.StreamCtrl.create() -GET /api/stream/status controllers.StreamCtrl.status -GET /api/stream/:streamId controllers.StreamCtrl.get(streamId) +POST /api/stream controllers.StreamCtrl.create() +GET /api/stream/status controllers.StreamCtrl.status +GET /api/stream/:streamId 
controllers.StreamCtrl.get(streamId) --> /api/connector connectors.ConnectorRouter +-> /api/connector connectors.ConnectorRouter -GET /*file controllers.AssetCtrl.get(file) +GET /*file controllers.AssetCtrl.get(file) diff --git a/thehive-cortex/.gitignore b/thehive-cortex/.gitignore new file mode 100644 index 0000000000..ae3c172604 --- /dev/null +++ b/thehive-cortex/.gitignore @@ -0,0 +1 @@ +/bin/ diff --git a/thehive-cortex/app/connectors/cortex/CortexConnector.scala b/thehive-cortex/app/connectors/cortex/CortexConnector.scala new file mode 100644 index 0000000000..689806f099 --- /dev/null +++ b/thehive-cortex/app/connectors/cortex/CortexConnector.scala @@ -0,0 +1,22 @@ +package connectors.cortex + +import javax.inject.Singleton + +import play.api.{ Configuration, Environment, Logger } + +import connectors.ConnectorModule + +class CortexConnector( + environment: Environment, + configuration: Configuration) extends ConnectorModule { + val log = Logger(getClass) + + def configure() { + try { + + } + catch { + case t: Throwable ⇒ log.error("Cortex connector is disabled because its configuration is invalid", t) + } + } +} \ No newline at end of file diff --git a/thehive-cortex/app/connectors/cortex/CortextCtrl.scala b/thehive-cortex/app/connectors/cortex/CortextCtrl.scala new file mode 100644 index 0000000000..b54a1b255a --- /dev/null +++ b/thehive-cortex/app/connectors/cortex/CortextCtrl.scala @@ -0,0 +1,98 @@ +package connectors.cortex + +import scala.concurrent.ExecutionContext + +import org.elastic4play.BadRequestError +import org.elastic4play.NotFoundError +import org.elastic4play.Timed +import org.elastic4play.controllers.Authenticated +import org.elastic4play.controllers.FieldsBodyParser +import org.elastic4play.controllers.Renderer +import org.elastic4play.services.Role + +import connectors.cortex.models.JsonFormat._ +import connectors.Connector +import javax.inject.Inject +import javax.inject.Singleton +import play.api.Logger +import play.api.http.Status
+import play.api.mvc.Controller +import play.api.routing.SimpleRouter +import play.api.routing.sird.GET +import play.api.routing.sird.POST +import play.api.routing.sird.UrlContext +import connectors.cortex.services.CortexSrv + +@Singleton +class CortextCtrl @Inject() ( + cortexSrv: CortexSrv, + authenticated: Authenticated, + fieldsBodyParser: FieldsBodyParser, + renderer: Renderer, + implicit val ec: ExecutionContext) extends Controller with Connector with Status { + val name = "cortex" + val log = Logger(getClass) + val router = SimpleRouter { + case POST(p"/job") ⇒ createJob + case GET(p"/job/$jobId<[^/]*>") ⇒ getJob(jobId) + case GET(p"/job") ⇒ listJob + case GET(p"/analyzer/$analyzerId<[^/]*>") ⇒ getAnalyzer(analyzerId) + case GET(p"/analyzer/type/$dataType<[^/]*>") ⇒ getAnalyzerFor(dataType) + case GET(p"/analyzer") ⇒ listAnalyzer + case r ⇒ throw NotFoundError(s"${r.uri} not found") + } + + @Timed + def createJob = authenticated(Role.write).async(fieldsBodyParser) { implicit request ⇒ + val analyzerId = request.body.getString("analyzerId").getOrElse(throw BadRequestError(s"analyzerId is missing")) + val artifactId = request.body.getString("artifactId").getOrElse(throw BadRequestError(s"artifactId is missing")) + cortexSrv.createJob(analyzerId, artifactId).map { job ⇒ + renderer.toOutput(OK, job) + } + } + + @Timed + def getJob(jobId: String) = authenticated(Role.read).async { implicit request ⇒ + cortexSrv.getJob(jobId).map { job ⇒ + renderer.toOutput(OK, job) + } + } + + @Timed + def listJob = authenticated(Role.read).async { implicit request ⇒ + cortexSrv.listJob.map { jobs ⇒ + renderer.toOutput(OK, jobs) + } + } + + @Timed + def getAnalyzer(analyzerId: String) = authenticated(Role.read).async { implicit request ⇒ + cortexSrv.getAnalyzer(analyzerId).map { analyzer ⇒ + renderer.toOutput(OK, analyzer) + } + } + + @Timed + def getAnalyzerFor(dataType: String) = authenticated(Role.read).async { implicit request ⇒ + cortexSrv.getAnalyzersFor(dataType).map { 
analyzers ⇒ + renderer.toOutput(OK, analyzers) + } + } + + @Timed + def listAnalyzer = authenticated(Role.read).async { implicit request ⇒ + cortexSrv.listAnalyzer.map { analyzers ⇒ + renderer.toOutput(OK, analyzers) + } + } + + //* POST /api/case/artifact/:artifactId/job controllers.JobCtrl.create(artifactId) + //POST /api/case/artifact/job/_stats controllers.JobCtrl.stats() + //POST /api/case/artifact/job/_search controllers.JobCtrl.find() + //GET /api/case/artifact/:artifactId/job controllers.JobCtrl.findInArtifact(artifactId) + //GET /api/case/artifact/job/:jobId controllers.JobCtrl.get(jobId) + //POST /api/analyzer/_search controllers.AnalyzerCtrl.find() + //GET /api/analyzer/:analyzerId controllers.AnalyzerCtrl.get(analyzerId) + //GET /api/analyzer/:analyzerId/report/:flavor controllers.AnalyzerCtrl.getReport(analyzerId, flavor) + +} \ No newline at end of file diff --git a/thehive-cortex/app/connectors/cortex/JsonFormat.scala b/thehive-cortex/app/connectors/cortex/JsonFormat.scala new file mode 100644 index 0000000000..c63cedb4df --- /dev/null +++ b/thehive-cortex/app/connectors/cortex/JsonFormat.scala @@ -0,0 +1,11 @@ +//package connectors.cortex +// +//import org.elastic4play.models.JsonFormat.enumFormat +// +//import play.api.libs.json._ +// +//object JsonFormat { +// implicit val jobStatusFormat = enumFormat(JobStatus) +// implicit val jobFormat = Json.format[Job] +// implicit val analyzerFormat = Json.format[Analyzer] +//} \ No newline at end of file diff --git a/thehive-cortex/app/connectors/cortex/models/Analyzer.scala b/thehive-cortex/app/connectors/cortex/models/Analyzer.scala new file mode 100644 index 0000000000..7a1e3ba994 --- /dev/null +++ b/thehive-cortex/app/connectors/cortex/models/Analyzer.scala @@ -0,0 +1,17 @@ +package connectors.cortex.models + +import connectors.cortex.services.CortexClient + +trait CortexModel[O] { self ⇒ + def onCortex(cortexId: String): O +} + +case class Analyzer( + name: String, + version: String, + description: 
String, + dataTypeList: Seq[String], + cortexIds: List[String] = Nil) extends CortexModel[Analyzer] { + def id = (name + "_" + version).replaceAll("\\.", "_") + def onCortex(cortexId: String) = copy(cortexIds = cortexId :: cortexIds) +} \ No newline at end of file diff --git a/thehive-cortex/app/connectors/cortex/models/Artifact.scala b/thehive-cortex/app/connectors/cortex/models/Artifact.scala new file mode 100644 index 0000000000..30897b679b --- /dev/null +++ b/thehive-cortex/app/connectors/cortex/models/Artifact.scala @@ -0,0 +1,9 @@ +package connectors.cortex.models + +import java.io.File + +import play.api.libs.json.JsObject + +sealed abstract class CortexArtifact(attributes: JsObject) +case class FileArtifact(data: File, attributes: JsObject) extends CortexArtifact(attributes) +case class DataArtifact(data: String, attributes: JsObject) extends CortexArtifact(attributes) \ No newline at end of file diff --git a/thehive-backend/app/models/Job.scala b/thehive-cortex/app/connectors/cortex/models/Job.scala similarity index 75% rename from thehive-backend/app/models/Job.scala rename to thehive-cortex/app/connectors/cortex/models/Job.scala index 9436e7eeb0..12bd271087 100644 --- a/thehive-backend/app/models/Job.scala +++ b/thehive-cortex/app/connectors/cortex/models/Job.scala @@ -1,55 +1,47 @@ -package models - -import java.util.Date - -import javax.inject.{ Inject, Singleton } - -import scala.concurrent.Future - -import play.api.libs.json.{ JsObject, JsValue, Json } - -import org.elastic4play.JsonFormat.dateFormat -import org.elastic4play.models.{ AttributeDef, AttributeFormat ⇒ F, AttributeOption ⇒ O, BaseEntity, ChildModelDef, EntityDef, HiveEnumeration } - -import JsonFormat.jobStatusFormat -import services.AuditedModel - -object JobStatus extends Enumeration with HiveEnumeration { - type Type = Value - val InProgress, Success, Failure = Value -} - -trait JobAttributes { _: AttributeDef ⇒ - val analyzerId = attribute("analyzerId", F.stringFmt, "Analyzer", 
O.readonly) - val status = attribute("status", F.enumFmt(JobStatus), "Status of the job", JobStatus.InProgress) - val artifactId = attribute("artifactId", F.stringFmt, "Original artifact on which this job was executed", O.readonly) - val startDate = attribute("startDate", F.dateFmt, "Timestamp of the job start") // , O.model) - val endDate = optionalAttribute("endDate", F.dateFmt, "Timestamp of the job completion (or fail)") - val report = optionalAttribute("report", F.textFmt, "Analysis result", O.unaudited) - -} -@Singleton -class JobModel @Inject() (artifactModel: ArtifactModel) extends ChildModelDef[JobModel, Job, ArtifactModel, Artifact](artifactModel, "case_artifact_job") with JobAttributes with AuditedModel { - - override def creationHook(parent: Option[BaseEntity], attrs: JsObject): Future[JsObject] = Future.successful { - attrs - "report" - "endDate" + - ("startDate" → Json.toJson(new Date)) + - ("status" → Json.toJson(JobStatus.InProgress)) - } -} -class Job(model: JobModel, attributes: JsObject) extends EntityDef[JobModel, Job](model, attributes) with JobAttributes { - override def toJson = super.toJson + ("report" → report().fold[JsValue](JsObject(Nil))(r ⇒ Json.parse(r))) -} -// def insertInArtifact(artifact: CaseArtifacts#ENTITY) = { -// db.create(tableName, attributes + ("parent" -> JsString(artifact.id)) + (s"$$routing" -> JsString(artifact.routing))).map { indexResponse => -// read(indexResponse.getId(), Some(artifact.id), attributes) -// } -// } -// override def toJson = { -// val json = super.toJson -// (json \ "report" \ analyzerId).toOption match { -// case Some(report) => json + ("report" -> report) -// case None => json -// } -// -// } +package connectors.cortex.models + +import java.util.Date + +import javax.inject.{ Inject, Singleton } + +import scala.concurrent.Future + +import play.api.libs.json.{ JsObject, JsValue, Json } + +import org.elastic4play.JsonFormat.dateFormat +import org.elastic4play.models.{ AttributeDef, AttributeFormat ⇒ F, 
AttributeOption ⇒ O, BaseEntity, ChildModelDef, EntityDef, HiveEnumeration } + +import JsonFormat.jobStatusFormat +import models.ArtifactModel +import services.AuditedModel +import models.Artifact + +object JobStatus extends Enumeration with HiveEnumeration { + type Type = Value + val InProgress, Success, Failure = Value +} + +trait JobAttributes { _: AttributeDef ⇒ + val analyzerId = attribute("analyzerId", F.stringFmt, "Analyzer", O.readonly) + val status = attribute("status", F.enumFmt(JobStatus), "Status of the job", JobStatus.InProgress) + val artifactId = attribute("artifactId", F.stringFmt, "Original artifact on which this job was executed", O.readonly) + val startDate = attribute("startDate", F.dateFmt, "Timestamp of the job start") // , O.model) + val endDate = optionalAttribute("endDate", F.dateFmt, "Timestamp of the job completion (or fail)") + val report = optionalAttribute("report", F.textFmt, "Analysis result", O.unaudited) + val cortexId = optionalAttribute("cortexId", F.stringFmt, "Id of cortex where the job is run", O.readonly) + +} +@Singleton +class JobModel @Inject() (artifactModel: ArtifactModel) extends ChildModelDef[JobModel, Job, ArtifactModel, Artifact](artifactModel, "case_artifact_job") with JobAttributes with AuditedModel { + + override def creationHook(parent: Option[BaseEntity], attrs: JsObject): Future[JsObject] = Future.successful { + attrs - "report" - "endDate" + + ("startDate" → Json.toJson(new Date)) + + ("status" → Json.toJson(JobStatus.InProgress)) + } +} +class Job(model: JobModel, attributes: JsObject) extends EntityDef[JobModel, Job](model, attributes) with JobAttributes { + override def toJson = super.toJson + ("report" → report().fold[JsValue](JsObject(Nil))(r ⇒ Json.parse(r))) +} + +case class CortexJob(id: String, analyzerId: String, artifact: CortexArtifact, date: Date, status: JobStatus.Type) \ No newline at end of file diff --git a/thehive-cortex/app/connectors/cortex/models/JsonFormat.scala 
b/thehive-cortex/app/connectors/cortex/models/JsonFormat.scala new file mode 100644 index 0000000000..4656e7d930 --- /dev/null +++ b/thehive-cortex/app/connectors/cortex/models/JsonFormat.scala @@ -0,0 +1,34 @@ +package connectors.cortex.models + +import java.io.File + +import play.api.libs.json.{ JsObject, JsString, Json, OFormat, OWrites, Reads, Writes } +import play.api.libs.json.Json.toJsFieldJsValueWrapper +import org.elastic4play.models.JsonFormat.enumFormat + +object JsonFormat { + implicit val analyzerFormats = Json.format[Analyzer] + + val fileArtifactWrites = OWrites[FileArtifact](fileArtifact ⇒ Json.obj( + "attributes" → fileArtifact.attributes)) + + val fileArtifactReads = Reads[FileArtifact](json ⇒ + (json \ "attributes").validate[JsObject].map { attributes ⇒ + FileArtifact(new File("dummy"), attributes) + }) + val fileArtifactFormat = OFormat(fileArtifactReads, fileArtifactWrites) + val dataArtifactFormat = Json.format[DataArtifact] + val artifactReads = Reads[CortexArtifact](json ⇒ + json.validateOpt[JsObject].flatMap { + case a if a.contains("data") ⇒ json.validate[DataArtifact](dataArtifactFormat) + case a ⇒ json.validate[FileArtifact](fileArtifactFormat) + }) + val artifactWrites = OWrites[CortexArtifact](artifact ⇒ artifact match { + case dataArtifact: DataArtifact ⇒ dataArtifactFormat.writes(dataArtifact) + case fileArtifact: FileArtifact ⇒ fileArtifactWrites.writes(fileArtifact) + }) + + implicit val artifactFormat = OFormat(artifactReads, artifactWrites) + implicit val jobStatusFormat = enumFormat(JobStatus) + implicit val jobFormat = Json.format[CortexJob] +} \ No newline at end of file diff --git a/thehive-cortex/app/connectors/cortex/services/CortexClient.scala b/thehive-cortex/app/connectors/cortex/services/CortexClient.scala new file mode 100644 index 0000000000..9a978a1f70 --- /dev/null +++ b/thehive-cortex/app/connectors/cortex/services/CortexClient.scala @@ -0,0 +1,69 @@ +package connectors.cortex.services + +import scala.concurrent.{ 
ExecutionContext, Future } + +import akka.stream.scaladsl.{ FileIO, Source } + +import play.api.libs.json.{ JsValue, Json } +import play.api.libs.ws.{ WSClient, WSRequest, WSResponse } +import play.api.mvc.MultipartFormData.{ DataPart, FilePart } + +import org.elastic4play.models.JsonFormat._ +import connectors.cortex.models.JsonFormat._ +import connectors.cortex.models.Analyzer +import connectors.cortex.models.FileArtifact +import connectors.cortex.models.DataArtifact +import connectors.cortex.models.Job +import connectors.cortex.models.CortexArtifact +import play.api.libs.json.JsObject + +class CortexClient(name: String, baseUrl: String, key: String) { + def request[A](uri: String, f: WSRequest ⇒ Future[WSResponse], t: WSResponse ⇒ A)(implicit ws: WSClient, ec: ExecutionContext) = { + f(ws.url(baseUrl + "/" + uri).withHeaders("auth" → key)).map { + case response if response.status / 100 == 2 ⇒ t(response) + case error ⇒ ??? + } + } + + def getAnalyzer(analyzerId: String)(implicit ws: WSClient, ec: ExecutionContext) = { + request(s"/api/analyzer/$analyzerId", _.get, _.json.as[Analyzer]) + } + + def listAnalyzer(implicit ws: WSClient, ec: ExecutionContext) = { + request(s"/api/analyzer", _.get, _.json.as[Seq[Analyzer]]) + } + + def analyze(analyzerId: String, artifact: CortexArtifact)(implicit ws: WSClient, ec: ExecutionContext) = { + artifact match { + case FileArtifact(file, attributes) ⇒ + val body = Source(FilePart("data", file.getName, None, FileIO.fromPath(file.toPath)) :: DataPart("_json", attributes.toString) :: Nil) + request(s"/api/analyzer/$analyzerId", _.post(body), _.json) + case a: DataArtifact ⇒ + request(s"/api/analyzer/$analyzerId", _.post(Json.toJson(a)), _.json.as[JsObject]) + } + } + + def listAnalyzerForType(dataType: String)(implicit ws: WSClient, ec: ExecutionContext) = { + request(s"/api/analyzer/type/$dataType", _.get, _.json.as[Seq[Analyzer]]) + } + + def listJob(implicit ws: WSClient, ec: ExecutionContext) = { + request(s"/api/job", 
_.get, _.json.as[Seq[JsObject]]) + } + + def getJob(jobId: String)(implicit ws: WSClient, ec: ExecutionContext) = { + request(s"/api/job/$jobId", _.get, _.json.as[JsObject]) + } + + def removeJob(jobId: String)(implicit ws: WSClient, ec: ExecutionContext) = { + request(s"/api/job/$jobId", _.delete, _ ⇒ ()) + } + + def report(jobId: String)(implicit ws: WSClient, ec: ExecutionContext) = { + request(s"/api/job/$jobId/report", _.get, r ⇒ r.json.as[JsObject]) + } + + def waitReport(jobId: String, atMost: String)(implicit ws: WSClient, ec: ExecutionContext) = { + request(s"/api/job/$jobId/waitreport", _.get, r ⇒ r.json.as[JsObject]) + } +} \ No newline at end of file diff --git a/thehive-cortex/app/connectors/cortex/services/CortexSrv.scala b/thehive-cortex/app/connectors/cortex/services/CortexSrv.scala new file mode 100644 index 0000000000..81c8b38a36 --- /dev/null +++ b/thehive-cortex/app/connectors/cortex/services/CortexSrv.scala @@ -0,0 +1,142 @@ +package connectors.cortex.services + +import scala.concurrent.Future +import javax.inject.Singleton +import javax.inject.Inject +import play.api.libs.ws.WSClient +import play.api.Configuration +import java.nio.file.Path +import java.nio.file.Paths +import scala.util.Try +import scala.concurrent.ExecutionContext +import play.api.http.Status +import play.api.libs.ws.WSResponse +import play.api.libs.ws.WSRequest +import play.api.libs.json.JsObject +import scala.language.implicitConversions +import akka.actor.ActorDSL.{ Act, actor } +import akka.actor.ActorSystem + +import scala.util.control.NonFatal +import connectors.cortex.models.CortexModel +import connectors.cortex.models.Analyzer +import connectors.cortex.models.CortexJob +import org.elastic4play.services.EventSrv +import play.api.Logger +import services.MergeArtifact +import org.elastic4play.controllers.Fields +import connectors.cortex.models.JobStatus +import akka.stream.scaladsl.Sink +import akka.stream.Materializer + +object CortexConfig { + def getCortexClient(name: 
String, configuration: Configuration): Option[CortexClient] = { + try { + val url = configuration.getString("url").getOrElse(sys.error("url is missing")) + val key = configuration.getString("key").getOrElse(sys.error("key is missing")) + Some(new CortexClient(name, url, key)) + } + catch { + case NonFatal(_) ⇒ None + } + } + + def getInstances(configuration: Configuration): Map[String, CortexClient] = { + val instances = for { + cfg ← configuration.getConfig("cortex").toSeq + key ← cfg.subKeys + c ← cfg.getConfig(key) + cic ← getCortexClient(key, c) + } yield key → cic + instances.toMap + } +} +case class CortexConfig(truststore: Option[Path], instances: Map[String, CortexClient]) { + + @Inject + def this(configuration: Configuration) = this( + configuration.getString("cortex.cert").map(p ⇒ Paths.get(p)), + CortexConfig.getInstances(configuration)) +} + +// private[services] def mergeJobs(newArtifact: Artifact, artifacts: Seq[Artifact])(implicit authContext: AuthContext): Future[Done] = { +// jobSrv.find(and(parent("case_artifact", withId(artifacts.map(_.id): _*)), "status" ~= JobStatus.Success), Some("all"), Nil)._1 +// .mapAsyncUnordered(5) { job ⇒ +// jobSrv.create(newArtifact, baseFields(job)) +// } +// .runWith(Sink.ignore) +// } + +@Singleton +class CortexSrv @Inject() ( + cortexConfig: CortexConfig, + jobSrv: JobSrv, + eventSrv: EventSrv, + implicit val ws: WSClient, + implicit val ec: ExecutionContext, + implicit val system: ActorSystem, + implicit val mat: Materializer) { + + // implicit def seqCortexModel[A](s: Seq[CortexModel[A]]) = new CortexModel[Seq[A]] { //= new CortexModel[Seq[A]] { + // def onCortex(cortexId: String) = s.map(_.onCortex(cortexId)) + // } + val mergeActor = actor(new Act { + lazy val log = Logger(getClass) + + become { + case MergeArtifact(newArtifact, artifacts, authContext) ⇒ + import org.elastic4play.services.QueryDSL._ + jobSrv.find(and(parent("case_artifact", withId(artifacts.map(_.id): _*)), "status" ~= JobStatus.Success), 
Some("all"), Nil)._1 + .mapAsyncUnordered(5) { job ⇒ + val baseFields = Fields(job.attributes - "_id" - "_routing" - "_parent" - "_type" - "createdBy" - "createdAt" - "updatedBy" - "updatedAt" - "user") + jobSrv.create(newArtifact, baseFields)(authContext) + } + .runWith(Sink.ignore) + } + }) + + eventSrv.subscribe(mergeActor, classOf[MergeArtifact]) // need to unsubscribe ? + + def askAllCortex[A](f: CortexClient ⇒ Future[CortexModel[A]]): Future[Seq[A]] = { + Future.traverse(cortexConfig.instances.toSeq) { + case (name, cortex) ⇒ f(cortex).map(_.onCortex(name)) + } + } + def askForAllCortex[A](f: CortexClient ⇒ Future[Seq[CortexModel[A]]]): Future[Seq[A]] = { + Future + .traverse(cortexConfig.instances.toSeq) { + case (name, cortex) ⇒ f(cortex).map(_.map(_.onCortex(name))) + } + .map(_.flatten) + } + def getAnalyzer(analyzerId: String): Future[Seq[Analyzer]] = { + askAllCortex(_.getAnalyzer(analyzerId)) + } + + def getAnalyzersFor(dataType: String): Future[Seq[Analyzer]] = { + askForAllCortex(_.listAnalyzerForType(dataType)) + } + + def listAnalyzer: Future[Seq[Analyzer]] = { + askForAllCortex(_.listAnalyzer) + } + + def getJob(jobId: String): Future[CortexJob] = { + // askAllCortex(_.getJob(jobId)) + ??? + } + + def listJob: Future[Seq[CortexJob]] = ??? + def createJob(analyzerId: String, artifactId: String): Future[CortexJob] = ???
+} +/* +GET /api/analyzer controllers.AnalyzerCtrl.list +GET /api/analyzer/:id controllers.AnalyzerCtrl.get(id) +POST /api/analyzer/:id/run controllers.AnalyzerCtrl.analyze(id) +GET /api/analyzer/type/:dataType controllers.AnalyzerCtrl.listForType(dataType) +GET /api/job controllers.JobCtrl.list +GET /api/job/:id controllers.JobCtrl.get(id) +DELETE /api/job/:id controllers.JobCtrl.remove(id) +GET /api/job/:id/report controllers.JobCtrl.report(id) +GET /api/job/:id/waitreport controllers.JobCtrl.waitReport(id, atMost ?= "Inf") +*/ \ No newline at end of file diff --git a/thehive-backend/app/services/JobSrv.scala b/thehive-cortex/app/connectors/cortex/services/JobSrv.scala similarity index 55% rename from thehive-backend/app/services/JobSrv.scala rename to thehive-cortex/app/connectors/cortex/services/JobSrv.scala index 1954c2b74a..30fac04788 100644 --- a/thehive-backend/app/services/JobSrv.scala +++ b/thehive-cortex/app/connectors/cortex/services/JobSrv.scala @@ -1,4 +1,4 @@ -package services +package connectors.cortex.services import java.util.Date @@ -16,17 +16,19 @@ import play.api.libs.json.Json.toJsFieldJsValueWrapper import org.elastic4play.JsonFormat.dateFormat import org.elastic4play.controllers.Fields -import org.elastic4play.services.{ Agg, AttachmentSrv, AuthContext, CreateSrv, DeleteSrv, EventSrv, FindSrv, GetSrv } +import org.elastic4play.services.{ Agg, AttachmentSrv, AuthContext, CreateSrv, DeleteSrv, FindSrv, GetSrv } import org.elastic4play.services.{ QueryDef, UpdateSrv } import org.elastic4play.services.JsonFormat.configWrites -import models.{ Artifact, ArtifactModel, Job, JobModel, JobStatus } +import models.{ Artifact, ArtifactModel } +import services.ArtifactSrv +import connectors.cortex.models.JobModel +import connectors.cortex.models.Job @Singleton class JobSrv( analyzerConf: JsValue, artifactSrv: ArtifactSrv, - analyzerSrv: AnalyzerSrv, jobModel: JobModel, createSrv: CreateSrv, getSrv: GetSrv, @@ -34,12 +36,11 @@ class JobSrv( deleteSrv: 
DeleteSrv, findSrv: FindSrv, attachmentSrv: AttachmentSrv, - eventSrv: EventSrv, implicit val ec: ExecutionContext) { @Inject def this( configuration: Configuration, artifactSrv: ArtifactSrv, - analyzerSrv: AnalyzerSrv, + // analyzerSrv: AnalyzerSrv, jobModel: JobModel, createSrv: CreateSrv, getSrv: GetSrv, @@ -47,12 +48,11 @@ class JobSrv( deleteSrv: DeleteSrv, findSrv: FindSrv, attachmentSrv: AttachmentSrv, - eventSrv: EventSrv, ec: ExecutionContext) = this( configWrites.writes(configuration.getConfig("analyzer.config").get), artifactSrv, - analyzerSrv, + // analyzerSrv, jobModel, createSrv, getSrv, @@ -60,7 +60,6 @@ class JobSrv( deleteSrv, findSrv, attachmentSrv, - eventSrv, ec) lazy val log = Logger(getClass) @@ -69,47 +68,33 @@ class JobSrv( artifactSrv.get(artifactId).flatMap(a ⇒ create(a, fields)) def create(artifact: Artifact, fields: Fields)(implicit authContext: AuthContext): Future[Job] = { - createSrv[JobModel, Job, Artifact](jobModel, artifact, fields.set("artifactId", artifact.id)).map { - case job if job.status() == JobStatus.InProgress ⇒ - val newJob = for { - analyzer ← analyzerSrv.get(job.analyzerId()) - (status, result) ← analyzer.analyze(attachmentSrv, artifact) - updatedAttributes = Json.obj( - "endDate" → new Date(), - "report" → result.toString, - "status" → status) - newJob ← updateSrv(job, Fields(updatedAttributes)) - _ = eventSrv.publish(StreamActor.Commit(authContext.requestId)) - } yield newJob - newJob.onFailure { - case t ⇒ log.error("Job execution fail", t) - } - job - case job ⇒ job - } - } - - def create(artifactAndFields: Seq[(Artifact, Fields)])(implicit authContext: AuthContext) = { - createSrv[JobModel, Job, Artifact](jobModel, artifactAndFields).map( - _.zip(artifactAndFields).map { - case (Success(job), _) if job.status() != JobStatus.InProgress ⇒ job - case (Success(job), (artifact, _)) ⇒ - val newJob = for { - analyzer ← analyzerSrv.get(job.analyzerId()) - (status, result) ← analyzer.analyze(attachmentSrv, artifact) - 
updatedAttributes = Json.obj( - "endDate" → new Date(), - "report" → result.toString, - "status" → status) - newJob ← updateSrv(job, Fields(updatedAttributes)) - _ = eventSrv.publish(StreamActor.Commit(authContext.requestId)) - } yield newJob - newJob.onFailure { - case t ⇒ log.error("Job execution fail", t) - } - job - }) + createSrv[JobModel, Job, Artifact](jobModel, artifact, fields.set("artifactId", artifact.id)) + ??? } + //[ M <: org.elastic4play.models.ChildModelDef[M, E, _, PE], + // E <: org.elastic4play.models.EntityDef[M,E], + // PE <: org.elastic4play.models.BaseEntity](model: M, parent: PE, fields: org.elastic4play.controllers.Fields)(implicit authContext: org.elastic4play.services.AuthContext)scala.concurrent.Future[E] + // def create(artifactAndFields: Seq[(Artifact, Fields)])(implicit authContext: AuthContext) = { + // createSrv[JobModel, Job, Artifact](jobModel, artifactAndFields).map( + // _.zip(artifactAndFields).map { + // case (Success(job), _) if job.status() != JobStatus.InProgress ⇒ job + // case (Success(job), (artifact, _)) ⇒ + // val newJob = for { + // analyzer ← analyzerSrv.get(job.analyzerId()) + // (status, result) ← analyzer.analyze(attachmentSrv, artifact) + // updatedAttributes = Json.obj( + // "endDate" → new Date(), + // "report" → result.toString, + // "status" → status) + // newJob ← updateSrv(job, Fields(updatedAttributes)) + // _ = eventSrv.publish(StreamActor.Commit(authContext.requestId)) + // } yield newJob + // newJob.onFailure { + // case t ⇒ log.error("Job execution fail", t) + // } + // job + // }) + // } def get(id: String)(implicit Context: AuthContext) = getSrv[JobModel, Job](jobModel, id) diff --git a/thehive-cortex/build.sbt b/thehive-cortex/build.sbt new file mode 100644 index 0000000000..0f105d179a --- /dev/null +++ b/thehive-cortex/build.sbt @@ -0,0 +1,8 @@ +import Dependencies._ + +libraryDependencies ++= Seq( + Library.Play.ws, + Library.elastic4play +) + +enablePlugins(PlayScala) diff --git 
a/thehive-misp/app/connectors/misp/MispModule.scala b/thehive-misp/app/connectors/misp/MispModule.scala index 7f0ea6a98e..bab80ac1b3 100644 --- a/thehive-misp/app/connectors/misp/MispModule.scala +++ b/thehive-misp/app/connectors/misp/MispModule.scala @@ -6,6 +6,7 @@ import play.api.{ Configuration, Environment, Logger } import connectors.ConnectorModule +@Singleton class MispConnector( environment: Environment, configuration: Configuration) extends ConnectorModule {