List of alerts ({{$vm.list.total || 0}} of {{$vm.alertList
{{::event.observableCount || 0}} |
{{event.date | shortDate}}
+
|
diff --git a/frontend/bower.json b/frontend/bower.json
index ba58fd6409..6a6988cc69 100644
--- a/frontend/bower.json
+++ b/frontend/bower.json
@@ -1,6 +1,6 @@
{
"name": "thehive",
- "version": "4.0.3-1",
+ "version": "4.0.4-1",
"license": "AGPL-3.0",
"dependencies": {
"jquery": "^3.4.1",
diff --git a/frontend/package.json b/frontend/package.json
index 963e781297..05da8f241d 100644
--- a/frontend/package.json
+++ b/frontend/package.json
@@ -1,6 +1,6 @@
{
"name": "thehive",
- "version": "4.0.3-1",
+ "version": "4.0.4-1",
"license": "AGPL-3.0",
"repository": {
"type": "git",
diff --git a/misp/client/src/main/scala/org/thp/misp/dto/Tag.scala b/misp/client/src/main/scala/org/thp/misp/dto/Tag.scala
index 683b1ee489..eef50fdcdc 100644
--- a/misp/client/src/main/scala/org/thp/misp/dto/Tag.scala
+++ b/misp/client/src/main/scala/org/thp/misp/dto/Tag.scala
@@ -6,7 +6,7 @@ import play.api.libs.json._
case class Tag(
id: Option[String],
name: String,
- colour: Option[Int],
+ colour: Option[String],
exportable: Option[Boolean]
)
@@ -14,10 +14,7 @@ object Tag {
implicit val reads: Reads[Tag] =
((JsPath \ "id").readNullable[String] and
(JsPath \ "name").read[String] and
- (JsPath \ "colour").readNullable[String].map {
- case Some(c) if c.headOption.contains('#') => Some(Integer.parseUnsignedInt(c.tail, 16))
- case _ => None
- } and
+ (JsPath \ "colour").readNullable[String] and
(JsPath \ "exportable").readNullable[Boolean])(Tag.apply _)
implicit val writes: Writes[Tag] = Json.writes[Tag]
diff --git a/misp/connector/src/test/scala/org/thp/thehive/connector/misp/services/MispImportSrvTest.scala b/misp/connector/src/test/scala/org/thp/thehive/connector/misp/services/MispImportSrvTest.scala
index 877870c8cc..45349ba47e 100644
--- a/misp/connector/src/test/scala/org/thp/thehive/connector/misp/services/MispImportSrvTest.scala
+++ b/misp/connector/src/test/scala/org/thp/thehive/connector/misp/services/MispImportSrvTest.scala
@@ -64,7 +64,7 @@ class MispImportSrvTest(implicit ec: ExecutionContext) extends PlaySpecification
attributeCount = Some(11),
distribution = 1,
attributes = Nil,
- tags = Seq(Tag(Some("1"), "TH-test", Some(0x36a3a3), None), Tag(Some("2"), "TH-test-2", Some(0x1ac7c7), None))
+ tags = Seq(Tag(Some("1"), "TH-test", Some("#36a3a3"), None), Tag(Some("2"), "TH-test-2", Some("#1ac7c7"), None))
)
)
}
diff --git a/thehive/app/org/thp/thehive/TheHiveModule.scala b/thehive/app/org/thp/thehive/TheHiveModule.scala
index 88f1e801d9..372797b129 100644
--- a/thehive/app/org/thp/thehive/TheHiveModule.scala
+++ b/thehive/app/org/thp/thehive/TheHiveModule.scala
@@ -101,6 +101,7 @@ class TheHiveModule(environment: Environment, configuration: Configuration) exte
integrityCheckOpsBindings.addBinding.to[CaseTemplateIntegrityCheckOps]
integrityCheckOpsBindings.addBinding.to[DataIntegrityCheckOps]
integrityCheckOpsBindings.addBinding.to[CaseIntegrityCheckOps]
+ integrityCheckOpsBindings.addBinding.to[AlertIntegrityCheckOps]
bind[ActorRef].annotatedWithName("integrity-check-actor").toProvider[IntegrityCheckActorProvider]
bind[ActorRef].annotatedWithName("flow-actor").toProvider[FlowActorProvider]
diff --git a/thehive/app/org/thp/thehive/controllers/v0/AlertCtrl.scala b/thehive/app/org/thp/thehive/controllers/v0/AlertCtrl.scala
index 05cc1fe01b..9a492db79c 100644
--- a/thehive/app/org/thp/thehive/controllers/v0/AlertCtrl.scala
+++ b/thehive/app/org/thp/thehive/controllers/v0/AlertCtrl.scala
@@ -8,8 +8,6 @@ import org.thp.scalligraph.models.{Database, Entity, UMapping}
import org.thp.scalligraph.query._
import org.thp.scalligraph.traversal.TraversalOps._
import org.thp.scalligraph.traversal._
-
-import scala.collection.JavaConverters._
import org.thp.scalligraph.{
AuthorizationError,
BadRequestError,
@@ -37,6 +35,7 @@ import play.api.mvc.{Action, AnyContent, Results}
import java.util.function.BiPredicate
import java.util.{Base64, List => JList, Map => JMap}
import javax.inject.{Inject, Named, Singleton}
+import scala.collection.JavaConverters._
import scala.util.{Failure, Success, Try}
@Singleton
@@ -488,5 +487,12 @@ class PublicAlert @Inject() (
case _ => Failure(BadRequestError("Invalid custom fields format"))
})
.property("case", db.idMapping)(_.select(_.`case`._id).readonly)
+ .property("imported", UMapping.boolean)(_.select(_.imported).readonly)
+ .property("importDate", UMapping.date.optional)(_.select(_.importDate).readonly)
+ .property("computed.handlingDuration", UMapping.long)(_.select(_.handlingDuration).readonly)
+ .property("computed.handlingDurationInSeconds", UMapping.long)(_.select(_.handlingDuration.math("_ / 1000").domainMap(_.toLong)).readonly)
+ .property("computed.handlingDurationInMinutes", UMapping.long)(_.select(_.handlingDuration.math("_ / 60000").domainMap(_.toLong)).readonly)
+ .property("computed.handlingDurationInHours", UMapping.long)(_.select(_.handlingDuration.math("_ / 3600000").domainMap(_.toLong)).readonly)
+ .property("computed.handlingDurationInDays", UMapping.long)(_.select(_.handlingDuration.math("_ / 86400000").domainMap(_.toLong)).readonly)
.build
}
diff --git a/thehive/app/org/thp/thehive/controllers/v0/CaseCtrl.scala b/thehive/app/org/thp/thehive/controllers/v0/CaseCtrl.scala
index 9ff118aff8..8aa555e6ba 100644
--- a/thehive/app/org/thp/thehive/controllers/v0/CaseCtrl.scala
+++ b/thehive/app/org/thp/thehive/controllers/v0/CaseCtrl.scala
@@ -1,29 +1,27 @@
package org.thp.thehive.controllers.v0
import org.apache.tinkerpop.gremlin.process.traversal.P
+import org.thp.scalligraph._
import org.thp.scalligraph.controllers.{Entrypoint, FPathElem, FPathEmpty, FieldsParser}
import org.thp.scalligraph.models.{Database, UMapping}
import org.thp.scalligraph.query._
import org.thp.scalligraph.traversal.TraversalOps._
-import org.thp.scalligraph.traversal.{Converter, IteratorOutput, Traversal}
-import org.thp.scalligraph.{RichSeq, _}
+import org.thp.scalligraph.traversal.{IteratorOutput, Traversal}
import org.thp.thehive.controllers.v0.Conversion._
import org.thp.thehive.dto.v0.{InputCase, InputTask}
import org.thp.thehive.dto.v1.InputCustomFieldValue
import org.thp.thehive.models._
+import org.thp.thehive.services.AlertOps._
import org.thp.thehive.services.CaseOps._
import org.thp.thehive.services.CaseTemplateOps._
import org.thp.thehive.services.CustomFieldOps._
import org.thp.thehive.services.ObservableOps._
import org.thp.thehive.services.OrganisationOps._
-import org.thp.thehive.services.TaskOps._
import org.thp.thehive.services.UserOps._
-import org.thp.thehive.services.AlertOps._
import org.thp.thehive.services._
import play.api.libs.json._
import play.api.mvc.{Action, AnyContent, Results}
-import java.lang.{Long => JLong}
import javax.inject.{Inject, Named, Singleton}
import scala.util.{Failure, Success}
@@ -196,25 +194,27 @@ class PublicCase @Inject() (
override val entityName: String = "case"
override val initialQuery: Query =
Query.init[Traversal.V[Case]]("listCase", (graph, authContext) => caseSrv.startTraversal(graph).visible(organisationSrv)(authContext))
- override val getQuery: ParamQuery[EntityIdOrName] = Query.initWithParam[EntityIdOrName, Traversal.V[Case]](
- "getCase",
- FieldsParser[EntityIdOrName],
- (idOrName, graph, authContext) => caseSrv.get(idOrName)(graph).visible(organisationSrv)(authContext)
- )
- override val pageQuery: ParamQuery[OutputParam] = Query.withParam[OutputParam, Traversal.V[Case], IteratorOutput](
- "page",
- FieldsParser[OutputParam],
- {
- case (OutputParam(from, to, withStats, _), caseSteps, authContext) =>
- caseSteps
- .richPage(from, to, withTotal = true) {
- case c if withStats =>
- c.richCaseWithCustomRenderer(caseStatsRenderer(authContext))(authContext)
- case c =>
- c.richCase(authContext).domainMap(_ -> JsObject.empty)
- }
- }
- )
+ override val getQuery: ParamQuery[EntityIdOrName] =
+ Query.initWithParam[EntityIdOrName, Traversal.V[Case]](
+ "getCase",
+ FieldsParser[EntityIdOrName],
+ (idOrName, graph, authContext) => caseSrv.get(idOrName)(graph).visible(organisationSrv)(authContext)
+ )
+ override val pageQuery: ParamQuery[OutputParam] =
+ Query.withParam[OutputParam, Traversal.V[Case], IteratorOutput](
+ "page",
+ FieldsParser[OutputParam],
+ {
+ case (OutputParam(from, to, withStats, _), caseSteps, authContext) =>
+ caseSteps
+ .richPage(from, to, withTotal = true) {
+ case c if withStats =>
+ c.richCaseWithCustomRenderer(caseStatsRenderer(authContext))(authContext)
+ case c =>
+ c.richCase(authContext).domainMap(_ -> JsObject.empty)
+ }
+ }
+ )
override val outputQuery: Query = Query.outputWithContext[RichCase, Traversal.V[Case]]((caseSteps, authContext) => caseSteps.richCase(authContext))
override val extraQueries: Seq[ParamQuery[_]] = Seq(
Query[Traversal.V[Case], Traversal.V[Observable]](
@@ -320,66 +320,11 @@ class PublicCase @Inject() (
} yield Json.obj("customFields" -> values)
case _ => Failure(BadRequestError("Invalid custom fields format"))
})
- .property("computed.handlingDurationInDays", UMapping.long)(
- _.select(
- _.coalesceIdent(
- _.has(_.endDate)
- .sack(
- (_: JLong, endDate: JLong) => endDate,
- _.by(_.value(_.endDate).graphMap[Long, JLong, Converter[Long, JLong]](_.getTime, Converter.long))
- )
- .sack((_: Long) - (_: JLong), _.by(_.value(_.startDate).graphMap[Long, JLong, Converter[Long, JLong]](_.getTime, Converter.long)))
- .sack((_: Long) / (_: Long), _.by(_.constant(86400000L)))
- .sack[Long],
- _.constant(0L)
- )
- ).readonly
- )
- .property("computed.handlingDurationInHours", UMapping.long)(
- _.select(
- _.coalesceIdent(
- _.has(_.endDate)
- .sack(
- (_: JLong, endDate: JLong) => endDate,
- _.by(_.value(_.endDate).graphMap[Long, JLong, Converter[Long, JLong]](_.getTime, Converter.long))
- )
- .sack((_: Long) - (_: JLong), _.by(_.value(_.startDate).graphMap[Long, JLong, Converter[Long, JLong]](_.getTime, Converter.long)))
- .sack((_: Long) / (_: Long), _.by(_.constant(3600000L)))
- .sack[Long],
- _.constant(0L)
- )
- ).readonly
- )
- .property("computed.handlingDurationInMinutes", UMapping.long)(
- _.select(
- _.coalesceIdent(
- _.has(_.endDate)
- .sack(
- (_: JLong, endDate: JLong) => endDate,
- _.by(_.value(_.endDate).graphMap[Long, JLong, Converter[Long, JLong]](_.getTime, Converter.long))
- )
- .sack((_: Long) - (_: JLong), _.by(_.value(_.startDate).graphMap[Long, JLong, Converter[Long, JLong]](_.getTime, Converter.long)))
- .sack((_: Long) / (_: Long), _.by(_.constant(60000L)))
- .sack[Long],
- _.constant(0L)
- )
- ).readonly
- )
- .property("computed.handlingDurationInSeconds", UMapping.long)(
- _.select(
- _.coalesceIdent(
- _.has(_.endDate)
- .sack(
- (_: JLong, endDate: JLong) => endDate,
- _.by(_.value(_.endDate).graphMap[Long, JLong, Converter[Long, JLong]](_.getTime, Converter.long))
- )
- .sack((_: Long) - (_: JLong), _.by(_.value(_.startDate).graphMap[Long, JLong, Converter[Long, JLong]](_.getTime, Converter.long)))
- .sack((_: Long) / (_: Long), _.by(_.constant(1000L)))
- .sack[Long],
- _.constant(0L)
- )
- ).readonly
- )
+ .property("computed.handlingDuration", UMapping.long)(_.select(_.handlingDuration).readonly)
+ .property("computed.handlingDurationInSeconds", UMapping.long)(_.select(_.handlingDuration.math("_ / 1000").domainMap(_.toLong)).readonly)
+ .property("computed.handlingDurationInMinutes", UMapping.long)(_.select(_.handlingDuration.math("_ / 60000").domainMap(_.toLong)).readonly)
+ .property("computed.handlingDurationInHours", UMapping.long)(_.select(_.handlingDuration.math("_ / 3600000").domainMap(_.toLong)).readonly)
+ .property("computed.handlingDurationInDays", UMapping.long)(_.select(_.handlingDuration.math("_ / 86400000").domainMap(_.toLong)).readonly)
.property("viewingOrganisation", UMapping.string)(
_.authSelect((cases, authContext) => cases.organisations.visible(authContext).value(_.name)).readonly
)
diff --git a/thehive/app/org/thp/thehive/controllers/v0/Conversion.scala b/thehive/app/org/thp/thehive/controllers/v0/Conversion.scala
index 88fae78de7..b3371255ad 100644
--- a/thehive/app/org/thp/thehive/controllers/v0/Conversion.scala
+++ b/thehive/app/org/thp/thehive/controllers/v0/Conversion.scala
@@ -349,6 +349,7 @@ object Conversion {
implicit val reportTagWrites: Writes[ReportTag] = Writes[ReportTag] { tag =>
Json.obj("level" -> tag.level.toString, "namespace" -> tag.namespace, "predicate" -> tag.predicate, "value" -> tag.value)
}
+
implicit val observableOutput: Renderer.Aux[RichObservable, OutputObservable] = Renderer.toJson[RichObservable, OutputObservable](
_.into[OutputObservable]
.withFieldConst(_._type, "case_artifact")
diff --git a/thehive/app/org/thp/thehive/controllers/v0/ObservableCtrl.scala b/thehive/app/org/thp/thehive/controllers/v0/ObservableCtrl.scala
index dba625dc0a..c1b21d0240 100644
--- a/thehive/app/org/thp/thehive/controllers/v0/ObservableCtrl.scala
+++ b/thehive/app/org/thp/thehive/controllers/v0/ObservableCtrl.scala
@@ -12,6 +12,7 @@ import org.thp.scalligraph.traversal.{IteratorOutput, Traversal}
import org.thp.thehive.controllers.v0.Conversion._
import org.thp.thehive.dto.v0.{InputAttachment, InputObservable}
import org.thp.thehive.models._
+import org.thp.thehive.services.AlertOps._
import org.thp.thehive.services.CaseOps._
import org.thp.thehive.services.ObservableOps._
import org.thp.thehive.services.OrganisationOps._
@@ -21,12 +22,14 @@ import play.api.Configuration
import play.api.libs.Files.DefaultTemporaryFileCreator
import play.api.libs.json.{JsArray, JsObject, JsValue, Json}
import play.api.mvc.{Action, AnyContent, Results}
+import shapeless._
import java.io.FilterInputStream
import java.nio.file.Files
+import java.util.Base64
import javax.inject.{Inject, Named, Singleton}
import scala.collection.JavaConverters._
-import scala.util.{Failure, Success}
+import scala.util.{Failure, Success, Try}
@Singleton
class ObservableCtrl @Inject() (
@@ -37,6 +40,7 @@ class ObservableCtrl @Inject() (
observableTypeSrv: ObservableTypeSrv,
caseSrv: CaseSrv,
organisationSrv: OrganisationSrv,
+ alertSrv: AlertSrv,
attachmentSrv: AttachmentSrv,
errorHandler: ErrorHandler,
@Named("v0") override val queryExecutor: QueryExecutor,
@@ -44,8 +48,11 @@ class ObservableCtrl @Inject() (
temporaryFileCreator: DefaultTemporaryFileCreator
) extends ObservableRenderer
with QueryCtrl {
- def create(caseId: String): Action[AnyContent] =
- entrypoint("create artifact")
+
+ type AnyAttachmentType = InputAttachment :+: FFile :+: String :+: CNil
+
+ def createInCase(caseId: String): Action[AnyContent] =
+ entrypoint("create artifact in case")
.extract("artifact", FieldsParser[InputObservable])
.extract("isZip", FieldsParser.boolean.optional.on("isZip"))
.extract("zipPassword", FieldsParser.string.optional.on("zipPassword"))
@@ -71,10 +78,10 @@ class ObservableCtrl @Inject() (
val successesAndFailures =
if (observableType.isAttachment)
inputAttachObs
- .flatMap(obs => obs.attachment.map(createAttachmentObservable(case0, obs, _)))
+ .flatMap(obs => obs.attachment.map(createAttachmentObservableInCase(case0, obs, _)))
else
inputAttachObs
- .flatMap(obs => obs.data.map(createSimpleObservable(case0, obs, _)))
+ .flatMap(obs => obs.data.map(createSimpleObservableInCase(case0, obs, _)))
val (successes, failures) = successesAndFailures
.foldLeft[(Seq[JsValue], Seq[JsValue])]((Nil, Nil)) {
case ((s, f), Right(o)) => (s :+ o, f)
@@ -85,7 +92,7 @@ class ObservableCtrl @Inject() (
}
}
- def createSimpleObservable(
+ private def createSimpleObservableInCase(
`case`: Case with Entity,
inputObservable: InputObservable,
data: String
@@ -98,7 +105,7 @@ class ObservableCtrl @Inject() (
case Failure(error) => Left(errorHandler.toErrorResult(error)._2 ++ Json.obj("object" -> Json.obj("data" -> data)))
}
- def createAttachmentObservable(
+ private def createAttachmentObservableInCase(
`case`: Case with Entity,
inputObservable: InputObservable,
fileOrAttachment: Either[FFile, InputAttachment]
@@ -121,6 +128,113 @@ class ObservableCtrl @Inject() (
Left(Json.obj("object" -> Json.obj("data" -> s"file:$filename", "attachment" -> Json.obj("name" -> filename))))
}
+ def createInAlert(alertId: String): Action[AnyContent] =
+ entrypoint("create artifact in alert")
+ .extract("artifact", FieldsParser[InputObservable])
+ .extract("isZip", FieldsParser.boolean.optional.on("isZip"))
+ .extract("zipPassword", FieldsParser.string.optional.on("zipPassword"))
+ .auth { implicit request =>
+ val inputObservable: InputObservable = request.body("artifact")
+ val isZip: Option[Boolean] = request.body("isZip")
+ val zipPassword: Option[String] = request.body("zipPassword")
+ val inputAttachObs = if (isZip.contains(true)) getZipFiles(inputObservable, zipPassword) else Seq(inputObservable)
+
+ db
+ .roTransaction { implicit graph =>
+ for {
+ alert <-
+ alertSrv
+ .get(EntityIdOrName(alertId))
+ .can(organisationSrv, Permissions.manageAlert)
+ .orFail(AuthorizationError("Operation not permitted"))
+ observableType <- observableTypeSrv.getOrFail(EntityName(inputObservable.dataType))
+ } yield (alert, observableType)
+ }
+ .map {
+ case (alert, observableType) =>
+ val successesAndFailures =
+ if (observableType.isAttachment)
+ inputAttachObs
+ .flatMap { obs =>
+ (obs.attachment.map(_.fold(Coproduct[AnyAttachmentType](_), Coproduct[AnyAttachmentType](_))) ++
+ obs.data.map(Coproduct[AnyAttachmentType](_)))
+ .map(createAttachmentObservableInAlert(alert, obs, _))
+ }
+ else
+ inputAttachObs
+ .flatMap(obs => obs.data.map(createSimpleObservableInAlert(alert, obs, _)))
+ val (successes, failures) = successesAndFailures
+ .foldLeft[(Seq[JsValue], Seq[JsValue])]((Nil, Nil)) {
+ case ((s, f), Right(o)) => (s :+ o, f)
+ case ((s, f), Left(o)) => (s, f :+ o)
+ }
+ if (failures.isEmpty) Results.Created(JsArray(successes))
+ else Results.MultiStatus(Json.obj("success" -> successes, "failure" -> failures))
+ }
+ }
+
+ private def createSimpleObservableInAlert(
+ alert: Alert with Entity,
+ inputObservable: InputObservable,
+ data: String
+ )(implicit authContext: AuthContext): Either[JsValue, JsValue] =
+ db
+ .tryTransaction { implicit graph =>
+ alertSrv.createObservable(alert, inputObservable.toObservable, data)
+ } match {
+ case Success(o) => Right(o.toJson)
+ case Failure(error) => Left(errorHandler.toErrorResult(error)._2 ++ Json.obj("object" -> Json.obj("data" -> data)))
+ }
+
+ private def createAttachmentObservableInAlert(
+ alert: Alert with Entity,
+ inputObservable: InputObservable,
+ attachment: AnyAttachmentType
+ )(implicit authContext: AuthContext): Either[JsValue, JsValue] =
+ db
+ .tryTransaction { implicit graph =>
+ object createAttachment extends Poly1 {
+ implicit val fromFile: Case.Aux[FFile, Try[RichObservable]] = at[FFile] { file =>
+ alertSrv.createObservable(alert, inputObservable.toObservable, file)
+ }
+ implicit val fromAttachment: Case.Aux[InputAttachment, Try[RichObservable]] = at[InputAttachment] { attachment =>
+ for {
+ attach <- attachmentSrv.duplicate(attachment.name, attachment.contentType, attachment.id)
+ obs <- alertSrv.createObservable(alert, inputObservable.toObservable, attach)
+ } yield obs
+ }
+
+ implicit val fromString: Case.Aux[String, Try[RichObservable]] = at[String] { data =>
+ data.split(';') match {
+ case Array(filename, contentType, value) =>
+ val data = Base64.getDecoder.decode(value)
+ attachmentSrv
+ .create(filename, contentType, data)
+ .flatMap(attachment => alertSrv.createObservable(alert, inputObservable.toObservable, attachment))
+ case Array(filename, contentType) =>
+ attachmentSrv
+ .create(filename, contentType, Array.emptyByteArray)
+ .flatMap(attachment => alertSrv.createObservable(alert, inputObservable.toObservable, attachment))
+ case data =>
+ Failure(InvalidFormatAttributeError("artifacts.data", "filename;contentType;base64value", Set.empty, FString(data.mkString(";"))))
+ }
+ }
+ }
+ attachment.fold(createAttachment)
+ } match {
+ case Success(o) => Right(o.toJson)
+ case _ =>
+ object attachmentName extends Poly1 {
+ implicit val fromFile: Case.Aux[FFile, String] = at[FFile](_.filename)
+ implicit val fromAttachment: Case.Aux[InputAttachment, String] = at[InputAttachment](_.name)
+ implicit val fromString: Case.Aux[String, String] = at[String] { data =>
+ if (data.contains(';')) data.takeWhile(_ != ';') else "no name"
+ }
+ }
+ val filename = attachment.fold(attachmentName)
+ Left(Json.obj("object" -> Json.obj("data" -> s"file:$filename", "attachment" -> Json.obj("name" -> filename))))
+ }
+
def get(observableId: String): Action[AnyContent] =
entrypoint("get observable")
.authRoTransaction(db) { implicit request => implicit graph =>
@@ -141,7 +255,7 @@ class ObservableCtrl @Inject() (
val propertyUpdaters: Seq[PropertyUpdater] = request.body("observable")
observableSrv
.update(
- _.get(EntityIdOrName(observableId)).can(Permissions.manageObservable),
+ _.get(EntityIdOrName(observableId)).canManage(organisationSrv),
propertyUpdaters
)
.flatMap {
@@ -177,7 +291,7 @@ class ObservableCtrl @Inject() (
ids
.toTry { id =>
observableSrv
- .update(_.get(EntityIdOrName(id)).can(Permissions.manageObservable), properties)
+ .update(_.get(EntityIdOrName(id)).canManage(organisationSrv), properties)
}
.map(_ => Results.NoContent)
}
@@ -189,7 +303,7 @@ class ObservableCtrl @Inject() (
observable <-
observableSrv
.get(EntityIdOrName(observableId))
- .can(Permissions.manageObservable)
+ .canManage(organisationSrv)
.getOrFail("Observable")
_ <- observableSrv.remove(observable)
} yield Results.NoContent
diff --git a/thehive/app/org/thp/thehive/controllers/v0/Router.scala b/thehive/app/org/thp/thehive/controllers/v0/Router.scala
index 4992130d71..a690b0f17d 100644
--- a/thehive/app/org/thp/thehive/controllers/v0/Router.scala
+++ b/thehive/app/org/thp/thehive/controllers/v0/Router.scala
@@ -78,13 +78,17 @@ class Router @Inject() (
case POST(p"/case/artifact/_search") => observableCtrl.search
// case POST(p"/case/:caseId/artifact/_search") => observableCtrl.findInCase(caseId)
case POST(p"/case/artifact/_stats") => observableCtrl.stats
- case POST(p"/case/$caseId/artifact") => observableCtrl.create(caseId) // Audit ok
+ case POST(p"/case/$caseId/artifact") => observableCtrl.createInCase(caseId) // Audit ok
case GET(p"/case/artifact/$observableId") => observableCtrl.get(observableId)
- case DELETE(p"/case/artifact/$observableId") => observableCtrl.delete(observableId) // Audit ok
- case PATCH(p"/case/artifact/_bulk") => observableCtrl.bulkUpdate // Audit ok
- case PATCH(p"/case/artifact/$observableId") => observableCtrl.update(observableId) // Audit ok
+ case DELETE(p"/case/artifact/$observableId") => observableCtrl.delete(observableId) // Audit ok
+ case PATCH(p"/case/artifact/_bulk") => observableCtrl.bulkUpdate // Audit ok
+ case PATCH(p"/case/artifact/$observableId") => observableCtrl.update(observableId) // Audit ok
case GET(p"/case/artifact/$observableId/similar") => observableCtrl.findSimilar(observableId)
case POST(p"/case/artifact/$observableId/shares") => shareCtrl.shareObservable(observableId)
+ case POST(p"/alert/$alertId/artifact") => observableCtrl.createInAlert(alertId) // Audit ok
+ case PATCH(p"/alert/artifact/$observableId") => observableCtrl.update(observableId) // Audit ok
+ case PATCH(p"/alert/artifact/_bulk") => observableCtrl.bulkUpdate // Audit ok
+ case DELETE(p"/alert/artifact/$observableId") => observableCtrl.delete(observableId) // Audit ok
case GET(p"/case") => caseCtrl.search
case POST(p"/case") => caseCtrl.create // Audit ok
diff --git a/thehive/app/org/thp/thehive/controllers/v0/ShareCtrl.scala b/thehive/app/org/thp/thehive/controllers/v0/ShareCtrl.scala
index 68959f06b6..777f402e2d 100644
--- a/thehive/app/org/thp/thehive/controllers/v0/ShareCtrl.scala
+++ b/thehive/app/org/thp/thehive/controllers/v0/ShareCtrl.scala
@@ -174,7 +174,8 @@ class ShareCtrl @Inject() (
val shares = caseSrv
.get(EntityIdOrName(caseId))
.shares
- .filter(_.organisation.filterNot(_.get(request.organisation)).visible)
+ .visible
+ .filterNot(_.get(request.organisation))
.richShare
.toSeq
diff --git a/thehive/app/org/thp/thehive/controllers/v0/TagCtrl.scala b/thehive/app/org/thp/thehive/controllers/v0/TagCtrl.scala
index b9f56a005a..970be79137 100644
--- a/thehive/app/org/thp/thehive/controllers/v0/TagCtrl.scala
+++ b/thehive/app/org/thp/thehive/controllers/v0/TagCtrl.scala
@@ -16,7 +16,6 @@ import play.api.mvc.{Action, AnyContent, Results}
import java.nio.file.Files
import javax.inject.{Inject, Named, Singleton}
-import scala.util.Try
class TagCtrl @Inject() (
override val entrypoint: Entrypoint,
@@ -67,13 +66,10 @@ class TagCtrl @Inject() (
colour =
(entry \ "colour")
.asOpt[String]
- .map(parseColour)
- .getOrElse(0) // black
+ .getOrElse("#000000")
e = (entry \ "description").asOpt[String] orElse (entry \ "expanded").asOpt[String]
} yield Tag(namespace, predicate, Some(v), e, colour)
- def parseColour(colour: String): Int = if (colour(0) == '#') Try(Integer.parseUnsignedInt(colour.tail, 16)).getOrElse(0) else 0
-
private def distinct(valueOpt: Option[String], acc: (Seq[JsObject], Seq[String]), v: JsObject): (Seq[JsObject], Seq[String]) =
if (valueOpt.isDefined && acc._2.contains(valueOpt.get)) acc
else (acc._1 :+ v, valueOpt.fold(acc._2)(acc._2 :+ _))
@@ -89,8 +85,7 @@ class TagCtrl @Inject() (
colour =
(predicate \ "colour")
.asOpt[String]
- .map(parseColour)
- .getOrElse(0) // black
+ .getOrElse("#000000")
} yield Tag(namespace, v, None, e, colour)
def get(tagId: String): Action[AnyContent] =
@@ -141,7 +136,7 @@ class PublicTag @Inject() (tagSrv: TagSrv) extends PublicData {
// val namespace = UMapping.string.getProperty(v, "namespace")
// val predicate = UMapping.string.getProperty(v, "predicate")
// val value = UMapping.string.optional.getProperty(v, "value")
-// Tag(namespace, predicate, value, None, 0).toString
+// Tag(namespace, predicate, value, None, "#000000").toString
// },
// Converter.identity[String]
// )
diff --git a/thehive/app/org/thp/thehive/controllers/v1/AlertRenderer.scala b/thehive/app/org/thp/thehive/controllers/v1/AlertRenderer.scala
index e4c0ad0f7a..6c50e51912 100644
--- a/thehive/app/org/thp/thehive/controllers/v1/AlertRenderer.scala
+++ b/thehive/app/org/thp/thehive/controllers/v1/AlertRenderer.scala
@@ -1,6 +1,5 @@
package org.thp.thehive.controllers.v1
-import java.util.{List => JList, Map => JMap}
import org.thp.scalligraph.auth.AuthContext
import org.thp.scalligraph.traversal.TraversalOps._
import org.thp.scalligraph.traversal.{Converter, Traversal}
@@ -10,6 +9,8 @@ import org.thp.thehive.services.AlertOps._
import org.thp.thehive.services.OrganisationSrv
import play.api.libs.json._
+import java.util.{Date, List => JList, Map => JMap}
+
trait AlertRenderer extends BaseRenderer[Alert] {
implicit val similarCaseWrites: Writes[(RichCase, SimilarStats)] = Writes[(RichCase, SimilarStats)] {
case (richCase, similarStats) =>
@@ -39,6 +40,9 @@ trait AlertRenderer extends BaseRenderer[Alert] {
_.similarCases(organisationSrv, caseFilter = None).fold.domainMap(sc => JsArray(sc.sorted.map(Json.toJson(_))))
}
+ def importDate: Traversal.V[Alert] => Traversal[JsValue, JList[Date], Converter[JsValue, JList[Date]]] =
+ _.importDate.fold.domainMap(_.headOption.fold[JsValue](JsNull)(d => JsNumber(d.getTime)))
+
def alertStatsRenderer(organisationSrv: OrganisationSrv, extraData: Set[String])(implicit
authContext: AuthContext
): Traversal.V[Alert] => JsTraversal = { implicit traversal =>
@@ -47,6 +51,7 @@ trait AlertRenderer extends BaseRenderer[Alert] {
traversal,
{
case (f, "similarCases") => addData("similarCases", f)(similarCasesStats(organisationSrv))
+ case (f, "importDate") => addData("importDate", f)(importDate)
case (f, _) => f
}
)
diff --git a/thehive/app/org/thp/thehive/controllers/v1/CaseCtrl.scala b/thehive/app/org/thp/thehive/controllers/v1/CaseCtrl.scala
index 85649611b1..5ed8e95d55 100644
--- a/thehive/app/org/thp/thehive/controllers/v1/CaseCtrl.scala
+++ b/thehive/app/org/thp/thehive/controllers/v1/CaseCtrl.scala
@@ -15,6 +15,7 @@ import org.thp.thehive.services.CaseOps._
import org.thp.thehive.services.CaseTemplateOps._
import org.thp.thehive.services.ObservableOps._
import org.thp.thehive.services.OrganisationOps._
+import org.thp.thehive.services.ShareOps._
import org.thp.thehive.services.TaskOps._
import org.thp.thehive.services.UserOps._
import org.thp.thehive.services._
@@ -71,7 +72,8 @@ class CaseCtrl @Inject() (
),
Query[Traversal.V[Case], Traversal.V[User]]("assignableUsers", (caseSteps, authContext) => caseSteps.assignableUsers(authContext)),
Query[Traversal.V[Case], Traversal.V[Organisation]]("organisations", (caseSteps, authContext) => caseSteps.organisations.visible(authContext)),
- Query[Traversal.V[Case], Traversal.V[Alert]]("alerts", (caseSteps, authContext) => caseSteps.alert.visible(organisationSrv)(authContext))
+ Query[Traversal.V[Case], Traversal.V[Alert]]("alerts", (caseSteps, authContext) => caseSteps.alert.visible(organisationSrv)(authContext)),
+ Query[Traversal.V[Case], Traversal.V[Share]]("shares", (caseSteps, authContext) => caseSteps.shares.visible(authContext))
)
def create: Action[AnyContent] =
diff --git a/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala b/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala
index 893f4c7706..08e38d3023 100644
--- a/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala
+++ b/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala
@@ -4,7 +4,7 @@ import io.scalaland.chimney.dsl._
import org.thp.scalligraph.auth.AuthContext
import org.thp.scalligraph.controllers.Renderer
import org.thp.scalligraph.models.Entity
-import org.thp.thehive.dto.v1._
+import org.thp.thehive.dto.v1.{InputTaxonomy, OutputTaxonomy, _}
import org.thp.thehive.models._
import play.api.libs.json.{JsObject, JsValue, Json}
@@ -210,7 +210,7 @@ object Conversion {
.transform
)
- implicit val organiastionRenderer: Renderer.Aux[Organisation with Entity, OutputOrganisation] =
+ implicit val organisationRenderer: Renderer.Aux[Organisation with Entity, OutputOrganisation] =
Renderer.toJson[Organisation with Entity, OutputOrganisation](organisation =>
OutputOrganisation(
organisation._id.toString,
@@ -258,6 +258,41 @@ object Conversion {
.transform
}
+ implicit class InputTaxonomyOps(inputTaxonomy: InputTaxonomy) {
+
+ def toTaxonomy: Taxonomy =
+ inputTaxonomy
+ .into[Taxonomy]
+ .transform
+ }
+
+ implicit val taxonomyOutput: Renderer.Aux[RichTaxonomy, OutputTaxonomy] =
+ Renderer.toJson[RichTaxonomy, OutputTaxonomy](
+ _.into[OutputTaxonomy]
+ .withFieldComputed(_._id, _._id.toString)
+ .withFieldConst(_._type, "Taxonomy")
+ .withFieldComputed(_.tags, _.tags.map(_.toOutput))
+ .withFieldConst(_.extraData, JsObject.empty)
+ .transform
+ )
+
+ implicit val taxonomyWithStatsOutput: Renderer.Aux[(RichTaxonomy, JsObject), OutputTaxonomy] =
+ Renderer.toJson[(RichTaxonomy, JsObject), OutputTaxonomy] { taxoWithExtraData =>
+ taxoWithExtraData
+ ._1
+ .into[OutputTaxonomy]
+ .withFieldComputed(_._id, _._id.toString)
+ .withFieldConst(_._type, "Taxonomy")
+ .withFieldComputed(_.tags, _.tags.map(_.toOutput))
+ .withFieldConst(_.extraData, taxoWithExtraData._2)
+ .transform
+ }
+
+ implicit val tagOutput: Renderer.Aux[Tag, OutputTag] =
+ Renderer.toJson[Tag, OutputTag](
+ _.into[OutputTag].transform
+ )
+
implicit class InputUserOps(inputUser: InputUser) {
def toUser: User =
@@ -295,6 +330,14 @@ object Conversion {
.transform
}
+ implicit val shareOutput: Renderer.Aux[RichShare, OutputShare] = Renderer.toJson[RichShare, OutputShare](
+ _.into[OutputShare]
+ .withFieldComputed(_._id, _.share._id.toString)
+ .withFieldConst(_._type, "Share")
+ .withFieldComputed(_.caseId, _.caseId.toString)
+ .transform
+ )
+
implicit val profileOutput: Renderer.Aux[Profile with Entity, OutputProfile] = Renderer.toJson[Profile with Entity, OutputProfile](profile =>
profile
.asInstanceOf[Profile]
@@ -344,6 +387,7 @@ object Conversion {
.withFieldConst(_.data, None)
.transform
}
+
implicit val observableOutput: Renderer.Aux[RichObservable, OutputObservable] = Renderer.toJson[RichObservable, OutputObservable](richObservable =>
richObservable
.into[OutputObservable]
@@ -454,4 +498,41 @@ object Conversion {
.transform
}
+ implicit class InputPatternOps(inputPattern: InputPattern) {
+ def toPattern: Pattern =
+ inputPattern
+ .into[Pattern]
+ .withFieldRenamed(_.external_id, _.patternId)
+ .withFieldComputed(_.tactics, _.kill_chain_phases.map(_.phase_name).toSet)
+ .withFieldRenamed(_.`type`, _.patternType)
+ .withFieldRenamed(_.x_mitre_platforms, _.platforms)
+ .withFieldRenamed(_.x_mitre_data_sources, _.dataSources)
+ .withFieldRenamed(_.x_mitre_version, _.revision)
+ .transform
+ }
+
+ implicit val richPatternRenderer: Renderer.Aux[RichPattern, OutputPattern] =
+ Renderer.toJson[RichPattern, OutputPattern](
+ _.into[OutputPattern]
+ .withFieldComputed(_._id, _._id.toString)
+ .withFieldConst(_._type, "Pattern")
+ .withFieldComputed(_.parent, _.parent.map(_.patternId))
+ .transform
+ )
+
+ implicit class InputProcedureOps(inputProcedure: InputProcedure) {
+ def toProcedure: Procedure =
+ inputProcedure
+ .into[Procedure]
+ .transform
+ }
+
+ implicit val richProcedureRenderer: Renderer.Aux[RichProcedure, OutputProcedure] =
+ Renderer.toJson[RichProcedure, OutputProcedure](
+ _.into[OutputProcedure]
+ .withFieldComputed(_._id, _._id.toString)
+ .withFieldComputed(_.patternId, _.pattern.patternId)
+ .transform
+ )
+
}
diff --git a/thehive/app/org/thp/thehive/controllers/v1/DescribeCtrl.scala b/thehive/app/org/thp/thehive/controllers/v1/DescribeCtrl.scala
index 4ccbd4a908..9f988d865e 100644
--- a/thehive/app/org/thp/thehive/controllers/v1/DescribeCtrl.scala
+++ b/thehive/app/org/thp/thehive/controllers/v1/DescribeCtrl.scala
@@ -37,8 +37,11 @@ class DescribeCtrl @Inject() (
observableTypeCtrl: ObservableTypeCtrl,
organisationCtrl: OrganisationCtrl,
// pageCtrl: PageCtrl,
+ patternCtrl: PatternCtrl,
+ procedureCtrl: ProcedureCtrl,
profileCtrl: ProfileCtrl,
taskCtrl: TaskCtrl,
+ taxonomyCtrl: TaxonomyCtrl,
userCtrl: UserCtrl,
customFieldSrv: CustomFieldSrv,
impactStatusSrv: ImpactStatusSrv,
@@ -107,8 +110,11 @@ class DescribeCtrl @Inject() (
),
EntityDescription("organisation", "listOrganisation", organisationCtrl.publicProperties.list.flatMap(propertyToJson("organisation", _))),
// EntityDescription("page", "listPage", pageCtrl.publicProperties.list.flatMap(propertyToJson("page", _)))
+ EntityDescription("pattern", "listPattern", patternCtrl.publicProperties.list.flatMap(propertyToJson("pattern", _))),
+ EntityDescription("procedure", "listProcedure", procedureCtrl.publicProperties.list.flatMap(propertyToJson("procedure", _))),
EntityDescription("profile", "listProfile", profileCtrl.publicProperties.list.flatMap(propertyToJson("profile", _))),
EntityDescription("task", "listTask", taskCtrl.publicProperties.list.flatMap(propertyToJson("case_task", _))),
+ EntityDescription("taxonomy", "listTaxonomy", taxonomyCtrl.publicProperties.list.flatMap(propertyToJson("taxonomy", _))),
EntityDescription("user", "listUser", userCtrl.publicProperties.list.flatMap(propertyToJson("user", _)))
) ++ describeCortexEntity("job", "listJob", "JobCtrl") ++
describeCortexEntity("action", "listAction", "ActionCtrl")
diff --git a/thehive/app/org/thp/thehive/controllers/v1/ObservableCtrl.scala b/thehive/app/org/thp/thehive/controllers/v1/ObservableCtrl.scala
index 98c0d29e92..479d5e7cb9 100644
--- a/thehive/app/org/thp/thehive/controllers/v1/ObservableCtrl.scala
+++ b/thehive/app/org/thp/thehive/controllers/v1/ObservableCtrl.scala
@@ -12,6 +12,7 @@ import org.thp.scalligraph.traversal.{IteratorOutput, Traversal}
import org.thp.thehive.controllers.v1.Conversion._
import org.thp.thehive.dto.v1.{InputAttachment, InputObservable}
import org.thp.thehive.models._
+import org.thp.thehive.services.AlertOps._
import org.thp.thehive.services.CaseOps._
import org.thp.thehive.services.ObservableOps._
import org.thp.thehive.services.OrganisationOps._
@@ -21,12 +22,14 @@ import play.api.libs.Files.DefaultTemporaryFileCreator
import play.api.libs.json.{JsArray, JsValue, Json}
import play.api.mvc.{Action, AnyContent, Results}
import play.api.{Configuration, Logger}
+import shapeless.{:+:, CNil, Coproduct, Poly1}
import java.io.FilterInputStream
import java.nio.file.Files
+import java.util.Base64
import javax.inject.{Inject, Singleton}
import scala.collection.JavaConverters._
-import scala.util.{Failure, Success}
+import scala.util.{Failure, Success, Try}
@Singleton
class ObservableCtrl @Inject() (
@@ -36,6 +39,7 @@ class ObservableCtrl @Inject() (
observableSrv: ObservableSrv,
observableTypeSrv: ObservableTypeSrv,
caseSrv: CaseSrv,
+ alertSrv: AlertSrv,
organisationSrv: OrganisationSrv,
attachmentSrv: AttachmentSrv,
errorHandler: ErrorHandler,
@@ -44,6 +48,8 @@ class ObservableCtrl @Inject() (
) extends QueryableCtrl
with ObservableRenderer {
+ type AnyAttachmentType = InputAttachment :+: FFile :+: String :+: CNil
+
lazy val logger: Logger = Logger(getClass)
override val entityName: String = "observable"
override val publicProperties: PublicProperties = properties.observable
@@ -79,11 +85,12 @@ class ObservableCtrl @Inject() (
(observableSteps, authContext) => observableSteps.filteredSimilar.visible(organisationSrv)(authContext)
),
Query[Traversal.V[Observable], Traversal.V[Case]]("case", (observableSteps, _) => observableSteps.`case`),
- Query[Traversal.V[Observable], Traversal.V[Alert]]("alert", (observableSteps, _) => observableSteps.alert)
+ Query[Traversal.V[Observable], Traversal.V[Alert]]("alert", (observableSteps, _) => observableSteps.alert),
+ Query[Traversal.V[Observable], Traversal.V[Share]]("shares", (observableSteps, authContext) => observableSteps.shares.visible(authContext))
)
- def create(caseId: String): Action[AnyContent] =
- entrypoint("create observable")
+ def createInCase(caseId: String): Action[AnyContent] =
+ entrypoint("create observable in case")
.extract("observable", FieldsParser[InputObservable])
.extract("isZip", FieldsParser.boolean.optional.on("isZip"))
.extract("zipPassword", FieldsParser.string.optional.on("zipPassword"))
@@ -109,10 +116,10 @@ class ObservableCtrl @Inject() (
val successesAndFailures =
if (observableType.isAttachment)
inputAttachObs
- .flatMap(obs => obs.attachment.map(createAttachmentObservable(case0, obs, _)))
+ .flatMap(obs => obs.attachment.map(createAttachmentObservableInCase(case0, obs, _)))
else
inputAttachObs
- .flatMap(obs => obs.data.map(createSimpleObservable(case0, obs, _)))
+ .flatMap(obs => obs.data.map(createSimpleObservableInCase(case0, obs, _)))
val (successes, failures) = successesAndFailures
.foldLeft[(Seq[JsValue], Seq[JsValue])]((Nil, Nil)) {
case ((s, f), Right(o)) => (s :+ o, f)
@@ -123,7 +130,7 @@ class ObservableCtrl @Inject() (
}
}
- def createSimpleObservable(
+ private def createSimpleObservableInCase(
`case`: Case with Entity,
inputObservable: InputObservable,
data: String
@@ -136,7 +143,7 @@ class ObservableCtrl @Inject() (
case Failure(error) => Left(errorHandler.toErrorResult(error)._2 ++ Json.obj("object" -> Json.obj("data" -> data)))
}
- def createAttachmentObservable(
+ private def createAttachmentObservableInCase(
`case`: Case with Entity,
inputObservable: InputObservable,
fileOrAttachment: Either[FFile, InputAttachment]
@@ -159,12 +166,119 @@ class ObservableCtrl @Inject() (
Left(Json.obj("object" -> Json.obj("data" -> s"file:$filename", "attachment" -> Json.obj("name" -> filename))))
}
+ def createInAlert(alertId: String): Action[AnyContent] =
+ entrypoint("create artifact in alert")
+ .extract("artifact", FieldsParser[InputObservable])
+ .extract("isZip", FieldsParser.boolean.optional.on("isZip"))
+ .extract("zipPassword", FieldsParser.string.optional.on("zipPassword"))
+ .auth { implicit request =>
+ val inputObservable: InputObservable = request.body("artifact")
+ val isZip: Option[Boolean] = request.body("isZip")
+ val zipPassword: Option[String] = request.body("zipPassword")
+ val inputAttachObs = if (isZip.contains(true)) getZipFiles(inputObservable, zipPassword) else Seq(inputObservable)
+
+ db
+ .roTransaction { implicit graph =>
+ for {
+ alert <-
+ alertSrv
+ .get(EntityIdOrName(alertId))
+ .can(organisationSrv, Permissions.manageAlert)
+ .orFail(AuthorizationError("Operation not permitted"))
+ observableType <- observableTypeSrv.getOrFail(EntityName(inputObservable.dataType))
+ } yield (alert, observableType)
+ }
+ .map {
+ case (alert, observableType) =>
+ val successesAndFailures =
+ if (observableType.isAttachment)
+ inputAttachObs
+ .flatMap { obs =>
+ (obs.attachment.map(_.fold(Coproduct[AnyAttachmentType](_), Coproduct[AnyAttachmentType](_))) ++
+ obs.data.map(Coproduct[AnyAttachmentType](_)))
+ .map(createAttachmentObservableInAlert(alert, obs, _))
+ }
+ else
+ inputAttachObs
+ .flatMap(obs => obs.data.map(createSimpleObservableInAlert(alert, obs, _)))
+ val (successes, failures) = successesAndFailures
+ .foldLeft[(Seq[JsValue], Seq[JsValue])]((Nil, Nil)) {
+ case ((s, f), Right(o)) => (s :+ o, f)
+ case ((s, f), Left(o)) => (s, f :+ o)
+ }
+ if (failures.isEmpty) Results.Created(JsArray(successes))
+ else Results.MultiStatus(Json.obj("success" -> successes, "failure" -> failures))
+ }
+ }
+
+ private def createSimpleObservableInAlert(
+ alert: Alert with Entity,
+ inputObservable: InputObservable,
+ data: String
+ )(implicit authContext: AuthContext): Either[JsValue, JsValue] =
+ db
+ .tryTransaction { implicit graph =>
+ alertSrv.createObservable(alert, inputObservable.toObservable, data)
+ } match {
+ case Success(o) => Right(o.toJson)
+ case Failure(error) => Left(errorHandler.toErrorResult(error)._2 ++ Json.obj("object" -> Json.obj("data" -> data)))
+ }
+
+ private def createAttachmentObservableInAlert(
+ alert: Alert with Entity,
+ inputObservable: InputObservable,
+ attachment: AnyAttachmentType
+ )(implicit authContext: AuthContext): Either[JsValue, JsValue] =
+ db
+ .tryTransaction { implicit graph =>
+ object createAttachment extends Poly1 {
+ implicit val fromFile: Case.Aux[FFile, Try[RichObservable]] = at[FFile] { file =>
+ alertSrv.createObservable(alert, inputObservable.toObservable, file)
+ }
+ implicit val fromAttachment: Case.Aux[InputAttachment, Try[RichObservable]] = at[InputAttachment] { attachment =>
+ for {
+ attach <- attachmentSrv.duplicate(attachment.name, attachment.contentType, attachment.id)
+ obs <- alertSrv.createObservable(alert, inputObservable.toObservable, attach)
+ } yield obs
+ }
+
+ implicit val fromString: Case.Aux[String, Try[RichObservable]] = at[String] { data =>
+ data.split(';') match {
+ case Array(filename, contentType, value) =>
+ val data = Base64.getDecoder.decode(value)
+ attachmentSrv
+ .create(filename, contentType, data)
+ .flatMap(attachment => alertSrv.createObservable(alert, inputObservable.toObservable, attachment))
+ case Array(filename, contentType) =>
+ attachmentSrv
+ .create(filename, contentType, Array.emptyByteArray)
+ .flatMap(attachment => alertSrv.createObservable(alert, inputObservable.toObservable, attachment))
+ case data =>
+ Failure(InvalidFormatAttributeError("artifacts.data", "filename;contentType;base64value", Set.empty, FString(data.mkString(";"))))
+ }
+ }
+ }
+ attachment.fold(createAttachment)
+ } match {
+ case Success(o) => Right(o.toJson)
+ case _ =>
+ object attachmentName extends Poly1 {
+ implicit val fromFile: Case.Aux[FFile, String] = at[FFile](_.filename)
+ implicit val fromAttachment: Case.Aux[InputAttachment, String] = at[InputAttachment](_.name)
+ implicit val fromString: Case.Aux[String, String] = at[String] { data =>
+ if (data.contains(';')) data.takeWhile(_ != ';') else "no name"
+ }
+ }
+ val filename = attachment.fold(attachmentName)
+ Left(Json.obj("object" -> Json.obj("data" -> s"file:$filename", "attachment" -> Json.obj("name" -> filename))))
+ }
+
def get(observableId: String): Action[AnyContent] =
entrypoint("get observable")
- .authRoTransaction(db) { _ => implicit graph =>
+ .authRoTransaction(db) { implicit request => implicit graph =>
observableSrv
.get(EntityIdOrName(observableId))
- // .availableFor(request.organisation)
+ .visible(organisationSrv)
.richObservable
.getOrFail("Observable")
.map { observable =>
@@ -178,10 +292,7 @@ class ObservableCtrl @Inject() (
.authTransaction(db) { implicit request => implicit graph =>
val propertyUpdaters: Seq[PropertyUpdater] = request.body("observable")
observableSrv
- .update(
- _.get(EntityIdOrName(observableId)).can(Permissions.manageObservable),
- propertyUpdaters
- )
+ .update(_.get(EntityIdOrName(observableId)).canManage(organisationSrv), propertyUpdaters)
.map(_ => Results.NoContent)
}
@@ -195,19 +306,19 @@ class ObservableCtrl @Inject() (
ids
.toTry { id =>
observableSrv
- .update(_.get(EntityIdOrName(id)).can(Permissions.manageObservable), properties)
+ .update(_.get(EntityIdOrName(id)).canManage(organisationSrv), properties)
}
.map(_ => Results.NoContent)
}
- def delete(obsId: String): Action[AnyContent] =
+ def delete(observableId: String): Action[AnyContent] =
entrypoint("delete")
.authTransaction(db) { implicit request => implicit graph =>
for {
observable <-
observableSrv
- .get(EntityIdOrName(obsId))
- .can(Permissions.manageObservable)
+ .get(EntityIdOrName(observableId))
+ .canManage(organisationSrv)
.getOrFail("Observable")
_ <- observableSrv.remove(observable)
} yield Results.NoContent
diff --git a/thehive/app/org/thp/thehive/controllers/v1/PatternCtrl.scala b/thehive/app/org/thp/thehive/controllers/v1/PatternCtrl.scala
new file mode 100644
index 0000000000..c07ea46b3f
--- /dev/null
+++ b/thehive/app/org/thp/thehive/controllers/v1/PatternCtrl.scala
@@ -0,0 +1,129 @@
+package org.thp.thehive.controllers.v1
+
+import org.thp.scalligraph.auth.AuthContext
+import org.thp.scalligraph.controllers.{Entrypoint, FFile, FieldsParser}
+import org.thp.scalligraph.models.{Database, Entity}
+import org.thp.scalligraph.query.{ParamQuery, PublicProperties, Query}
+import org.thp.scalligraph.traversal.TraversalOps.TraversalOpsDefs
+import org.thp.scalligraph.traversal.{Graph, IteratorOutput, Traversal}
+import org.thp.scalligraph.{BadRequestError, EntityIdOrName}
+import org.thp.thehive.controllers.v1.Conversion._
+import org.thp.thehive.dto.v1.InputPattern
+import org.thp.thehive.models.{Pattern, Permissions, RichPattern}
+import org.thp.thehive.services.PatternOps._
+import org.thp.thehive.services.PatternSrv
+import play.api.libs.json.{JsArray, Json}
+import play.api.mvc.{Action, AnyContent, Results}
+
+import java.io.FileInputStream
+import javax.inject.{Inject, Named, Singleton}
+import scala.util.{Failure, Success, Try}
+
+@Singleton
+class PatternCtrl @Inject() (
+ entrypoint: Entrypoint,
+ properties: Properties,
+ patternSrv: PatternSrv,
+ @Named("with-thehive-schema") implicit val db: Database
+) extends QueryableCtrl {
+ override val entityName: String = "pattern"
+ override val publicProperties: PublicProperties = properties.pattern
+ override val initialQuery: Query = Query.init[Traversal.V[Pattern]](
+ "listPattern",
+ (graph, _) =>
+ patternSrv
+ .startTraversal(graph)
+ )
+ override val pageQuery: ParamQuery[OutputParam] = Query.withParam[OutputParam, Traversal.V[Pattern], IteratorOutput](
+ "page",
+ FieldsParser[OutputParam],
+ (range, patternSteps, _) => patternSteps.richPage(range.from, range.to, range.extraData.contains("total"))(_.richPattern)
+ )
+ override val outputQuery: Query = Query.output[RichPattern, Traversal.V[Pattern]](_.richPattern)
+ override val getQuery: ParamQuery[EntityIdOrName] = Query.initWithParam[EntityIdOrName, Traversal.V[Pattern]](
+ "getPattern",
+ FieldsParser[EntityIdOrName],
+ (idOrName, graph, _) => patternSrv.get(idOrName)(graph)
+ )
+
+ def importMitre: Action[AnyContent] =
+ entrypoint("import MITRE ATT&CK patterns")
+ .extract("file", FieldsParser.file.on("file"))
+ .authPermitted(Permissions.managePattern) { implicit request =>
+ val file: FFile = request.body("file")
+
+ for {
+ inputPatterns <- parseJsonFile(file)
+ richPatterns =
+ inputPatterns
+ .sortBy(_.external_id.length) // sort to create sub-patterns after their parent
+ .foldLeft[JsArray](JsArray.empty) { (array, inputPattern) =>
+ val res = db.tryTransaction { implicit graph =>
+ createFromInput(inputPattern)
+ } match {
+ case Failure(e) =>
+ Json.obj("status" -> "Failure", "message" -> e.getMessage)
+ case Success(t) =>
+ Json.obj("status" -> "Success", "mitreId" -> t.patternId, "patternName" -> t.name)
+ }
+ array :+ res
+ }
+ } yield Results.Created(richPatterns)
+ }
+
+ def get(patternId: String): Action[AnyContent] =
+ entrypoint("get pattern")
+ .authRoTransaction(db) { implicit request => implicit graph =>
+ patternSrv
+ .get(EntityIdOrName(patternId))
+ .richPattern
+ .getOrFail("Pattern")
+ .map(richPattern => Results.Ok(richPattern.toJson))
+ }
+
+ def getCasePatterns(caseId: String): Action[AnyContent] =
+ entrypoint("get case patterns")
+ .authRoTransaction(db) { implicit request => implicit graph =>
+ for {
+ patternIds <- patternSrv.getCasePatterns(caseId)
+ } yield Results.Ok(patternIds.toJson)
+ }
+
+ def delete(patternId: String): Action[AnyContent] =
+ entrypoint("delete pattern")
+ .authPermittedTransaction(db, Permissions.managePattern) { implicit request => implicit graph =>
+ patternSrv
+ .getOrFail(EntityIdOrName(patternId))
+ .flatMap(patternSrv.remove)
+ .map(_ => Results.NoContent)
+ }
+
+ private def parseJsonFile(file: FFile): Try[Seq[InputPattern]] =
+ for {
+ json <- Try(Json.parse(new FileInputStream(file.filepath.toString)))
+ } yield (json \ "objects").as[Seq[InputPattern]]
+
+ private def createFromInput(inputPattern: InputPattern)(implicit graph: Graph, authContext: AuthContext): Try[Pattern with Entity] =
+ if (inputPattern.external_id.isEmpty)
+ Failure(BadRequestError(s"A pattern with no MITRE id cannot be imported"))
+ else if (patternSrv.startTraversal.alreadyImported(inputPattern.external_id))
+ Failure(BadRequestError(s"A pattern with MITRE id '${inputPattern.external_id}' already exists in this organisation"))
+ else
+ for {
+ pattern <- patternSrv.createEntity(inputPattern.toPattern)
+ _ = if (inputPattern.x_mitre_is_subtechnique.getOrElse(false)) linkPattern(pattern)
+ } yield pattern
+
+ private def linkPattern(child: Pattern with Entity)(implicit graph: Graph, authContext: AuthContext): Try[Unit] = {
+ val firstDot = child.patternId.indexOf(".")
+ if (firstDot == -1)
+      Failure(BadRequestError(s"Invalid sub-pattern patternId ${child.patternId} (must contain a dot)"))
+ else {
+ val parentId = child.patternId.substring(0, firstDot)
+ for {
+ parent <- patternSrv.startTraversal.getByPatternId(parentId).getOrFail("Pattern")
+ _ <- patternSrv.setParent(child, parent)
+ } yield ()
+ }
+ }
+}
diff --git a/thehive/app/org/thp/thehive/controllers/v1/ProcedureCtrl.scala b/thehive/app/org/thp/thehive/controllers/v1/ProcedureCtrl.scala
new file mode 100644
index 0000000000..ba10c6ce90
--- /dev/null
+++ b/thehive/app/org/thp/thehive/controllers/v1/ProcedureCtrl.scala
@@ -0,0 +1,74 @@
+package org.thp.thehive.controllers.v1
+
+import org.thp.scalligraph.EntityIdOrName
+import org.thp.scalligraph.controllers.{Entrypoint, FieldsParser}
+import org.thp.scalligraph.models.Database
+import org.thp.scalligraph.query.{ParamQuery, PublicProperties, Query}
+import org.thp.scalligraph.traversal.TraversalOps.TraversalOpsDefs
+import org.thp.scalligraph.traversal.{IteratorOutput, Traversal}
+import org.thp.thehive.controllers.v1.Conversion._
+import org.thp.thehive.dto.v1.InputProcedure
+import org.thp.thehive.models.{Permissions, Procedure, RichProcedure}
+import org.thp.thehive.services.ProcedureOps._
+import org.thp.thehive.services.ProcedureSrv
+import play.api.mvc.{Action, AnyContent, Results}
+
+import javax.inject.{Inject, Named, Singleton}
+
+@Singleton
+class ProcedureCtrl @Inject() (
+ entrypoint: Entrypoint,
+ properties: Properties,
+ procedureSrv: ProcedureSrv,
+ @Named("with-thehive-schema") implicit val db: Database
+) extends QueryableCtrl {
+ override val entityName: String = "procedure"
+ override val publicProperties: PublicProperties = properties.procedure
+ override val initialQuery: Query = Query.init[Traversal.V[Procedure]](
+ "listProcedure",
+ (graph, _) =>
+ procedureSrv
+ .startTraversal(graph)
+ )
+ override val pageQuery: ParamQuery[OutputParam] = Query.withParam[OutputParam, Traversal.V[Procedure], IteratorOutput](
+ "page",
+ FieldsParser[OutputParam],
+ (range, procedureSteps, _) => procedureSteps.richPage(range.from, range.to, range.extraData.contains("total"))(_.richProcedure)
+ )
+ override val outputQuery: Query = Query.output[RichProcedure, Traversal.V[Procedure]](_.richProcedure)
+ override val getQuery: ParamQuery[EntityIdOrName] = Query.initWithParam[EntityIdOrName, Traversal.V[Procedure]](
+ "getProcedure",
+ FieldsParser[EntityIdOrName],
+ (idOrName, graph, _) => procedureSrv.get(idOrName)(graph)
+ )
+
+ def create: Action[AnyContent] =
+ entrypoint("create procedure")
+ .extract("procedure", FieldsParser[InputProcedure])
+ .authPermittedTransaction(db, Permissions.manageProcedure) { implicit request => implicit graph =>
+ val inputProcedure: InputProcedure = request.body("procedure")
+ for {
+ richProcedure <- procedureSrv.create(inputProcedure.toProcedure, inputProcedure.caseId, inputProcedure.patternId)
+ } yield Results.Created(richProcedure.toJson)
+ }
+
+ def get(procedureId: String): Action[AnyContent] =
+ entrypoint("get procedure")
+ .authRoTransaction(db) { _ => implicit graph =>
+ procedureSrv
+ .get(EntityIdOrName(procedureId))
+ .richProcedure
+ .getOrFail("Procedure")
+ .map(richProcedure => Results.Ok(richProcedure.toJson))
+ }
+
+ def delete(procedureId: String): Action[AnyContent] =
+ entrypoint("delete procedure")
+ .authPermittedTransaction(db, Permissions.manageProcedure) { implicit request => implicit graph =>
+ procedureSrv
+ .getOrFail(EntityIdOrName(procedureId))
+ .flatMap(procedureSrv.remove)
+ .map(_ => Results.NoContent)
+ }
+
+}
diff --git a/thehive/app/org/thp/thehive/controllers/v1/Properties.scala b/thehive/app/org/thp/thehive/controllers/v1/Properties.scala
index 4a39b93c6a..43c9a1bfce 100644
--- a/thehive/app/org/thp/thehive/controllers/v1/Properties.scala
+++ b/thehive/app/org/thp/thehive/controllers/v1/Properties.scala
@@ -1,9 +1,9 @@
package org.thp.thehive.controllers.v1
+import org.apache.tinkerpop.gremlin.structure.Vertex
import org.thp.scalligraph.controllers.{FPathElem, FPathEmpty, FieldsParser}
import org.thp.scalligraph.models.{Database, UMapping}
import org.thp.scalligraph.query.{PublicProperties, PublicPropertyListBuilder}
-import org.thp.scalligraph.traversal.Converter
import org.thp.scalligraph.traversal.TraversalOps._
import org.thp.scalligraph.{BadRequestError, EntityIdOrName, RichSeq}
import org.thp.thehive.models._
@@ -15,12 +15,13 @@ import org.thp.thehive.services.CustomFieldOps._
import org.thp.thehive.services.LogOps._
import org.thp.thehive.services.ObservableOps._
import org.thp.thehive.services.OrganisationOps._
+import org.thp.thehive.services.ShareOps._
import org.thp.thehive.services.TaskOps._
+import org.thp.thehive.services.TaxonomyOps._
import org.thp.thehive.services.UserOps._
import org.thp.thehive.services._
import play.api.libs.json.{JsObject, Json}
-import java.lang.{Long => JLong}
import javax.inject.{Inject, Singleton}
import scala.util.Failure
@@ -115,6 +116,14 @@ class Properties @Inject() (
} yield Json.obj("customFields" -> values)
case _ => Failure(BadRequestError("Invalid custom fields format"))
})
+ .property("case", db.idMapping)(_.select(_.`case`._id).readonly)
+ .property("imported", UMapping.boolean)(_.select(_.imported).readonly)
+ .property("importDate", UMapping.date.optional)(_.select(_.importDate).readonly)
+ .property("computed.handlingDuration", UMapping.long)(_.select(_.handlingDuration).readonly)
+ .property("computed.handlingDurationInSeconds", UMapping.long)(_.select(_.handlingDuration.math("_ / 1000").domainMap(_.toLong)).readonly)
+ .property("computed.handlingDurationInMinutes", UMapping.long)(_.select(_.handlingDuration.math("_ / 60000").domainMap(_.toLong)).readonly)
+ .property("computed.handlingDurationInHours", UMapping.long)(_.select(_.handlingDuration.math("_ / 3600000").domainMap(_.toLong)).readonly)
+ .property("computed.handlingDurationInDays", UMapping.long)(_.select(_.handlingDuration.math("_ / 86400000").domainMap(_.toLong)).readonly)
.build
lazy val audit: PublicProperties =
@@ -210,66 +219,11 @@ class Properties @Inject() (
} yield Json.obj("customFields" -> values)
case _ => Failure(BadRequestError("Invalid custom fields format"))
})
- .property("computed.handlingDurationInDays", UMapping.long)(
- _.select(
- _.coalesceIdent(
- _.has(_.endDate)
- .sack(
- (_: JLong, endDate: JLong) => endDate,
- _.by(_.value(_.endDate).graphMap[Long, JLong, Converter[Long, JLong]](_.getTime, Converter.long))
- )
- .sack((_: Long) - (_: JLong), _.by(_.value(_.startDate).graphMap[Long, JLong, Converter[Long, JLong]](_.getTime, Converter.long)))
- .sack((_: Long) / (_: Long), _.by(_.constant(86400000L)))
- .sack[Long],
- _.constant(0L)
- )
- ).readonly
- )
- .property("computed.handlingDurationInHours", UMapping.long)(
- _.select(
- _.coalesceIdent(
- _.has(_.endDate)
- .sack(
- (_: JLong, endDate: JLong) => endDate,
- _.by(_.value(_.endDate).graphMap[Long, JLong, Converter[Long, JLong]](_.getTime, Converter.long))
- )
- .sack((_: Long) - (_: JLong), _.by(_.value(_.startDate).graphMap[Long, JLong, Converter[Long, JLong]](_.getTime, Converter.long)))
- .sack((_: Long) / (_: Long), _.by(_.constant(3600000L)))
- .sack[Long],
- _.constant(0L)
- )
- ).readonly
- )
- .property("computed.handlingDurationInMinutes", UMapping.long)(
- _.select(
- _.coalesceIdent(
- _.has(_.endDate)
- .sack(
- (_: JLong, endDate: JLong) => endDate,
- _.by(_.value(_.endDate).graphMap[Long, JLong, Converter[Long, JLong]](_.getTime, Converter.long))
- )
- .sack((_: Long) - (_: JLong), _.by(_.value(_.startDate).graphMap[Long, JLong, Converter[Long, JLong]](_.getTime, Converter.long)))
- .sack((_: Long) / (_: Long), _.by(_.constant(60000L)))
- .sack[Long],
- _.constant(0L)
- )
- ).readonly
- )
- .property("computed.handlingDurationInSeconds", UMapping.long)(
- _.select(
- _.coalesceIdent(
- _.has(_.endDate)
- .sack(
- (_: JLong, endDate: JLong) => endDate,
- _.by(_.value(_.endDate).graphMap[Long, JLong, Converter[Long, JLong]](_.getTime, Converter.long))
- )
- .sack((_: Long) - (_: JLong), _.by(_.value(_.startDate).graphMap[Long, JLong, Converter[Long, JLong]](_.getTime, Converter.long)))
- .sack((_: Long) / (_: Long), _.by(_.constant(1000L)))
- .sack[Long],
- _.constant(0L)
- )
- ).readonly
- )
+ .property("computed.handlingDuration", UMapping.long)(_.select(_.handlingDuration).readonly)
+ .property("computed.handlingDurationInSeconds", UMapping.long)(_.select(_.handlingDuration.math("_ / 1000").domainMap(_.toLong)).readonly)
+ .property("computed.handlingDurationInMinutes", UMapping.long)(_.select(_.handlingDuration.math("_ / 60000").domainMap(_.toLong)).readonly)
+ .property("computed.handlingDurationInHours", UMapping.long)(_.select(_.handlingDuration.math("_ / 3600000").domainMap(_.toLong)).readonly)
+ .property("computed.handlingDurationInDays", UMapping.long)(_.select(_.handlingDuration.math("_ / 86400000").domainMap(_.toLong)).readonly)
.property("viewingOrganisation", UMapping.string)(
_.authSelect((cases, authContext) => cases.organisations.visible(authContext).value(_.name)).readonly
)
@@ -319,12 +273,42 @@ class Properties @Inject() (
.property("description", UMapping.string)(_.field.updatable)
.build
+ lazy val pattern: PublicProperties =
+ PublicPropertyListBuilder[Pattern]
+ .property("patternId", UMapping.string)(_.field.readonly)
+ .property("name", UMapping.string)(_.field.readonly)
+ .property("description", UMapping.string.optional)(_.field.updatable)
+ .property("tactics", UMapping.string.set)(_.field.readonly)
+ .property("url", UMapping.string)(_.field.updatable)
+ .property("patternType", UMapping.string)(_.field.readonly)
+ .property("platforms", UMapping.string.sequence)(_.field.readonly)
+ .property("dataSources", UMapping.string.sequence)(_.field.readonly)
+ .property("version", UMapping.string.optional)(_.field.readonly)
+ .build
+
+ lazy val procedure: PublicProperties =
+ PublicPropertyListBuilder[Procedure]
+ .property("description", UMapping.string)(_.field.updatable)
+ .property("occurence", UMapping.date)(_.field.readonly)
+ .build
+
lazy val profile: PublicProperties =
PublicPropertyListBuilder[Profile]
.property("name", UMapping.string)(_.field.updatable)
.property("permissions", UMapping.string.set)(_.field.updatable)
.build
+ lazy val share: PublicProperties =
+ PublicPropertyListBuilder[Share]
+ .property("caseId", UMapping.entityId)(_.select(_.`case`._id).readonly)
+ .property("caseNumber", UMapping.int)(_.select(_.`case`.value(_.number)).readonly)
+ .property("organisationId", UMapping.entityId)(_.select(_.organisation._id).readonly)
+ .property("organisationName", UMapping.string)(_.select(_.organisation.value(_.name)).readonly)
+ .property("profileId", UMapping.entityId)(_.select(_.profile._id).readonly)
+ .property("profileName", UMapping.string)(_.select(_.profile.value(_.name)).readonly)
+ .property("owner", UMapping.boolean)(_.field.readonly)
+ .build
+
lazy val task: PublicProperties =
PublicPropertyListBuilder[Task]
.property("title", UMapping.string)(_.field.updatable)
@@ -401,4 +385,20 @@ class Properties @Inject() (
.property("attachment.contentType", UMapping.string.optional)(_.select(_.attachments.value(_.contentType)).readonly)
.property("attachment.id", UMapping.string.optional)(_.select(_.attachments.value(_.attachmentId)).readonly)
.build
+
+ lazy val taxonomy: PublicProperties =
+ PublicPropertyListBuilder[Taxonomy]
+ .property("namespace", UMapping.string)(_.field.readonly)
+ .property("description", UMapping.string)(_.field.readonly)
+ .property("version", UMapping.int)(_.field.readonly)
+ .property("enabled", UMapping.boolean)(_.select(_.enabled).readonly)
+ .build
+
+ private def vertexToTag: Vertex => String = { v =>
+ val namespace = UMapping.string.getProperty(v, "namespace")
+ val predicate = UMapping.string.getProperty(v, "predicate")
+ val value = UMapping.string.optional.getProperty(v, "value")
+ Tag(namespace, predicate, value, None, "#000000").toString
+ }
+
}
diff --git a/thehive/app/org/thp/thehive/controllers/v1/Router.scala b/thehive/app/org/thp/thehive/controllers/v1/Router.scala
index 82f1ad699a..a7be1d3717 100644
--- a/thehive/app/org/thp/thehive/controllers/v1/Router.scala
+++ b/thehive/app/org/thp/thehive/controllers/v1/Router.scala
@@ -1,10 +1,11 @@
package org.thp.thehive.controllers.v1
-import javax.inject.{Inject, Singleton}
import play.api.routing.Router.Routes
import play.api.routing.SimpleRouter
import play.api.routing.sird._
+import javax.inject.{Inject, Singleton}
+
@Singleton
class Router @Inject() (
authenticationCtrl: AuthenticationCtrl,
@@ -23,13 +24,17 @@ class Router @Inject() (
organisationCtrl: OrganisationCtrl,
// pageCtrl: PageCtrl,
// permissionCtrl: PermissionCtrl,
+ patternCtrl: PatternCtrl,
+ procedureCtrl: ProcedureCtrl,
profileCtrl: ProfileCtrl,
taskCtrl: TaskCtrl,
+ shareCtrl: ShareCtrl,
+ taxonomyCtrl: TaxonomyCtrl,
// shareCtrl: ShareCtrl,
userCtrl: UserCtrl,
statusCtrl: StatusCtrl
// streamCtrl: StreamCtrl,
- // tagCtrl: TagCtrl
+ // tagCtrl: TagCtrl,
) extends SimpleRouter {
override def routes: Routes = {
@@ -52,13 +57,14 @@ class Router @Inject() (
// case POST(p"/case/_stats") => caseCtrl.stats()
// case GET(p"/case/$caseId/links") => caseCtrl.linkedCases(caseId)
- case POST(p"/case/$caseId/observable") => observableCtrl.create(caseId)
+ case POST(p"/case/$caseId/observable") => observableCtrl.createInCase(caseId)
+ case POST(p"/alert/$alertId/artifact") => observableCtrl.createInAlert(alertId)
case GET(p"/observable/$observableId") => observableCtrl.get(observableId)
case DELETE(p"/observable/$observableId") => observableCtrl.delete(observableId)
case PATCH(p"/observable/_bulk") => observableCtrl.bulkUpdate
case PATCH(p"/observable/$observableId") => observableCtrl.update(observableId)
// case GET(p"/observable/$observableId/similar") => observableCtrl.findSimilar(observableId)
-// case POST(p"/observable/$observableId/shares") => shareCtrl.shareObservable(observableId)
+ case POST(p"/observable/$observableId/shares") => shareCtrl.shareObservable(observableId)
case GET(p"/caseTemplate") => caseTemplateCtrl.list
case POST(p"/caseTemplate") => caseTemplateCtrl.create
@@ -83,18 +89,25 @@ class Router @Inject() (
case GET(p"/organisation/$organisationId") => organisationCtrl.get(organisationId)
case PATCH(p"/organisation/$organisationId") => organisationCtrl.update(organisationId)
-// case GET(p"/share") => shareCtrl.list
-// case POST(p"/share") => shareCtrl.create
-// case GET(p"/share/$shareId") => shareCtrl.get(shareId)
-// case PATCH(p"/share/$shareId") => shareCtrl.update(shareId)
-
- case GET(p"/task") => taskCtrl.list
- case POST(p"/task") => taskCtrl.create
- case GET(p"/task/$taskId") => taskCtrl.get(taskId)
- case PATCH(p"/task/$taskId") => taskCtrl.update(taskId)
- case GET(p"/task/$taskId/actionRequired") => taskCtrl.isActionRequired(taskId)
- case PUT(p"/task/$taskId/actionRequired/$orgaId") => taskCtrl.actionRequired(taskId, orgaId, required = true)
- case PUT(p"/task/$taskId/actionDone/$orgaId") => taskCtrl.actionRequired(taskId, orgaId, required = false)
+ case DELETE(p"/case/shares") => shareCtrl.removeShares()
+ case POST(p"/case/$caseId/shares") => shareCtrl.shareCase(caseId)
+ case DELETE(p"/case/$caseId/shares") => shareCtrl.removeShares(caseId)
+ case DELETE(p"/task/$taskId/shares") => shareCtrl.removeTaskShares(taskId)
+ case DELETE(p"/observable/$observableId/shares") => shareCtrl.removeObservableShares(observableId)
+ case GET(p"/case/$caseId/shares") => shareCtrl.listShareCases(caseId)
+ case GET(p"/case/$caseId/task/$taskId/shares") => shareCtrl.listShareTasks(caseId, taskId)
+ case GET(p"/case/$caseId/observable/$observableId/shares") => shareCtrl.listShareObservables(caseId, observableId)
+ case POST(p"/case/task/$taskId/shares") => shareCtrl.shareTask(taskId)
+ case DELETE(p"/case/share/$shareId") => shareCtrl.removeShare(shareId)
+ case PATCH(p"/case/share/$shareId") => shareCtrl.updateShare(shareId)
+
+ case GET(p"/task") => taskCtrl.list
+ case POST(p"/task") => taskCtrl.create
+ case GET(p"/task/$taskId") => taskCtrl.get(taskId)
+ case PATCH(p"/task/$taskId") => taskCtrl.update(taskId)
+ case GET(p"/task/$taskId/actionRequired") => taskCtrl.isActionRequired(taskId)
+ case PUT(p"/task/$taskId/actionRequired/$orgaId") => taskCtrl.actionRequired(taskId, orgaId, required = true)
+ case PUT(p"/task/$taskId/actionDone/$orgaId") => taskCtrl.actionRequired(taskId, orgaId, required = false)
// POST /case/:caseId/task/_search controllers.TaskCtrl.findInCase(caseId)
// POST /case/task/_stats controllers.TaskCtrl.stats()
@@ -117,11 +130,27 @@ class Router @Inject() (
// DELETE /alert/:alertId controllers.AlertCtrl.delete(alertId)
// POST /alert/:alertId/merge/:caseId controllers.AlertCtrl.mergeWithCase(alertId, caseId)
+ case POST(p"/taxonomy") => taxonomyCtrl.create
+ case POST(p"/taxonomy/import-zip") => taxonomyCtrl.importZip
+ case GET(p"/taxonomy/$taxoId") => taxonomyCtrl.get(taxoId)
+ case PUT(p"/taxonomy/$taxoId/activate") => taxonomyCtrl.toggleActivation(taxoId, isActive = true)
+ case PUT(p"/taxonomy/$taxoId/deactivate") => taxonomyCtrl.toggleActivation(taxoId, isActive = false)
+ case DELETE(p"/taxonomy/$taxoId") => taxonomyCtrl.delete(taxoId)
+
case GET(p"/audit") => auditCtrl.flow
-// GET /flow controllers.AuditCtrl.flow(rootId: Option[String], count: Option[Int])
-// GET /audit controllers.AuditCtrl.find()
-// POST /audit/_search controllers.AuditCtrl.find()
-// POST /audit/_stats controllers.AuditCtrl.stats()
+ // GET /flow controllers.AuditCtrl.flow(rootId: Option[String], count: Option[Int])
+ // GET /audit controllers.AuditCtrl.find()
+ // POST /audit/_search controllers.AuditCtrl.find()
+ // POST /audit/_stats controllers.AuditCtrl.stats()
+
+ case POST(p"/pattern/import/attack") => patternCtrl.importMitre
+ case GET(p"/pattern/$patternId") => patternCtrl.get(patternId)
+ case GET(p"/pattern/case/$caseId") => patternCtrl.getCasePatterns(caseId)
+ case DELETE(p"/pattern/$patternId") => patternCtrl.delete(patternId)
+
+ case POST(p"/procedure") => procedureCtrl.create
+ case GET(p"/procedure/$procedureId") => procedureCtrl.get(procedureId)
+ case DELETE(p"/procedure/$procedureId") => procedureCtrl.delete(procedureId)
case POST(p"/profile") => profileCtrl.create
case GET(p"/profile/$profileId") => profileCtrl.get(profileId)
diff --git a/thehive/app/org/thp/thehive/controllers/v1/ShareCtrl.scala b/thehive/app/org/thp/thehive/controllers/v1/ShareCtrl.scala
new file mode 100644
index 0000000000..d9fa073a63
--- /dev/null
+++ b/thehive/app/org/thp/thehive/controllers/v1/ShareCtrl.scala
@@ -0,0 +1,265 @@
+package org.thp.thehive.controllers.v1
+
+import org.thp.scalligraph.auth.AuthContext
+import org.thp.scalligraph.controllers.{Entrypoint, FieldsParser}
+import org.thp.scalligraph.models.Database
+import org.thp.scalligraph.query.{ParamQuery, PublicProperties, Query}
+import org.thp.scalligraph.traversal.TraversalOps._
+import org.thp.scalligraph.traversal.{Graph, IteratorOutput, Traversal}
+import org.thp.scalligraph.{AuthorizationError, BadRequestError, EntityIdOrName, RichSeq}
+import org.thp.thehive.controllers.v1.Conversion._
+import org.thp.thehive.dto.v1.{InputShare, ObservablesFilter, TasksFilter}
+import org.thp.thehive.models._
+import org.thp.thehive.services.CaseOps._
+import org.thp.thehive.services.ObservableOps._
+import org.thp.thehive.services.OrganisationOps._
+import org.thp.thehive.services.ShareOps._
+import org.thp.thehive.services.TaskOps._
+import org.thp.thehive.services._
+import play.api.mvc.{Action, AnyContent, Results}
+
+import javax.inject.{Inject, Named}
+import scala.util.{Failure, Success, Try}
+
+class ShareCtrl @Inject() (
+ entrypoint: Entrypoint,
+ shareSrv: ShareSrv,
+ properties: Properties,
+ organisationSrv: OrganisationSrv,
+ caseSrv: CaseSrv,
+ taskSrv: TaskSrv,
+ observableSrv: ObservableSrv,
+ profileSrv: ProfileSrv,
+ @Named("with-thehive-schema") implicit val db: Database
+) extends QueryableCtrl {
+ override val entityName: String = "share"
+ override val publicProperties: PublicProperties = properties.share
+ override val initialQuery: Query =
+ Query.init[Traversal.V[Share]]("listShare", (graph, authContext) => organisationSrv.startTraversal(graph).visible(authContext).shares)
+ override val pageQuery: ParamQuery[OutputParam] = Query.withParam[OutputParam, Traversal.V[Share], IteratorOutput](
+ "page",
+ FieldsParser[OutputParam],
+ (range, shareSteps, _) => shareSteps.richPage(range.from, range.to, range.extraData.contains("total"))(_.richShare)
+ )
+ override val outputQuery: Query = Query.outputWithContext[RichShare, Traversal.V[Share]]((shareSteps, _) => shareSteps.richShare)
+ override val getQuery: ParamQuery[EntityIdOrName] = Query.initWithParam[EntityIdOrName, Traversal.V[Share]](
+ "getShare",
+ FieldsParser[EntityIdOrName],
+ (idOrName, graph, authContext) => shareSrv.get(idOrName)(graph).visible(authContext)
+ )
+ override val extraQueries: Seq[ParamQuery[_]] = Seq(
+ Query[Traversal.V[Share], Traversal.V[Case]]("case", (shareSteps, _) => shareSteps.`case`),
+ Query[Traversal.V[Share], Traversal.V[Observable]]("observables", (shareSteps, _) => shareSteps.observables),
+ Query[Traversal.V[Share], Traversal.V[Task]]("tasks", (shareSteps, _) => shareSteps.tasks),
+ Query[Traversal.V[Share], Traversal.V[Organisation]]("organisation", (shareSteps, _) => shareSteps.organisation)
+ )
+
+ def shareCase(caseId: String): Action[AnyContent] =
+ entrypoint("create case shares")
+ .extract("shares", FieldsParser[InputShare].sequence.on("shares"))
+ .authTransaction(db) { implicit request => implicit graph =>
+ val inputShares: Seq[InputShare] = request.body("shares")
+ caseSrv
+ .get(EntityIdOrName(caseId))
+ .can(Permissions.manageShare)
+ .getOrFail("Case")
+ .flatMap { `case` =>
+ inputShares.toTry { inputShare =>
+ for {
+ organisation <-
+ organisationSrv
+ .get(request.organisation)
+ .visibleOrganisationsFrom
+ .get(EntityIdOrName(inputShare.organisationName))
+ .getOrFail("Organisation")
+ profile <- profileSrv.getOrFail(EntityIdOrName(inputShare.profile))
+ share <- shareSrv.shareCase(owner = false, `case`, organisation, profile)
+ richShare <- shareSrv.get(share).richShare.getOrFail("Share")
+ _ <- if (inputShare.tasks == TasksFilter.all) shareSrv.shareCaseTasks(share) else Success(Nil)
+ _ <- if (inputShare.observables == ObservablesFilter.all) shareSrv.shareCaseObservables(share) else Success(Nil)
+ } yield richShare
+ }
+ }
+ .map(shares => Results.Ok(shares.toJson))
+ }
+
+ def removeShare(shareId: String): Action[AnyContent] =
+ entrypoint("remove share")
+ .authTransaction(db) { implicit request => implicit graph =>
+ doRemoveShare(EntityIdOrName(shareId)).map(_ => Results.NoContent)
+ }
+
+ def removeShares(): Action[AnyContent] =
+ entrypoint("remove share")
+ .extract("shares", FieldsParser[String].sequence.on("ids"))
+ .authTransaction(db) { implicit request => implicit graph =>
+ val shareIds: Seq[String] = request.body("shares")
+ shareIds.map(EntityIdOrName.apply).toTry(doRemoveShare(_)).map(_ => Results.NoContent)
+ }
+
+ def removeShares(caseId: String): Action[AnyContent] =
+ entrypoint("remove share")
+ .extract("organisations", FieldsParser[String].sequence.on("organisations"))
+ .authTransaction(db) { implicit request => implicit graph =>
+ val organisations: Seq[String] = request.body("organisations")
+ organisations
+ .map(EntityIdOrName(_))
+ .toTry { organisationId =>
+ for {
+ organisation <- organisationSrv.get(organisationId).getOrFail("Organisation")
+ _ <-
+ if (request.organisation.fold(_ == organisation._id, _ == organisation.name))
+ Failure(BadRequestError("You cannot remove your own share"))
+ else Success(())
+ shareId <-
+ caseSrv
+ .get(EntityIdOrName(caseId))
+ .can(Permissions.manageShare)
+ .share(organisationId)
+ .has(_.owner, false)
+ ._id
+ .orFail(AuthorizationError("Operation not permitted"))
+ _ <- shareSrv.remove(shareId)
+ } yield ()
+ }
+ .map(_ => Results.NoContent)
+ }
+
+ def removeTaskShares(taskId: String): Action[AnyContent] =
+ entrypoint("remove share tasks")
+ .extract("organisations", FieldsParser[String].sequence.on("organisations"))
+ .authTransaction(db) { implicit request => implicit graph =>
+ val organisations: Seq[String] = request.body("organisations")
+
+ taskSrv
+ .getOrFail(EntityIdOrName(taskId))
+ .flatMap { task =>
+ organisations.toTry { organisationName =>
+ organisationSrv
+ .getOrFail(EntityIdOrName(organisationName))
+ .flatMap(shareSrv.removeShareTasks(task, _))
+ }
+ }
+ .map(_ => Results.NoContent)
+ }
+
+ def removeObservableShares(observableId: String): Action[AnyContent] =
+ entrypoint("remove share observables")
+ .extract("organisations", FieldsParser[String].sequence.on("organisations"))
+ .authTransaction(db) { implicit request => implicit graph =>
+ val organisations: Seq[String] = request.body("organisations")
+
+ observableSrv
+ .getOrFail(EntityIdOrName(observableId))
+ .flatMap { observable =>
+ organisations.toTry { organisationName =>
+ organisationSrv
+ .getOrFail(EntityIdOrName(organisationName))
+ .flatMap(shareSrv.removeShareObservable(observable, _))
+ }
+ }
+ .map(_ => Results.NoContent)
+ }
+
+ private def doRemoveShare(shareId: EntityIdOrName)(implicit graph: Graph, authContext: AuthContext): Try[Unit] =
+ if (!shareSrv.get(shareId).`case`.can(Permissions.manageShare).exists)
+ Failure(AuthorizationError("You are not authorized to remove share"))
+ else if (shareSrv.get(shareId).byOrganisation(authContext.organisation).exists)
+ Failure(AuthorizationError("You can't remove your share"))
+ else if (shareSrv.get(shareId).has(_.owner, true).exists)
+ Failure(AuthorizationError("You can't remove initial shares"))
+ else
+ shareSrv.remove(shareId)
+
+ def updateShare(shareId: String): Action[AnyContent] =
+ entrypoint("update share")
+ .extract("profile", FieldsParser.string.on("profile"))
+ .authTransaction(db) { implicit request => implicit graph =>
+ val profile: String = request.body("profile")
+      if (!shareSrv.get(EntityIdOrName(shareId)).`case`.can(Permissions.manageShare).exists)
+        Failure(AuthorizationError("You are not authorized to update share"))
+      else for {
+ richShare <-
+ shareSrv
+ .get(EntityIdOrName(shareId))
+ .filter(_.organisation.visibleOrganisationsTo.visible)
+ .richShare
+ .getOrFail("Share")
+ profile <- profileSrv.getOrFail(EntityIdOrName(profile))
+ _ <- shareSrv.updateProfile(richShare.share, profile)
+ } yield Results.Ok
+ }
+
+ def listShareCases(caseId: String): Action[AnyContent] =
+ entrypoint("list case shares")
+ .authRoTransaction(db) { implicit request => implicit graph =>
+ val shares = caseSrv
+ .get(EntityIdOrName(caseId))
+ .shares
+ .visible
+ .filterNot(_.get(request.organisation))
+ .richShare
+ .toSeq
+
+ Success(Results.Ok(shares.toJson))
+ }
+
+ def listShareTasks(caseId: String, taskId: String): Action[AnyContent] =
+ entrypoint("list task shares")
+ .authRoTransaction(db) { implicit request => implicit graph =>
+ val shares = caseSrv
+ .get(EntityIdOrName(caseId))
+ .can(Permissions.manageShare)
+ .shares
+ .visible
+ .filterNot(_.get(request.organisation))
+ .byTask(EntityIdOrName(taskId))
+ .richShare
+ .toSeq
+
+ Success(Results.Ok(shares.toJson))
+ }
+
+ def listShareObservables(caseId: String, observableId: String): Action[AnyContent] =
+ entrypoint("list observable shares")
+ .authRoTransaction(db) { implicit request => implicit graph =>
+ val shares = caseSrv
+ .get(EntityIdOrName(caseId))
+ .can(Permissions.manageShare)
+ .shares
+ .visible
+ .filterNot(_.get(request.organisation))
+ .byObservable(EntityIdOrName(observableId))
+ .richShare
+ .toSeq
+
+ Success(Results.Ok(shares.toJson))
+ }
+
+ def shareTask(taskId: String): Action[AnyContent] =
+ entrypoint("share task")
+ .extract("organisations", FieldsParser.string.sequence.on("organisations"))
+ .authTransaction(db) { implicit request => implicit graph =>
+ val organisationIds: Seq[String] = request.body("organisations")
+
+ for {
+ task <- taskSrv.getOrFail(EntityIdOrName(taskId))
+ _ <- taskSrv.get(task).`case`.can(Permissions.manageShare).existsOrFail
+ organisations <- organisationIds.map(EntityIdOrName(_)).toTry(organisationSrv.get(_).visible.getOrFail("Organisation"))
+ _ <- shareSrv.addTaskShares(task, organisations)
+ } yield Results.NoContent
+ }
+
+ def shareObservable(observableId: String): Action[AnyContent] =
+ entrypoint("share observable")
+ .extract("organisations", FieldsParser.string.sequence.on("organisations"))
+ .authTransaction(db) { implicit request => implicit graph =>
+ val organisationIds: Seq[String] = request.body("organisations")
+ for {
+ observable <- observableSrv.getOrFail(EntityIdOrName(observableId))
+ _ <- observableSrv.get(observable).`case`.can(Permissions.manageShare).existsOrFail
+ organisations <- organisationIds.map(EntityIdOrName(_)).toTry(organisationSrv.get(_).visible.getOrFail("Organisation"))
+ _ <- shareSrv.addObservableShares(observable, organisations)
+ } yield Results.NoContent
+ }
+}
diff --git a/thehive/app/org/thp/thehive/controllers/v1/TaskCtrl.scala b/thehive/app/org/thp/thehive/controllers/v1/TaskCtrl.scala
index 4549656c04..591f3bd4cd 100644
--- a/thehive/app/org/thp/thehive/controllers/v1/TaskCtrl.scala
+++ b/thehive/app/org/thp/thehive/controllers/v1/TaskCtrl.scala
@@ -10,8 +10,9 @@ import org.thp.thehive.controllers.v1.Conversion._
import org.thp.thehive.dto.v1.InputTask
import org.thp.thehive.models._
import org.thp.thehive.services.CaseOps._
-import org.thp.thehive.services.OrganisationOps._
import org.thp.thehive.services.CaseTemplateOps._
+import org.thp.thehive.services.OrganisationOps._
+import org.thp.thehive.services.ShareOps._
import org.thp.thehive.services.TaskOps._
import org.thp.thehive.services.{CaseSrv, OrganisationSrv, TaskSrv}
import play.api.mvc.{Action, AnyContent, Results}
@@ -74,7 +75,8 @@ class TaskCtrl @Inject() (
Query[Traversal.V[Task], Traversal.V[Log]]("logs", (taskSteps, _) => taskSteps.logs),
Query[Traversal.V[Task], Traversal.V[Case]]("case", (taskSteps, _) => taskSteps.`case`),
Query[Traversal.V[Task], Traversal.V[CaseTemplate]]("caseTemplate", (taskSteps, authContext) => taskSteps.caseTemplate.visible(authContext)),
- Query[Traversal.V[Task], Traversal.V[Organisation]]("organisations", (taskSteps, authContext) => taskSteps.organisations.visible(authContext))
+ Query[Traversal.V[Task], Traversal.V[Organisation]]("organisations", (taskSteps, authContext) => taskSteps.organisations.visible(authContext)),
+ Query[Traversal.V[Task], Traversal.V[Share]]("shares", (taskSteps, authContext) => taskSteps.shares.visible(authContext))
)
def create: Action[AnyContent] =
diff --git a/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala b/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala
new file mode 100644
index 0000000000..8432c08d9a
--- /dev/null
+++ b/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala
@@ -0,0 +1,161 @@
+package org.thp.thehive.controllers.v1
+
+import net.lingala.zip4j.ZipFile
+import net.lingala.zip4j.model.FileHeader
+import org.thp.scalligraph.auth.AuthContext
+import org.thp.scalligraph.controllers.{Entrypoint, FFile, FieldsParser}
+import org.thp.scalligraph.models.Database
+import org.thp.scalligraph.query._
+import org.thp.scalligraph.traversal.TraversalOps.TraversalOpsDefs
+import org.thp.scalligraph.traversal.{Graph, IteratorOutput, Traversal}
+import org.thp.scalligraph.{BadRequestError, EntityIdOrName, RichSeq}
+import org.thp.thehive.controllers.v1.Conversion._
+import org.thp.thehive.dto.v1.InputTaxonomy
+import org.thp.thehive.models.{Permissions, RichTaxonomy, Tag, Taxonomy}
+import org.thp.thehive.services.TaxonomyOps._
+import org.thp.thehive.services.{TagSrv, TaxonomySrv}
+import play.api.libs.json.{JsArray, Json}
+import play.api.mvc.{Action, AnyContent, Results}
+
+import javax.inject.{Inject, Named}
+import scala.collection.JavaConverters._
+import scala.util.{Failure, Success, Try}
+
+class TaxonomyCtrl @Inject() (
+ entrypoint: Entrypoint,
+ properties: Properties,
+ taxonomySrv: TaxonomySrv,
+ tagSrv: TagSrv,
+ @Named("with-thehive-schema") implicit val db: Database
+) extends QueryableCtrl
+ with TaxonomyRenderer {
+
+ override val entityName: String = "taxonomy"
+ override val publicProperties: PublicProperties = properties.taxonomy
+ override val initialQuery: Query =
+ Query.init[Traversal.V[Taxonomy]]("listTaxonomy", (graph, authContext) => taxonomySrv.startTraversal(graph).visible(authContext))
+ override val getQuery: ParamQuery[EntityIdOrName] =
+ Query.initWithParam[EntityIdOrName, Traversal.V[Taxonomy]](
+ "getTaxonomy",
+ FieldsParser[EntityIdOrName],
+ (idOrName, graph, authContext) => taxonomySrv.get(idOrName)(graph).visible(authContext)
+ )
+ override val pageQuery: ParamQuery[OutputParam] =
+ Query.withParam[OutputParam, Traversal.V[Taxonomy], IteratorOutput](
+ "page",
+ FieldsParser[OutputParam],
+ {
+ case (OutputParam(from, to, extraData), taxoSteps, authContext) =>
+ taxoSteps.richPage(from, to, extraData.contains("total")) {
+ _.richTaxonomyWithCustomRenderer(taxoStatsRenderer(extraData - "total"))
+ }
+ }
+ )
+ override val outputQuery: Query =
+ Query.outputWithContext[RichTaxonomy, Traversal.V[Taxonomy]]((traversal, _) => traversal.richTaxonomy)
+ override val extraQueries: Seq[ParamQuery[_]] = Seq(
+ Query[Traversal.V[Taxonomy], Traversal.V[Tag]]("tags", (traversal, _) => traversal.tags)
+ )
+
+ def create: Action[AnyContent] =
+ entrypoint("import taxonomy")
+ .extract("taxonomy", FieldsParser[InputTaxonomy])
+ .authPermittedTransaction(db, Permissions.manageTaxonomy) { implicit request => implicit graph =>
+ for {
+ richTaxonomy <- createFromInput(request.body("taxonomy"))
+ } yield Results.Created(richTaxonomy.toJson)
+ }
+
+ def importZip: Action[AnyContent] =
+ entrypoint("import taxonomies zip")
+ .extract("file", FieldsParser.file.on("file"))
+ .authPermitted(Permissions.manageTaxonomy) { implicit request =>
+ val file: FFile = request.body("file")
+ val zipFile = new ZipFile(file.filepath.toString)
+ val headers = zipFile
+ .getFileHeaders
+ .iterator()
+ .asScala
+
+ for {
+ inputTaxos <-
+ headers
+ .filter(h => h.getFileName.endsWith("machinetag.json"))
+ .toTry(parseJsonFile(zipFile, _))
+ richTaxos = inputTaxos.foldLeft[JsArray](JsArray.empty) { (array, taxo) =>
+ val res = db.tryTransaction { implicit graph =>
+ createFromInput(taxo)
+ } match {
+ case Failure(e) =>
+ Json.obj("namespace" -> taxo.namespace, "status" -> "Failure", "message" -> e.getMessage)
+ case Success(t) =>
+ Json.obj("namespace" -> t.namespace, "status" -> "Success", "tagsImported" -> t.tags.size)
+ }
+ array :+ res
+ }
+ } yield Results.Created(richTaxos)
+ }
+
+ private def parseJsonFile(zipFile: ZipFile, h: FileHeader): Try[InputTaxonomy] =
+ Try(Json.parse(zipFile.getInputStream(h)).as[InputTaxonomy]).recoverWith {
+ case _ => Failure(BadRequestError(s"File '${h.getFileName}' does not comply with the MISP taxonomy formatting"))
+ }
+
+ private def createFromInput(inputTaxo: InputTaxonomy)(implicit graph: Graph, authContext: AuthContext): Try[RichTaxonomy] = {
+ // Create tags
+ val tagValues = inputTaxo.values.getOrElse(Seq())
+ val tags = tagValues.flatMap { value =>
+ value.entry.map(e => Tag(inputTaxo.namespace, value.predicate, Some(e.value), e.expanded, e.colour.getOrElse(tagSrv.defaultColour)))
+ }
+
+ // Create a tag for predicates with no tags associated
+ val predicateWithNoTags = inputTaxo.predicates.map(_.value).diff(tagValues.map(_.predicate))
+ val allTags = tags ++ predicateWithNoTags.map(p => Tag(inputTaxo.namespace, p, None, None, tagSrv.defaultColour))
+
+ if (inputTaxo.namespace.isEmpty)
+ Failure(BadRequestError(s"A taxonomy with no namespace cannot be imported"))
+ else if (inputTaxo.namespace.startsWith("_freetags"))
+ Failure(BadRequestError(s"Namespace _freetags is restricted for TheHive"))
+ else if (taxonomySrv.startTraversal.alreadyImported(inputTaxo.namespace))
+ Failure(BadRequestError(s"A taxonomy with namespace '${inputTaxo.namespace}' already exists in this organisation"))
+ else
+ for {
+ tagsEntities <- allTags.toTry(t => tagSrv.create(t))
+ richTaxonomy <- taxonomySrv.create(inputTaxo.toTaxonomy, tagsEntities)
+ } yield richTaxonomy
+ }
+
+ def get(taxonomyId: String): Action[AnyContent] =
+ entrypoint("get taxonomy")
+ .authRoTransaction(db) { implicit request => implicit graph =>
+ taxonomySrv
+ .get(EntityIdOrName(taxonomyId))
+ .visible
+ .richTaxonomy
+ .getOrFail("Taxonomy")
+ .map(taxonomy => Results.Ok(taxonomy.toJson))
+ }
+
+ def toggleActivation(taxonomyId: String, isActive: Boolean): Action[AnyContent] =
+ entrypoint("toggle taxonomy")
+ .authPermittedTransaction(db, Permissions.manageTaxonomy) { implicit request => implicit graph =>
+ val toggleF = if (isActive) taxonomySrv.activate _ else taxonomySrv.deactivate _
+ toggleF(EntityIdOrName(taxonomyId)).map(_ => Results.NoContent)
+ }
+
+ def delete(taxoId: String): Action[AnyContent] =
+ entrypoint("delete taxonomy")
+ .authPermittedTransaction(db, Permissions.manageTaxonomy) { implicit request => implicit graph =>
+ for {
+ taxo <-
+ taxonomySrv
+ .get(EntityIdOrName(taxoId))
+ .visible
+ .getOrFail("Taxonomy")
+ tags <- Try(taxonomySrv.get(taxo).tags.toSeq)
+ _ <- tags.toTry(t => tagSrv.delete(t))
+ _ <- taxonomySrv.delete(taxo)
+ } yield Results.NoContent
+ }
+
+}
diff --git a/thehive/app/org/thp/thehive/controllers/v1/TaxonomyRenderer.scala b/thehive/app/org/thp/thehive/controllers/v1/TaxonomyRenderer.scala
new file mode 100644
index 0000000000..b5b45a8b89
--- /dev/null
+++ b/thehive/app/org/thp/thehive/controllers/v1/TaxonomyRenderer.scala
@@ -0,0 +1,43 @@
+package org.thp.thehive.controllers.v1
+
+import org.thp.scalligraph.traversal.TraversalOps._
+import org.thp.scalligraph.traversal.{Converter, Traversal}
+import org.thp.thehive.models.Taxonomy
+import org.thp.thehive.services.TaxonomyOps._
+import play.api.libs.json._
+
+import java.util.{Map => JMap}
+
+trait TaxonomyRenderer {
+
+ def enabledStats: Traversal.V[Taxonomy] => Traversal[JsValue, Boolean, Converter[JsValue, Boolean]] =
+ _.enabled.domainMap(l => JsBoolean(l))
+
+ def taxoStatsRenderer(extraData: Set[String]):
+ Traversal.V[Taxonomy] => Traversal[JsObject, JMap[String, Any], Converter[JsObject, JMap[String, Any]]] = { traversal =>
+ def addData[G](
+ name: String
+ )(f: Traversal.V[Taxonomy] => Traversal[JsValue, G, Converter[JsValue, G]]): Traversal[JsObject, JMap[String, Any], Converter[
+ JsObject,
+ JMap[String, Any]
+ ]] => Traversal[JsObject, JMap[String, Any], Converter[JsObject, JMap[String, Any]]] = { t =>
+ val dataTraversal = f(traversal.start)
+ t.onRawMap[JsObject, JMap[String, Any], Converter[JsObject, JMap[String, Any]]](_.by(dataTraversal.raw)) { jmap =>
+ t.converter(jmap) + (name -> dataTraversal.converter(jmap.get(name).asInstanceOf[G]))
+ }
+ }
+
+ if (extraData.isEmpty) traversal.constant2[JsObject, JMap[String, Any]](JsObject.empty)
+ else {
+ val dataName = extraData.toSeq
+ dataName.foldLeft[Traversal[JsObject, JMap[String, Any], Converter[JsObject, JMap[String, Any]]]](
+ traversal.onRawMap[JsObject, JMap[String, Any], Converter[JsObject, JMap[String, Any]]](_.project(dataName.head, dataName.tail: _*))(_ =>
+ JsObject.empty
+ )
+ ) {
+ case (f, "enabled") => addData("enabled")(enabledStats)(f)
+ case (f, _) => f
+ }
+ }
+ }
+}
diff --git a/thehive/app/org/thp/thehive/controllers/v1/TheHiveQueryExecutor.scala b/thehive/app/org/thp/thehive/controllers/v1/TheHiveQueryExecutor.scala
index 434c2f749b..b46fdea04d 100644
--- a/thehive/app/org/thp/thehive/controllers/v1/TheHiveQueryExecutor.scala
+++ b/thehive/app/org/thp/thehive/controllers/v1/TheHiveQueryExecutor.scala
@@ -30,9 +30,13 @@ class TheHiveQueryExecutor @Inject() (
observableCtrl: ObservableCtrl,
observableTypeCtrl: ObservableTypeCtrl,
organisationCtrl: OrganisationCtrl,
+ patternCtrl: PatternCtrl,
+ procedureCtrl: ProcedureCtrl,
profileCtrl: ProfileCtrl,
+ shareCtrl: ShareCtrl,
taskCtrl: TaskCtrl,
userCtrl: UserCtrl,
+ taxonomyCtrl: TaxonomyCtrl,
// dashboardCtrl: DashboardCtrl,
properties: Properties,
implicit val db: Database
@@ -51,10 +55,14 @@ class TheHiveQueryExecutor @Inject() (
observableTypeCtrl,
organisationCtrl,
// pageCtrl,
+ patternCtrl,
+ procedureCtrl,
profileCtrl,
+ shareCtrl,
// tagCtrl,
taskCtrl,
- userCtrl
+ userCtrl,
+ taxonomyCtrl
)
override val version: (Int, Int) = 1 -> 1
diff --git a/thehive/app/org/thp/thehive/models/Case.scala b/thehive/app/org/thp/thehive/models/Case.scala
index 0efac856a1..8b0dfa31d8 100644
--- a/thehive/app/org/thp/thehive/models/Case.scala
+++ b/thehive/app/org/thp/thehive/models/Case.scala
@@ -77,6 +77,9 @@ case class CaseUser()
@BuildEdgeEntity[Case, CaseTemplate]
case class CaseCaseTemplate()
+@BuildEdgeEntity[Case, Procedure]
+case class CaseProcedure()
+
@BuildVertexEntity
@DefineIndex(IndexType.unique, "number")
@DefineIndex(IndexType.fulltext, "title")
diff --git a/thehive/app/org/thp/thehive/models/Organisation.scala b/thehive/app/org/thp/thehive/models/Organisation.scala
index 75ced4f207..aefa7efb23 100644
--- a/thehive/app/org/thp/thehive/models/Organisation.scala
+++ b/thehive/app/org/thp/thehive/models/Organisation.scala
@@ -20,6 +20,9 @@ case class OrganisationShare()
@BuildEdgeEntity[Organisation, Organisation]
case class OrganisationOrganisation()
+@BuildEdgeEntity[Organisation, Taxonomy]
+case class OrganisationTaxonomy()
+
case class RichOrganisation(organisation: Organisation with Entity, links: Seq[Organisation with Entity]) {
def name: String = organisation.name
def description: String = organisation.description
diff --git a/thehive/app/org/thp/thehive/models/Pattern.scala b/thehive/app/org/thp/thehive/models/Pattern.scala
new file mode 100644
index 0000000000..bb03474404
--- /dev/null
+++ b/thehive/app/org/thp/thehive/models/Pattern.scala
@@ -0,0 +1,39 @@
+package org.thp.thehive.models
+
+import org.thp.scalligraph.models.Entity
+import org.thp.scalligraph.{BuildEdgeEntity, BuildVertexEntity, EntityId}
+
+import java.util.Date
+
+@BuildVertexEntity
+case class Pattern(
+ patternId: String,
+ name: String,
+ description: Option[String],
+ tactics: Set[String],
+ url: String,
+ patternType: String,
+ platforms: Seq[String],
+ dataSources: Seq[String],
+ revision: Option[String]
+)
+
+@BuildEdgeEntity[Pattern, Pattern]
+case class PatternPattern()
+
+case class RichPattern(pattern: Pattern with Entity, parent: Option[Pattern with Entity]) {
+ def patternId: String = pattern.patternId
+ def name: String = pattern.name
+ def description: Option[String] = pattern.description
+ def tactics: Set[String] = pattern.tactics
+ def url: String = pattern.url
+ def patternType: String = pattern.patternType
+ def platforms: Seq[String] = pattern.platforms
+ def dataSources: Seq[String] = pattern.dataSources
+ def version: Option[String] = pattern.revision
+ def _id: EntityId = pattern._id
+ def _createdAt: Date = pattern._createdAt
+ def _createdBy: String = pattern._createdBy
+ def _updatedAt: Option[Date] = pattern._updatedAt
+ def _updatedBy: Option[String] = pattern._updatedBy
+}
diff --git a/thehive/app/org/thp/thehive/models/Permissions.scala b/thehive/app/org/thp/thehive/models/Permissions.scala
index 14b45cf5fc..2eca0a7ee6 100644
--- a/thehive/app/org/thp/thehive/models/Permissions.scala
+++ b/thehive/app/org/thp/thehive/models/Permissions.scala
@@ -3,45 +3,51 @@ package org.thp.thehive.models
import org.thp.scalligraph.auth.{Permission, PermissionDesc, Permissions => Perms}
object Permissions extends Perms {
- lazy val manageCase: PermissionDesc = PermissionDesc("manageCase", "Manage cases", "organisation")
- lazy val manageObservable: PermissionDesc = PermissionDesc("manageObservable", "Manage observables", "organisation")
+ lazy val accessTheHiveFS: PermissionDesc = PermissionDesc("accessTheHiveFS", "Access to TheHiveFS", "organisation")
+  lazy val manageAction: PermissionDesc = PermissionDesc("manageAction", "Run Cortex responders", "organisation")
lazy val manageAlert: PermissionDesc = PermissionDesc("manageAlert", "Manage alerts", "organisation")
- lazy val manageUser: PermissionDesc = PermissionDesc("manageUser", "Manage users", "organisation", "admin")
- lazy val manageOrganisation: PermissionDesc = PermissionDesc("manageOrganisation", "Manage organisations", "admin")
- lazy val manageCaseTemplate: PermissionDesc = PermissionDesc("manageCaseTemplate", "Manage case templates", "organisation")
+ lazy val manageAnalyse: PermissionDesc = PermissionDesc("manageAnalyse", "Run Cortex analyzer", "organisation")
lazy val manageAnalyzerTemplate: PermissionDesc = PermissionDesc("manageAnalyzerTemplate", "Manage analyzer templates", "admin")
- lazy val manageTask: PermissionDesc = PermissionDesc("manageTask", "Manage tasks", "organisation")
- lazy val manageAction: PermissionDesc = PermissionDesc("manageAction", "Run Cortex responders ", "organisation")
+ lazy val manageCase: PermissionDesc = PermissionDesc("manageCase", "Manage cases", "organisation")
+ lazy val manageCaseTemplate: PermissionDesc = PermissionDesc("manageCaseTemplate", "Manage case templates", "organisation")
lazy val manageConfig: PermissionDesc = PermissionDesc("manageConfig", "Manage configurations", "organisation", "admin")
- lazy val manageProfile: PermissionDesc = PermissionDesc("manageProfile", "Manage user profiles", "admin")
- lazy val manageTag: PermissionDesc = PermissionDesc("manageTag", "Manage tags", "admin")
lazy val manageCustomField: PermissionDesc = PermissionDesc("manageCustomField", "Manage custom fields", "admin")
- lazy val manageShare: PermissionDesc = PermissionDesc("manageShare", "Manage shares", "organisation")
- lazy val manageAnalyse: PermissionDesc = PermissionDesc("manageAnalyse", "Run Cortex analyzer", "organisation")
- lazy val managePage: PermissionDesc = PermissionDesc("managePage", "Manage pages", "organisation")
+ lazy val manageObservable: PermissionDesc = PermissionDesc("manageObservable", "Manage observables", "organisation")
lazy val manageObservableTemplate: PermissionDesc = PermissionDesc("manageObservableTemplate", "Manage observable types", "admin")
- lazy val accessTheHiveFS: PermissionDesc = PermissionDesc("accessTheHiveFS", "Access to TheHiveFS", "organisation")
+ lazy val manageOrganisation: PermissionDesc = PermissionDesc("manageOrganisation", "Manage organisations", "admin")
+ lazy val managePage: PermissionDesc = PermissionDesc("managePage", "Manage pages", "organisation")
+ lazy val managePattern: PermissionDesc = PermissionDesc("managePattern", "Manage patterns", "admin")
+ lazy val manageProcedure: PermissionDesc = PermissionDesc("manageProcedure", "Manage procedures", "organisation")
+ lazy val manageProfile: PermissionDesc = PermissionDesc("manageProfile", "Manage user profiles", "admin")
+ lazy val manageShare: PermissionDesc = PermissionDesc("manageShare", "Manage shares", "organisation")
+ lazy val manageTag: PermissionDesc = PermissionDesc("manageTag", "Manage tags", "admin")
+ lazy val manageTaxonomy: PermissionDesc = PermissionDesc("manageTaxonomy", "Manage taxonomies", "admin")
+ lazy val manageTask: PermissionDesc = PermissionDesc("manageTask", "Manage tasks", "organisation")
+ lazy val manageUser: PermissionDesc = PermissionDesc("manageUser", "Manage users", "organisation", "admin")
lazy val list: Set[PermissionDesc] =
Set(
- manageCase,
- manageObservable,
+ accessTheHiveFS,
+ manageAction,
manageAlert,
- manageUser,
- manageOrganisation,
- manageCaseTemplate,
+ manageAnalyse,
manageAnalyzerTemplate,
- manageTask,
- manageAction,
+ manageCase,
+ manageCaseTemplate,
manageConfig,
- manageProfile,
- manageTag,
manageCustomField,
- manageShare,
- manageAnalyse,
- managePage,
+ manageObservable,
manageObservableTemplate,
- accessTheHiveFS
+ manageOrganisation,
+ managePage,
+ managePattern,
+ manageProcedure,
+ manageProfile,
+ manageShare,
+ manageTag,
+ manageTask,
+ manageTaxonomy,
+ manageUser
)
// These permissions are available only if the user is in admin organisation, they are removed for other organisations
diff --git a/thehive/app/org/thp/thehive/models/Procedure.scala b/thehive/app/org/thp/thehive/models/Procedure.scala
new file mode 100644
index 0000000000..8d587f69d2
--- /dev/null
+++ b/thehive/app/org/thp/thehive/models/Procedure.scala
@@ -0,0 +1,27 @@
+package org.thp.thehive.models
+
+import org.thp.scalligraph.models.Entity
+import org.thp.scalligraph.{BuildEdgeEntity, BuildVertexEntity, EntityId}
+
+import java.util.Date
+
+@BuildVertexEntity
+case class Procedure(
+ description: String,
+ occurence: Date
+ // metadata
+)
+
+@BuildEdgeEntity[Procedure, Pattern]
+case class ProcedurePattern()
+
+case class RichProcedure(procedure: Procedure with Entity, pattern: Pattern with Entity) {
+ def description: String = procedure.description
+ def occurence: Date = procedure.occurence
+ def _id: EntityId = procedure._id
+ def _createdAt: Date = procedure._createdAt
+ def _createdBy: String = procedure._createdBy
+ def _updatedAt: Option[Date] = procedure._updatedAt
+ def _updatedBy: Option[String] = procedure._updatedBy
+
+}
diff --git a/thehive/app/org/thp/thehive/models/Tag.scala b/thehive/app/org/thp/thehive/models/Tag.scala
index e188ee45c2..ee1264f61e 100644
--- a/thehive/app/org/thp/thehive/models/Tag.scala
+++ b/thehive/app/org/thp/thehive/models/Tag.scala
@@ -4,7 +4,6 @@ import org.thp.scalligraph.BuildVertexEntity
import org.thp.scalligraph.models.{DefineIndex, IndexType}
import play.api.Logger
-import scala.util.Try
import scala.util.matching.Regex
@DefineIndex(IndexType.unique, "namespace", "predicate", "value")
@@ -14,7 +13,7 @@ case class Tag(
predicate: String,
value: Option[String],
description: Option[String],
- colour: Int
+ colour: String
) {
override def hashCode(): Int = 31 * (31 * value.## + predicate.##) + namespace.##
@@ -33,15 +32,15 @@ case class Tag(
object Tag {
lazy val logger: Logger = Logger(getClass)
- val tagColour: Regex = "(.*)#(\\p{XDigit}{6})".r
+ val tagColour: Regex = "(.*)(#\\p{XDigit}{6})".r
val namespacePredicateValue: Regex = "([^\".:=]+)[.:]([^\".=]+)=\"?([^\"]+)\"?".r
val namespacePredicate: Regex = "([^\".:=]+)[.]([^\".=]+)".r
val PredicateValue: Regex = "([^\".:=]+)[=:]\"?([^\"]+)\"?".r
val predicate: Regex = "([^\".:=]+)".r
- def fromString(tagName: String, defaultNamespace: String, defaultColour: Int = 0): Tag = {
+ def fromString(tagName: String, defaultNamespace: String, defaultColour: String = "#000000"): Tag = {
val (name, colour) = tagName match {
- case tagColour(n, c) => n -> Try(Integer.parseUnsignedInt(c, 16)).getOrElse(defaultColour)
+ case tagColour(n, c) => n -> c
case _ => tagName -> defaultColour
}
name match {
diff --git a/thehive/app/org/thp/thehive/models/Taxonomy.scala b/thehive/app/org/thp/thehive/models/Taxonomy.scala
new file mode 100644
index 0000000000..b677c54230
--- /dev/null
+++ b/thehive/app/org/thp/thehive/models/Taxonomy.scala
@@ -0,0 +1,30 @@
+package org.thp.thehive.models
+
+import org.thp.scalligraph.models.Entity
+import org.thp.scalligraph.{BuildEdgeEntity, BuildVertexEntity, EntityId}
+
+import java.util.Date
+
+@BuildVertexEntity
+case class Taxonomy(
+ namespace: String,
+ description: String,
+ version: Int
+)
+
+@BuildEdgeEntity[Taxonomy, Tag]
+case class TaxonomyTag()
+
+case class RichTaxonomy(
+ taxonomy: Taxonomy with Entity,
+ tags: Seq[Tag]
+) {
+ def _id: EntityId = taxonomy._id
+ def _createdBy: String = taxonomy._createdBy
+ def _updatedBy: Option[String] = taxonomy._updatedBy
+ def _createdAt: Date = taxonomy._createdAt
+ def _updatedAt: Option[Date] = taxonomy._updatedAt
+ def namespace: String = taxonomy.namespace
+ def description: String = taxonomy.description
+ def version: Int = taxonomy.version
+}
diff --git a/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala b/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala
index 257d383666..d33a3dcf3c 100644
--- a/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala
+++ b/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala
@@ -7,16 +7,17 @@ import org.janusgraph.graphdb.types.TypeDefinitionCategory
import org.reflections.Reflections
import org.reflections.scanners.SubTypesScanner
import org.reflections.util.ConfigurationBuilder
-import org.thp.scalligraph.EntityId
import org.thp.scalligraph.auth.AuthContext
import org.thp.scalligraph.janus.JanusDatabase
import org.thp.scalligraph.models._
import org.thp.scalligraph.traversal.TraversalOps._
-import org.thp.scalligraph.traversal.{Converter, Graph}
+import org.thp.scalligraph.traversal.{Converter, Graph, Traversal}
+import org.thp.scalligraph.{EntityId, RichSeq}
import org.thp.thehive.services.LocalUserSrv
import play.api.Logger
import java.lang.reflect.Modifier
+import java.util.Date
import javax.inject.{Inject, Singleton}
import scala.collection.JavaConverters._
import scala.reflect.runtime.{universe => ru}
@@ -55,6 +56,7 @@ class TheHiveSchemaDefinition @Inject() extends Schema with UpdatableSchema {
case error => logger.warn(s"Unable to remove lock on property $name: $error")
}
}
+ // TODO remove unused commented code ?
// def removeIndexLock(name: String): Try[Unit] =
// db.managementTransaction { mgmt =>
// Try(mgmt.setConsistency(mgmt.getGraphIndex(name), ConsistencyModifier.DEFAULT))
@@ -92,6 +94,84 @@ class TheHiveSchemaDefinition @Inject() extends Schema with UpdatableSchema {
Success(())
}
//=====[release 4.0.3]=====
+ // Taxonomies
+ .addVertexModel[String]("Taxonomy", Seq("namespace"))
+ .dbOperation[Database]("Add Custom taxonomy vertex for each Organisation") { db =>
+ db.tryTransaction { implicit g =>
+ // For each organisation, if there is no custom taxonomy, create it
+ db.labelFilter("Organisation", Traversal.V()).unsafeHas("name", P.neq("admin")).toIterator.toTry { o =>
+ Traversal.V(EntityId(o.id)).out[OrganisationTaxonomy].v[Taxonomy].unsafeHas("namespace", s"_freetags_${o.id()}").headOption match {
+ case None =>
+ val taxoVertex = g.addVertex("Taxonomy")
+ taxoVertex.property("_label", "Taxonomy")
+ taxoVertex.property("_createdBy", "system@thehive.local")
+ taxoVertex.property("_createdAt", new Date())
+ taxoVertex.property("namespace", s"_freetags_${o.id()}")
+ taxoVertex.property("description", "Custom taxonomy")
+ taxoVertex.property("version", 1)
+ o.addEdge("OrganisationTaxonomy", taxoVertex)
+ Success(())
+ case _ => Success(())
+ }
+ }
+ }.map(_ => ())
+ }
+ .dbOperation[Database]("Add each tag to its Organisation's Custom taxonomy") { db =>
+ db.tryTransaction { implicit g =>
+ db.labelFilter("Organisation", Traversal.V()).unsafeHas("name", P.neq("admin")).toIterator.toTry { o =>
+ val customTaxo = Traversal.V(EntityId(o.id())).out("OrganisationTaxonomy").unsafeHas("namespace", s"_freetags_${o.id()}").head
+ Traversal
+ .V(EntityId(o.id()))
+ .unionFlat(
+ _.out("OrganisationShare").out("ShareCase").out("CaseTag"),
+ _.out("OrganisationShare").out("ShareObservable").out("ObservableTag"),
+ _.in("AlertOrganisation").out("AlertTag"),
+ _.in("CaseTemplateOrganisation").out("CaseTemplateTag")
+ )
+ .toSeq
+ .foreach { tag =>
+ // Create a freetext tag and store it into predicate
+ val tagStr = tagString(
+ tag.property("namespace").value().toString,
+ tag.property("predicate").value().toString,
+ tag.property("value").orElse("")
+ )
+ tag.property("namespace", s"_freetags_${o.id()}")
+ tag.property("predicate", tagStr)
+ tag.property("value").remove()
+ customTaxo.addEdge("TaxonomyTag", tag)
+ }
+ Success(())
+ }
+ }.map(_ => ())
+ }
+ .updateGraph("Add manageTaxonomy to admin profile", "Profile") { traversal =>
+ Try(traversal.unsafeHas("name", "admin").raw.property("permissions", "manageTaxonomy").iterate())
+ Success(())
+ }
+ .updateGraph("Remove colour property for Tags", "Tag") { traversal =>
+ traversal.removeProperty("colour").iterate()
+ Success(())
+ }
+ .removeProperty("Tag", "colour", usedOnlyByThisModel = true)
+ .addProperty[String]("Tag", "colour")
+    .updateGraph("Add property colour for Tags", "Tag") { traversal =>
+ traversal.raw.property("colour", "#000000").iterate()
+ Success(())
+ }
+ .updateGraph("Add managePattern permission to admin profile", "Profile") { traversal =>
+ traversal.unsafeHas("name", "admin").raw.property("permissions", "managePattern").iterate()
+ Success(())
+ }
+ .updateGraph("Add manageProcedure permission to org-admin and analyst profiles", "Profile") { traversal =>
+ traversal
+ .unsafeHas("name", P.within("org-admin", "analyst"))
+ .raw
+ .property("permissions", "manageProcedure")
+ .iterate()
+ Success(())
+ }
+ //=====[release 4.0.3]=====
/* Alert index */
.addProperty[Seq[String]]("Alert", "tags")
.addProperty[EntityId]("Alert", "organisationId")
@@ -295,6 +375,11 @@ class TheHiveSchemaDefinition @Inject() extends Schema with UpdatableSchema {
case vertexModel: VertexModel => vertexModel.getInitialValues
}.flatten
+ private def tagString(namespace: String, predicate: String, value: String): String =
+ (if (namespace.headOption.getOrElse('_') == '_') "" else namespace + ':') +
+ (if (predicate.headOption.getOrElse('_') == '_') "" else predicate) +
+ (if (value.isEmpty) "" else f"""="$value"""")
+
override def init(db: Database)(implicit graph: Graph, authContext: AuthContext): Try[Unit] = Success(())
override val authContext: AuthContext = LocalUserSrv.getSystemAuthContext
diff --git a/thehive/app/org/thp/thehive/services/AlertSrv.scala b/thehive/app/org/thp/thehive/services/AlertSrv.scala
index 3ba30311da..d5fe9a70c8 100644
--- a/thehive/app/org/thp/thehive/services/AlertSrv.scala
+++ b/thehive/app/org/thp/thehive/services/AlertSrv.scala
@@ -1,5 +1,6 @@
package org.thp.thehive.services
+import akka.actor.ActorRef
import org.apache.tinkerpop.gremlin.process.traversal.P
import org.thp.scalligraph.auth.{AuthContext, Permission}
import org.thp.scalligraph.controllers.FFile
@@ -9,7 +10,7 @@ import org.thp.scalligraph.query.PropertyUpdater
import org.thp.scalligraph.services._
import org.thp.scalligraph.traversal.TraversalOps._
import org.thp.scalligraph.traversal._
-import org.thp.scalligraph.{CreateError, EntityId, EntityIdOrName, RichOptionTry, RichSeq}
+import org.thp.scalligraph.{BadRequestError, CreateError, EntityId, EntityIdOrName, RichOptionTry, RichSeq}
import org.thp.thehive.controllers.v1.Conversion._
import org.thp.thehive.dto.v1.InputCustomFieldValue
import org.thp.thehive.models._
@@ -20,8 +21,9 @@ import org.thp.thehive.services.CustomFieldOps._
import org.thp.thehive.services.ObservableOps._
import play.api.libs.json.{JsObject, JsValue, Json}
+import java.lang.{Long => JLong}
import java.util.{Date, Map => JMap}
-import javax.inject.{Inject, Singleton}
+import javax.inject.{Inject, Named, Singleton}
import scala.util.{Failure, Success, Try}
@Singleton
@@ -33,7 +35,8 @@ class AlertSrv @Inject() (
caseTemplateSrv: CaseTemplateSrv,
observableSrv: ObservableSrv,
auditSrv: AuditSrv,
- attachmentSrv: AttachmentSrv
+ attachmentSrv: AttachmentSrv,
+ @Named("integrity-check-actor") integrityCheckActor: ActorRef
) extends VertexSrv[Alert] {
val alertTagSrv = new EdgeSrv[AlertTag, Alert, Tag]
@@ -272,6 +275,7 @@ class AlertSrv @Inject() (
_ <- importObservables(alert.alert, createdCase.`case`)
_ <- alertCaseSrv.create(AlertCase(), alert.alert, createdCase.`case`)
_ <- markAsRead(alert._id)
+ _ = integrityCheckActor ! EntityAdded("Alert")
} yield createdCase
}
}(richCase => auditSrv.`case`.create(richCase.`case`, richCase.toJson))
@@ -284,28 +288,32 @@ class AlertSrv @Inject() (
} yield updatedCase
def mergeInCase(alert: Alert with Entity, `case`: Case with Entity)(implicit graph: Graph, authContext: AuthContext): Try[Case with Entity] =
- auditSrv
- .mergeAudits {
- // No audit for markAsRead and observables
- // Audits for customFields, description and tags
- val description = `case`.description + s"\n \n#### Merged with alert #${alert.sourceRef} ${alert.title}\n\n${alert.description.trim}"
- for {
- _ <- markAsRead(alert._id)
- _ <- importObservables(alert, `case`)
- _ <- importCustomFields(alert, `case`)
- _ <- caseSrv.addTags(`case`, alert.tags.toSet)
- _ <- alertCaseSrv.create(AlertCase(), alert, `case`)
- c <- caseSrv.get(`case`).update(_.description, description).getOrFail("Case")
- details <- Success(
- Json.obj(
- "customFields" -> get(alert).richCustomFields.toSeq.map(_.toOutput.toJson),
- "description" -> c.description,
- "tags" -> (`case`.tags ++ alert.tags).distinct
+ if (get(alert).isImported)
+ Failure(BadRequestError("Alert is already imported"))
+ else
+ auditSrv
+ .mergeAudits {
+ // No audit for markAsRead and observables
+ // Audits for customFields, description and tags
+ val description = `case`.description + s"\n \n#### Merged with alert #${alert.sourceRef} ${alert.title}\n\n${alert.description.trim}"
+ for {
+ _ <- markAsRead(alert._id)
+ _ <- importObservables(alert, `case`)
+ _ <- importCustomFields(alert, `case`)
+ _ <- caseSrv.addTags(`case`, alert.tags.toSet)
+ _ <- alertCaseSrv.create(AlertCase(), alert, `case`)
+ c <- caseSrv.get(`case`).update(_.description, description).getOrFail("Case")
+ details <- Success(
+ Json.obj(
+ "customFields" -> get(alert).richCustomFields.toSeq.map(_.toOutput.toJson),
+ "description" -> c.description,
+ "tags" -> (`case`.tags ++ alert.tags).distinct
+ )
)
- )
- } yield details
- }(details => auditSrv.alertToCase.merge(alert, `case`, Some(details)))
- .flatMap(_ => caseSrv.getOrFail(`case`._id))
+ } yield details
+ }(details => auditSrv.alertToCase.merge(alert, `case`, Some(details)))
+ .map(_ => integrityCheckActor ! EntityAdded("Alert"))
+ .flatMap(_ => caseSrv.getOrFail(`case`._id))
def importObservables(alert: Alert with Entity, `case`: Case with Entity)(implicit
graph: Graph,
@@ -406,6 +414,24 @@ object AlertOps {
def imported: Traversal[Boolean, Boolean, IdentityConverter[Boolean]] =
traversal.choose(_.has(_.caseId), onTrue = true, onFalse = false)
+ def isImported: Boolean =
+ traversal.has(_.caseId).exists
+
+ def importDate: Traversal[Date, Date, Converter[Date, Date]] =
+ traversal.outE[AlertCase].value(_._createdAt)
+
+ def handlingDuration: Traversal[Long, Long, IdentityConverter[Long]] =
+ traversal.coalesceIdent(
+ _.filter(_.outE[AlertCase])
+ .sack(
+ (_: JLong, importDate: JLong) => importDate,
+ _.by(_.importDate.graphMap[Long, JLong, Converter[Long, JLong]](_.getTime, Converter.long))
+ )
+ .sack((_: Long) - (_: JLong), _.by(_._createdAt.graphMap[Long, JLong, Converter[Long, JLong]](_.getTime, Converter.long)))
+ .sack[Long],
+ _.constant(0L)
+ )
+
def similarCases(organisationSrv: OrganisationSrv, caseFilter: Option[Traversal.V[Case] => Traversal.V[Case]])(implicit
authContext: AuthContext
): Traversal[(RichCase, SimilarStats), JMap[String, Any], Converter[(RichCase, SimilarStats), JMap[String, Any]]] = {
@@ -559,3 +585,18 @@ object AlertOps {
implicit class AlertCustomFieldsOpsDefs(traversal: Traversal.E[AlertCustomField]) extends CustomFieldValueOpsDefs(traversal)
}
+
+class AlertIntegrityCheckOps @Inject() (@Named("with-thehive-schema") val db: Database, val service: AlertSrv) extends IntegrityCheckOps[Alert] {
+ override def check(): Unit = {
+ db.tryTransaction { implicit graph =>
+ service
+ .startTraversal
+ .flatMap(_.outE[AlertCase].range(1, 100))
+ .remove()
+ Success(())
+ }
+ ()
+ }
+
+ override def resolve(entities: Seq[Alert with Entity])(implicit graph: Graph): Try[Unit] = Success(())
+}
diff --git a/thehive/app/org/thp/thehive/services/AuditSrv.scala b/thehive/app/org/thp/thehive/services/AuditSrv.scala
index 9219af9616..84432e934b 100644
--- a/thehive/app/org/thp/thehive/services/AuditSrv.scala
+++ b/thehive/app/org/thp/thehive/services/AuditSrv.scala
@@ -37,25 +37,27 @@ class AuditSrv @Inject() (
db: Database
) extends VertexSrv[Audit] { auditSrv =>
lazy val userSrv: UserSrv = userSrvProvider.get
- val auditUserSrv = new EdgeSrv[AuditUser, Audit, User]
+ val alert = new SelfContextObjectAudit[Alert]
+ val alertToCase = new ObjectAudit[Alert, Case]
val auditedSrv = new EdgeSrv[Audited, Audit, Product]
val auditContextSrv = new EdgeSrv[AuditContext, Audit, Product]
+ val auditUserSrv = new EdgeSrv[AuditUser, Audit, User]
val `case` = new SelfContextObjectAudit[Case]
- val task = new SelfContextObjectAudit[Task]
- val observable = new SelfContextObjectAudit[Observable]
- val log = new ObjectAudit[Log, Task]
val caseTemplate = new SelfContextObjectAudit[CaseTemplate]
- val taskInTemplate = new ObjectAudit[Task, CaseTemplate]
- val alert = new SelfContextObjectAudit[Alert]
- val alertToCase = new ObjectAudit[Alert, Case]
- val share = new ShareAudit
- val observableInAlert = new ObjectAudit[Observable, Alert]
- val user = new UserAudit
+ val customField = new SelfContextObjectAudit[CustomField]
val dashboard = new SelfContextObjectAudit[Dashboard]
+ val log = new ObjectAudit[Log, Task]
+ val observable = new SelfContextObjectAudit[Observable]
+ val observableInAlert = new ObjectAudit[Observable, Alert]
val organisation = new SelfContextObjectAudit[Organisation]
- val profile = new SelfContextObjectAudit[Profile]
- val customField = new SelfContextObjectAudit[CustomField]
val page = new SelfContextObjectAudit[Page]
+ val pattern = new SelfContextObjectAudit[Pattern]
+ val procedure = new ObjectAudit[Procedure, Case]
+ val profile = new SelfContextObjectAudit[Profile]
+ val share = new ShareAudit
+ val task = new SelfContextObjectAudit[Task]
+ val taskInTemplate = new ObjectAudit[Task, CaseTemplate]
+ val user = new UserAudit
private val pendingAuditsLock = new Object
private val transactionAuditIdsLock = new Object
private val unauditedTransactionsLock = new Object
diff --git a/thehive/app/org/thp/thehive/services/CaseSrv.scala b/thehive/app/org/thp/thehive/services/CaseSrv.scala
index 0700d535b7..4a36bb6c88 100644
--- a/thehive/app/org/thp/thehive/services/CaseSrv.scala
+++ b/thehive/app/org/thp/thehive/services/CaseSrv.scala
@@ -10,8 +10,8 @@ import org.thp.scalligraph.query.PredicateOps.PredicateOpsDefs
import org.thp.scalligraph.query.PropertyUpdater
import org.thp.scalligraph.services._
import org.thp.scalligraph.traversal.TraversalOps._
-import org.thp.scalligraph.traversal.{Converter, Graph, StepLabel, Traversal}
-import org.thp.scalligraph.{CreateError, EntityId, EntityIdOrName, EntityName, RichOptionTry, RichSeq}
+import org.thp.scalligraph.traversal._
+import org.thp.scalligraph.{EntityId, EntityIdOrName, EntityName, RichOptionTry, RichSeq}
import org.thp.thehive.controllers.v1.Conversion._
import org.thp.thehive.dto.v1.InputCustomFieldValue
import org.thp.thehive.models._
@@ -25,9 +25,10 @@ import org.thp.thehive.services.UserOps._
import play.api.cache.SyncCacheApi
import play.api.libs.json.{JsNull, JsObject, JsValue, Json}
+import java.lang.{Long => JLong}
import java.util.{Date, List => JList, Map => JMap}
import javax.inject.{Inject, Named, Singleton}
-import scala.util.{Failure, Success, Try}
+import scala.util.{Success, Try}
@Singleton
class CaseSrv @Inject() (
@@ -576,12 +577,23 @@ object CaseOps {
def alert: Traversal.V[Alert] = traversal.in[AlertCase].v[Alert]
+ def procedure: Traversal.V[Procedure] = traversal.out[CaseProcedure].v[Procedure]
+
def isActionRequired(implicit authContext: AuthContext): Traversal[Boolean, Boolean, Converter.Identity[Boolean]] =
traversal.choose(_.share(authContext).outE[ShareTask].has(_.actionRequired, true), true, false)
+ def handlingDuration: Traversal[Long, Long, IdentityConverter[Long]] =
+ traversal.coalesceIdent(
+ _.has(_.endDate)
+ .sack(
+ (_: JLong, importDate: JLong) => importDate,
+ _.by(_.value(_.endDate).graphMap[Long, JLong, Converter[Long, JLong]](_.getTime, Converter.long))
+ )
+ .sack((_: Long) - (_: JLong), _.by(_._createdAt.graphMap[Long, JLong, Converter[Long, JLong]](_.getTime, Converter.long)))
+ .sack[Long],
+ _.constant(0L)
+ )
}
-
-// implicit class CaseCustomFieldsOpsDefs(traversal: Traversal.E[CaseCustomField]) extends CustomFieldValueOpsDefs(traversal)
}
class CaseIntegrityCheckOps @Inject() (val db: Database, val service: CaseSrv) extends IntegrityCheckOps[Case] {
diff --git a/thehive/app/org/thp/thehive/services/ObservableSrv.scala b/thehive/app/org/thp/thehive/services/ObservableSrv.scala
index 34b0fa7c45..6395072d8c 100644
--- a/thehive/app/org/thp/thehive/services/ObservableSrv.scala
+++ b/thehive/app/org/thp/thehive/services/ObservableSrv.scala
@@ -12,6 +12,7 @@ import org.thp.scalligraph.traversal.{Converter, Graph, StepLabel, Traversal}
import org.thp.scalligraph.utils.Hash
import org.thp.scalligraph.{BadRequestError, CreateError, EntityId, EntityIdOrName, EntityName, RichSeq}
import org.thp.thehive.models._
+import org.thp.thehive.services.AlertOps._
import org.thp.thehive.services.ObservableOps._
import org.thp.thehive.services.OrganisationOps._
import org.thp.thehive.services.ShareOps._
@@ -209,6 +210,12 @@ object ObservableOps {
else
traversal.empty
+ def canManage(organisationSrv: OrganisationSrv)(implicit authContext: AuthContext): Traversal.V[Observable] =
+ if (authContext.isPermitted(Permissions.manageAlert))
+ traversal.filter(_.or(_.alert.visible(organisationSrv), _.can(Permissions.manageObservable)))
+ else
+ can(Permissions.manageObservable)
+
def userPermissions(implicit authContext: AuthContext): Traversal[Set[Permission], Vertex, Converter[Set[Permission], Vertex]] =
traversal
.share(authContext.organisation)
diff --git a/thehive/app/org/thp/thehive/services/OrganisationSrv.scala b/thehive/app/org/thp/thehive/services/OrganisationSrv.scala
index d2bc34cdcf..a3a45e34a2 100644
--- a/thehive/app/org/thp/thehive/services/OrganisationSrv.scala
+++ b/thehive/app/org/thp/thehive/services/OrganisationSrv.scala
@@ -17,11 +17,12 @@ import play.api.cache.SyncCacheApi
import play.api.libs.json.JsObject
import java.util.{Map => JMap}
-import javax.inject.{Inject, Named, Singleton}
+import javax.inject.{Inject, Named, Provider, Singleton}
import scala.util.{Failure, Success, Try}
@Singleton
class OrganisationSrv @Inject() (
+ taxonomySrvProvider: Provider[TaxonomySrv],
roleSrv: RoleSrv,
profileSrv: ProfileSrv,
auditSrv: AuditSrv,
@@ -29,9 +30,10 @@ class OrganisationSrv @Inject() (
@Named("integrity-check-actor") integrityCheckActor: ActorRef,
cache: SyncCacheApi
) extends VertexSrv[Organisation] {
-
- val organisationOrganisationSrv = new EdgeSrv[OrganisationOrganisation, Organisation, Organisation]
- val organisationShareSrv = new EdgeSrv[OrganisationShare, Organisation, Share]
+ lazy val taxonomySrv: TaxonomySrv = taxonomySrvProvider.get
+ val organisationOrganisationSrv = new EdgeSrv[OrganisationOrganisation, Organisation, Organisation]
+ val organisationShareSrv = new EdgeSrv[OrganisationShare, Organisation, Share]
+ val organisationTaxonomySrv = new EdgeSrv[OrganisationTaxonomy, Organisation, Taxonomy]
override def createEntity(e: Organisation)(implicit graph: Graph, authContext: AuthContext): Try[Organisation with Entity] = {
integrityCheckActor ! EntityAdded("Organisation")
@@ -46,11 +48,15 @@ class OrganisationSrv @Inject() (
_ <- roleSrv.create(user, createdOrganisation, profileSrv.orgAdmin)
} yield createdOrganisation
- def create(e: Organisation)(implicit graph: Graph, authContext: AuthContext): Try[Organisation with Entity] =
+ def create(e: Organisation)(implicit graph: Graph, authContext: AuthContext): Try[Organisation with Entity] = {
+ val activeTaxos = getByName("admin").taxonomies.toSeq
for {
- createdOrganisation <- createEntity(e)
- _ <- auditSrv.organisation.create(createdOrganisation, createdOrganisation.toJson)
- } yield createdOrganisation
+ newOrga <- createEntity(e)
+ _ <- taxonomySrv.createFreetag(newOrga)
+ _ <- activeTaxos.toTry(t => organisationTaxonomySrv.create(OrganisationTaxonomy(), newOrga, t))
+ _ <- auditSrv.organisation.create(newOrga, newOrga.toJson)
+ } yield newOrga
+ }
def current(implicit graph: Graph, authContext: AuthContext): Traversal.V[Organisation] = get(authContext.organisation)
@@ -139,6 +145,8 @@ object OrganisationOps {
def shares: Traversal.V[Share] = traversal.out[OrganisationShare].v[Share]
+ def taxonomies: Traversal.V[Taxonomy] = traversal.out[OrganisationTaxonomy].v[Taxonomy]
+
def caseTemplates: Traversal.V[CaseTemplate] = traversal.in[CaseTemplateOrganisation].v[CaseTemplate]
def users(requiredPermission: Permission): Traversal.V[User] =
diff --git a/thehive/app/org/thp/thehive/services/PatternSrv.scala b/thehive/app/org/thp/thehive/services/PatternSrv.scala
new file mode 100644
index 0000000000..495dff1ef0
--- /dev/null
+++ b/thehive/app/org/thp/thehive/services/PatternSrv.scala
@@ -0,0 +1,77 @@
+package org.thp.thehive.services
+
+import org.thp.scalligraph.EntityIdOrName
+import org.thp.scalligraph.auth.AuthContext
+import org.thp.scalligraph.models.{Database, Entity}
+import org.thp.scalligraph.services._
+import org.thp.scalligraph.traversal.TraversalOps._
+import org.thp.scalligraph.traversal.{Converter, Graph, Traversal}
+import org.thp.thehive.models._
+import org.thp.thehive.services.CaseOps._
+import org.thp.thehive.services.PatternOps._
+import org.thp.thehive.services.ProcedureOps._
+
+import java.util.{Map => JMap}
+import javax.inject.{Inject, Named, Singleton}
+import scala.util.{Success, Try}
+
+@Singleton
+class PatternSrv @Inject() (
+ auditSrv: AuditSrv,
+ caseSrv: CaseSrv,
+ organisationSrv: OrganisationSrv
+)(implicit
+ @Named("with-thehive-schema") db: Database
+) extends VertexSrv[Pattern] {
+ val patternPatternSrv = new EdgeSrv[PatternPattern, Pattern, Pattern]
+
+ def cannotBeParent(child: Pattern with Entity, parent: Pattern with Entity)(implicit graph: Graph): Boolean =
+ child._id == parent._id || get(child).parent.getEntity(parent).exists
+
+ def setParent(child: Pattern with Entity, parent: Pattern with Entity)(implicit authContext: AuthContext, graph: Graph): Try[Unit] =
+ if (cannotBeParent(child, parent)) Success(())
+ else patternPatternSrv.create(PatternPattern(), parent, child).map(_ => ())
+
+ override def getByName(name: String)(implicit graph: Graph): Traversal.V[Pattern] =
+ Try(startTraversal.getByPatternId(name)).getOrElse(startTraversal.limit(0))
+
+ def getCasePatterns(caseId: String)(implicit graph: Graph): Try[Seq[String]] =
+ for {
+ caze <- caseSrv.get(EntityIdOrName(caseId)).getOrFail("Case")
+ patterns = caseSrv.get(caze).procedure.pattern.richPattern.toSeq
+ } yield patterns.map(_.patternId)
+
+ def remove(pattern: Pattern with Entity)(implicit graph: Graph, authContext: AuthContext): Try[Unit] =
+ for {
+ organisation <- organisationSrv.getOrFail(authContext.organisation)
+ _ <- auditSrv.pattern.delete(pattern, organisation)
+ } yield get(pattern).remove()
+
+}
+
+object PatternOps {
+ implicit class PatternOpsDefs(traversal: Traversal.V[Pattern]) {
+ def getByPatternId(patternId: String): Traversal.V[Pattern] = traversal.has(_.patternId, patternId)
+
+ def parent: Traversal.V[Pattern] =
+ traversal.in[PatternPattern].v[Pattern]
+
+ def procedure: Traversal.V[Procedure] =
+ traversal.in[ProcedurePattern].v[Procedure]
+
+ def alreadyImported(patternId: String): Boolean =
+ traversal.getByPatternId(patternId).exists
+
+ def richPattern: Traversal[RichPattern, JMap[String, Any], Converter[RichPattern, JMap[String, Any]]] =
+ traversal
+ .project(
+ _.by
+ .by(_.in[PatternPattern].v[Pattern].fold)
+ )
+ .domainMap {
+ case (pattern, parent) =>
+ RichPattern(pattern, parent.headOption)
+ }
+
+ }
+}
diff --git a/thehive/app/org/thp/thehive/services/ProcedureSrv.scala b/thehive/app/org/thp/thehive/services/ProcedureSrv.scala
new file mode 100644
index 0000000000..9448706c3e
--- /dev/null
+++ b/thehive/app/org/thp/thehive/services/ProcedureSrv.scala
@@ -0,0 +1,72 @@
+package org.thp.thehive.services
+
+import org.thp.scalligraph.EntityIdOrName
+import org.thp.scalligraph.auth.AuthContext
+import org.thp.scalligraph.models.{Database, Entity}
+import org.thp.scalligraph.services._
+import org.thp.scalligraph.traversal.TraversalOps.TraversalOpsDefs
+import org.thp.scalligraph.traversal.{Converter, Graph, StepLabel, Traversal}
+import org.thp.thehive.controllers.v1.Conversion._
+import org.thp.thehive.models._
+import org.thp.thehive.services.ProcedureOps._
+
+import java.util.{Map => JMap}
+import javax.inject.{Inject, Named, Singleton}
+import scala.util.Try
+
+@Singleton
+class ProcedureSrv @Inject() (
+ auditSrv: AuditSrv,
+ caseSrv: CaseSrv,
+ organisationSrv: OrganisationSrv,
+ patternSrv: PatternSrv
+)(implicit
+ @Named("with-thehive-schema") db: Database
+) extends VertexSrv[Procedure] {
+ val caseProcedureSrv = new EdgeSrv[CaseProcedure, Case, Procedure]
+ val procedurePatternSrv = new EdgeSrv[ProcedurePattern, Procedure, Pattern]
+
+ def create(p: Procedure, caseId: String, patternId: String)(implicit graph: Graph, authContext: AuthContext): Try[RichProcedure] =
+ for {
+ caze <- caseSrv.getOrFail(EntityIdOrName(caseId))
+ pattern <- patternSrv.getOrFail(EntityIdOrName(patternId))
+ procedure <- createEntity(p)
+ _ <- caseProcedureSrv.create(CaseProcedure(), caze, procedure)
+ _ <- procedurePatternSrv.create(ProcedurePattern(), procedure, pattern)
+ richProcedure = RichProcedure(procedure, pattern)
+ _ <- auditSrv.procedure.create(procedure, caze, richProcedure.toJson)
+ } yield richProcedure
+
+ override def get(idOrName: EntityIdOrName)(implicit graph: Graph): Traversal.V[Procedure] =
+ idOrName.fold(getByIds(_), _ => startTraversal.limit(0))
+
+ def remove(procedure: Procedure with Entity)(implicit graph: Graph, authContext: AuthContext): Try[Unit] =
+ for {
+ caze <- get(procedure).caze.getOrFail("Case")
+ _ <- auditSrv.procedure.delete(procedure, caze)
+ } yield get(procedure).remove()
+
+}
+
+object ProcedureOps {
+ implicit class ProcedureOpsDefs(traversal: Traversal.V[Procedure]) {
+
+ def pattern: Traversal.V[Pattern] =
+ traversal.out[ProcedurePattern].v[Pattern]
+
+ def caze: Traversal.V[Case] =
+ traversal.in[CaseProcedure].v[Case]
+
+ def richProcedure: Traversal[RichProcedure, JMap[String, Any], Converter[RichProcedure, JMap[String, Any]]] = {
+ val procedure = StepLabel.v[Procedure]
+ val pattern = StepLabel.v[Pattern]
+ traversal
+ .as(procedure)
+ .in[ProcedurePattern]
+ .v[Pattern]
+ .as(pattern)
+ .select((procedure, pattern))
+ .domainMap { case (procedure, pattern) => RichProcedure(procedure, pattern) }
+ }
+ }
+}
diff --git a/thehive/app/org/thp/thehive/services/ShareSrv.scala b/thehive/app/org/thp/thehive/services/ShareSrv.scala
index 1fd893d33a..915cc8b9b6 100644
--- a/thehive/app/org/thp/thehive/services/ShareSrv.scala
+++ b/thehive/app/org/thp/thehive/services/ShareSrv.scala
@@ -339,6 +339,8 @@ object ShareOps {
def organisation: Traversal.V[Organisation] = traversal.in[OrganisationShare].v[Organisation]
+ def visible(implicit authContext: AuthContext): Traversal.V[Share] = traversal.filter(_.organisation.visible)
+
def tasks: Traversal.V[Task] = traversal.out[ShareTask].v[Task]
def byTask(taskId: EntityIdOrName): Traversal.V[Share] =
diff --git a/thehive/app/org/thp/thehive/services/TagSrv.scala b/thehive/app/org/thp/thehive/services/TagSrv.scala
index 4690e231f5..850f72a452 100644
--- a/thehive/app/org/thp/thehive/services/TagSrv.scala
+++ b/thehive/app/org/thp/thehive/services/TagSrv.scala
@@ -17,26 +17,20 @@ import scala.util.{Success, Try}
@Singleton
class TagSrv @Inject() (appConfig: ApplicationConfig, @Named("integrity-check-actor") integrityCheckActor: ActorRef) extends VertexSrv[Tag] {
- val autoCreateConfig: ConfigItem[Boolean, Boolean] =
+ private val autoCreateConfig: ConfigItem[Boolean, Boolean] =
appConfig.item[Boolean]("tags.autocreate", "If true, create automatically tag if it doesn't exist")
def autoCreate: Boolean = autoCreateConfig.get
- val defaultNamespaceConfig: ConfigItem[String, String] =
+ private val defaultNamespaceConfig: ConfigItem[String, String] =
appConfig.item[String]("tags.defaultNamespace", "Default namespace of the automatically created tags")
def defaultNamespace: String = defaultNamespaceConfig.get
- val defaultColourConfig: ConfigItem[String, Int] =
- appConfig.mapItem[String, Int](
- "tags.defaultColour",
- "Default colour of the automatically created tags",
- {
- case s if s(0) == '#' => Try(Integer.parseUnsignedInt(s.tail, 16)).getOrElse(defaultColour)
- case _ => defaultColour
- }
- )
- def defaultColour: Int = defaultColourConfig.get
+ private val defaultColourConfig: ConfigItem[String, String] =
+ appConfig.item[String]("tags.defaultColour", "Default colour of the automatically created tags")
+
+ def defaultColour: String = defaultColourConfig.get
def parseString(tagName: String): Tag =
Tag.fromString(tagName, defaultNamespace, defaultColour)
diff --git a/thehive/app/org/thp/thehive/services/TaxonomySrv.scala b/thehive/app/org/thp/thehive/services/TaxonomySrv.scala
new file mode 100644
index 0000000000..f9fe9e3615
--- /dev/null
+++ b/thehive/app/org/thp/thehive/services/TaxonomySrv.scala
@@ -0,0 +1,124 @@
+package org.thp.thehive.services
+
+import org.apache.tinkerpop.gremlin.process.traversal.TextP
+import org.thp.scalligraph.auth.AuthContext
+import org.thp.scalligraph.models.{Database, Entity}
+import org.thp.scalligraph.services.{EdgeSrv, VertexSrv}
+import org.thp.scalligraph.traversal.Converter.Identity
+import org.thp.scalligraph.traversal.TraversalOps.TraversalOpsDefs
+import org.thp.scalligraph.traversal.{Converter, Graph, Traversal}
+import org.thp.scalligraph.{BadRequestError, EntityId, EntityIdOrName, RichSeq}
+import org.thp.thehive.models._
+import org.thp.thehive.services.OrganisationOps._
+import org.thp.thehive.services.TaxonomyOps._
+
+import java.util.{Map => JMap}
+import javax.inject.{Inject, Named, Singleton}
+import scala.util.{Failure, Success, Try}
+
+@Singleton
+class TaxonomySrv @Inject() (
+ organisationSrv: OrganisationSrv
+)(implicit @Named("with-thehive-schema") db: Database)
+ extends VertexSrv[Taxonomy] {
+
+ val taxonomyTagSrv = new EdgeSrv[TaxonomyTag, Taxonomy, Tag]
+ val organisationTaxonomySrv = new EdgeSrv[OrganisationTaxonomy, Organisation, Taxonomy]
+
+ def create(taxo: Taxonomy, tags: Seq[Tag with Entity])(implicit graph: Graph, authContext: AuthContext): Try[RichTaxonomy] =
+ for {
+ taxonomy <- createEntity(taxo)
+ _ <- tags.toTry(t => taxonomyTagSrv.create(TaxonomyTag(), taxonomy, t))
+ richTaxonomy <- Try(RichTaxonomy(taxonomy, tags))
+ } yield richTaxonomy
+
+ def createFreetag(organisation: Organisation with Entity)(implicit graph: Graph, authContext: AuthContext): Try[RichTaxonomy] = {
+ val customTaxo = Taxonomy(s"_freetags_${organisation._id}", "Custom taxonomy", 1)
+ for {
+ taxonomy <- createEntity(customTaxo)
+ richTaxonomy <- Try(RichTaxonomy(taxonomy, Seq()))
+ _ <- organisationTaxonomySrv.create(OrganisationTaxonomy(), organisation, taxonomy)
+ } yield richTaxonomy
+ }
+
+ override def getByName(name: String)(implicit graph: Graph): Traversal.V[Taxonomy] =
+ Try(startTraversal.getByNamespace(name)).getOrElse(startTraversal.limit(0))
+
+ def activate(taxonomyId: EntityIdOrName)(implicit graph: Graph, authContext: AuthContext): Try[Unit] =
+ for {
+ taxo <- get(taxonomyId).getOrFail("Taxonomy")
+ _ <-
+ if (taxo.namespace.startsWith("_freetags")) Failure(BadRequestError("Cannot activate a freetags taxonomy"))
+ else Success(())
+ _ <-
+ organisationSrv
+ .startTraversal
+ .filterNot(_.out[OrganisationTaxonomy].v[Taxonomy].has(_.namespace, taxo.namespace))
+ .toSeq
+ .toTry(o => organisationTaxonomySrv.create(OrganisationTaxonomy(), o, taxo))
+ } yield ()
+
+ def deactivate(taxonomyId: EntityIdOrName)(implicit graph: Graph): Try[Unit] =
+ for {
+ taxo <- getOrFail(taxonomyId)
+ _ <-
+ if (taxo.namespace.startsWith("_freetags")) Failure(BadRequestError("Cannot deactivate a freetags taxonomy"))
+ else Success(())
+ } yield get(taxonomyId).inE[OrganisationTaxonomy].remove()
+
+}
+
+object TaxonomyOps {
+ implicit class TaxonomyOpsDefs(traversal: Traversal.V[Taxonomy]) {
+
+ def get(idOrName: EntityId): Traversal.V[Taxonomy] =
+ idOrName.fold(traversal.getByIds(_), getByNamespace)
+
+ def getByNamespace(namespace: String): Traversal.V[Taxonomy] = traversal.has(_.namespace, namespace)
+
+ def visible(implicit authContext: AuthContext): Traversal.V[Taxonomy] =
+ if (authContext.isPermitted(Permissions.manageTaxonomy))
+ noFreetags
+ else
+ traversal.filter(_.organisations.get(authContext.organisation))
+
+ private def noFreetags: Traversal.V[Taxonomy] =
+ traversal.filterNot(_.has(_.namespace, TextP.startingWith("_freetags")))
+
+ def alreadyImported(namespace: String): Boolean =
+ traversal.getByNamespace(namespace).exists
+
+ def organisations: Traversal.V[Organisation] = traversal.in[OrganisationTaxonomy].v[Organisation]
+
+ def enabled: Traversal[Boolean, Boolean, Identity[Boolean]] =
+ traversal.choose(_.organisations, true, false)
+
+ def tags: Traversal.V[Tag] = traversal.out[TaxonomyTag].v[Tag]
+
+ def richTaxonomy: Traversal[RichTaxonomy, JMap[String, Any], Converter[RichTaxonomy, JMap[String, Any]]] =
+ traversal
+ .project(
+ _.by
+ .by(_.tags.fold)
+ )
+ .domainMap { case (taxonomy, tags) => RichTaxonomy(taxonomy, tags) }
+
+ def richTaxonomyWithCustomRenderer[D, G, C <: Converter[D, G]](
+ entityRenderer: Traversal.V[Taxonomy] => Traversal[D, G, C]
+ ): Traversal[(RichTaxonomy, D), JMap[String, Any], Converter[(RichTaxonomy, D), JMap[String, Any]]] =
+ traversal
+ .project(
+ _.by
+ .by(_.tags.fold)
+ .by(_.enabled)
+ .by(entityRenderer)
+ )
+ .domainMap {
+ case (taxo, tags, _, renderedEntity) =>
+ RichTaxonomy(
+ taxo,
+ tags
+ ) -> renderedEntity
+ }
+ }
+}
diff --git a/thehive/app/org/thp/thehive/services/UserSrv.scala b/thehive/app/org/thp/thehive/services/UserSrv.scala
index 788b426ed9..9fffac1c10 100644
--- a/thehive/app/org/thp/thehive/services/UserSrv.scala
+++ b/thehive/app/org/thp/thehive/services/UserSrv.scala
@@ -40,11 +40,6 @@ class UserSrv @Inject() (
val userAttachmentSrv = new EdgeSrv[UserAttachment, User, Attachment]
- override def createEntity(e: User)(implicit graph: Graph, authContext: AuthContext): Try[User with Entity] = {
- integrityCheckActor ! EntityAdded("User")
- super.createEntity(e)
- }
-
def checkUser(user: User): Try[User] = {
val login =
if (!user.login.contains('@') && defaultUserDomain.isDefined) s"${user.login}@${defaultUserDomain.get}".toLowerCase
@@ -63,6 +58,7 @@ class UserSrv @Inject() (
roleSrv.create(user, organisation, profile)
else
Success(())).flatMap { _ =>
+ integrityCheckActor ! EntityAdded("User")
for {
richUser <- get(user).richUser(authContext, organisation._id).getOrFail("User")
_ <- auditSrv.user.create(user, richUser.toJson)
diff --git a/thehive/app/org/thp/thehive/services/th3/Aggregation.scala b/thehive/app/org/thp/thehive/services/th3/Aggregation.scala
index 05791af334..da4058772e 100644
--- a/thehive/app/org/thp/thehive/services/th3/Aggregation.scala
+++ b/thehive/app/org/thp/thehive/services/th3/Aggregation.scala
@@ -182,7 +182,8 @@ case class AggAvg(aggName: Option[String], fieldName: String) extends Aggregatio
property
.select(fieldPath, t, authContext)
.mean
- .domainMap(avg => Output(Json.obj(name -> avg.asInstanceOf[Double]))),
+ .domainMap(avg => Output(Json.obj(name -> avg)))
+ .asInstanceOf[Traversal.Domain[Output[_]]],
Output(Json.obj(name -> JsNull))
)
}
diff --git a/thehive/conf/reference.conf b/thehive/conf/reference.conf
index 403e23198c..523fb7d7d1 100644
--- a/thehive/conf/reference.conf
+++ b/thehive/conf/reference.conf
@@ -163,6 +163,10 @@ integrityCheck {
initialDelay: 1 minute
interval: 10 minutes
}
+ alert {
+ initialDelay: 5 minute
+ interval: 30 minutes
+ }
}
diff --git a/thehive/test/org/thp/thehive/DatabaseBuilder.scala b/thehive/test/org/thp/thehive/DatabaseBuilder.scala
index 73aa337d83..cd9d796a0b 100644
--- a/thehive/test/org/thp/thehive/DatabaseBuilder.scala
+++ b/thehive/test/org/thp/thehive/DatabaseBuilder.scala
@@ -1,7 +1,5 @@
package org.thp.thehive
-import java.io.File
-import javax.inject.{Inject, Singleton}
import org.scalactic.Or
import org.thp.scalligraph.auth.AuthContext
import org.thp.scalligraph.controllers._
@@ -14,6 +12,8 @@ import org.thp.thehive.services._
import play.api.Logger
import play.api.libs.json.{JsArray, JsObject, JsValue, Json}
+import java.io.File
+import javax.inject.{Inject, Singleton}
import scala.io.Source
import scala.reflect.runtime.{universe => ru}
import scala.util.{Failure, Success, Try}
@@ -21,27 +21,30 @@ import scala.util.{Failure, Success, Try}
@Singleton
class DatabaseBuilder @Inject() (
schema: Schema,
- userSrv: UserSrv,
- organisationSrv: OrganisationSrv,
- profileSrv: ProfileSrv,
+ alertSrv: AlertSrv,
+ attachmentSrv: AttachmentSrv,
caseSrv: CaseSrv,
- customFieldSrv: CustomFieldSrv,
caseTemplateSrv: CaseTemplateSrv,
+ customFieldSrv: CustomFieldSrv,
+ dashboardSrv: DashboardSrv,
+ dataSrv: DataSrv,
impactStatusSrv: ImpactStatusSrv,
- resolutionStatusSrv: ResolutionStatusSrv,
- shareSrv: ShareSrv,
- roleSrv: RoleSrv,
- observableSrv: ObservableSrv,
- observableTypeSrv: ObservableTypeSrv,
- taskSrv: TaskSrv,
- tagSrv: TagSrv,
keyValueSrv: KeyValueSrv,
- dataSrv: DataSrv,
logSrv: LogSrv,
- alertSrv: AlertSrv,
- attachmentSrv: AttachmentSrv,
- dashboardSrv: DashboardSrv,
+ observableSrv: ObservableSrv,
+ observableTypeSrv: ObservableTypeSrv,
+ organisationSrv: OrganisationSrv,
pageSrv: PageSrv,
+ patternSrv: PatternSrv,
+ procedureSrv: ProcedureSrv,
+ profileSrv: ProfileSrv,
+ resolutionStatusSrv: ResolutionStatusSrv,
+ roleSrv: RoleSrv,
+ shareSrv: ShareSrv,
+ tagSrv: TagSrv,
+ taskSrv: TaskSrv,
+ taxonomySrv: TaxonomySrv,
+ userSrv: UserSrv,
integrityChecks: Set[GenIntegrityCheckOps]
) {
@@ -63,29 +66,35 @@ class DatabaseBuilder @Inject() (
db.tryTransaction { implicit graph =>
val idMap =
createVertex(caseSrv, FieldsParser[Case]) ++
- createVertex(userSrv, FieldsParser[User]) ++
- createVertex(customFieldSrv, FieldsParser[CustomField]) ++
- createVertex(organisationSrv, FieldsParser[Organisation]) ++
+ createVertex(alertSrv, FieldsParser[Alert]) ++
+ createVertex(attachmentSrv, FieldsParser[Attachment]) ++
createVertex(caseTemplateSrv, FieldsParser[CaseTemplate]) ++
- createVertex(shareSrv, FieldsParser[Share]) ++
- createVertex(roleSrv, FieldsParser[Role]) ++
- createVertex(profileSrv, FieldsParser[Profile]) ++
- createVertex(observableSrv, FieldsParser[Observable]) ++
- createVertex(observableTypeSrv, FieldsParser[ObservableType]) ++
- createVertex(taskSrv, FieldsParser[Task]) ++
- createVertex(keyValueSrv, FieldsParser[KeyValue]) ++
+ createVertex(customFieldSrv, FieldsParser[CustomField]) ++
+ createVertex(dashboardSrv, FieldsParser[Dashboard]) ++
createVertex(dataSrv, FieldsParser[Data]) ++
+ createVertex(impactStatusSrv, FieldsParser[ImpactStatus]) ++
+ createVertex(keyValueSrv, FieldsParser[KeyValue]) ++
createVertex(logSrv, FieldsParser[Log]) ++
- createVertex(alertSrv, FieldsParser[Alert]) ++
+ createVertex(observableSrv, FieldsParser[Observable]) ++
+ createVertex(observableTypeSrv, FieldsParser[ObservableType]) ++
+ createVertex(organisationSrv, FieldsParser[Organisation]) ++
+ createVertex(pageSrv, FieldsParser[Page]) ++
+ createVertex(patternSrv, FieldsParser[Pattern]) ++
+ createVertex(procedureSrv, FieldsParser[Procedure]) ++
+ createVertex(profileSrv, FieldsParser[Profile]) ++
createVertex(resolutionStatusSrv, FieldsParser[ResolutionStatus]) ++
- createVertex(impactStatusSrv, FieldsParser[ImpactStatus]) ++
- createVertex(attachmentSrv, FieldsParser[Attachment]) ++
+ createVertex(roleSrv, FieldsParser[Role]) ++
+ createVertex(shareSrv, FieldsParser[Share]) ++
createVertex(tagSrv, FieldsParser[Tag]) ++
- createVertex(pageSrv, FieldsParser[Page]) ++
- createVertex(dashboardSrv, FieldsParser[Dashboard])
+ createVertex(taskSrv, FieldsParser[Task]) ++
+ createVertex(taxonomySrv, FieldsParser[Taxonomy]) ++
+ createVertex(userSrv, FieldsParser[User])
createEdge(organisationSrv.organisationOrganisationSrv, organisationSrv, organisationSrv, FieldsParser[OrganisationOrganisation], idMap)
createEdge(organisationSrv.organisationShareSrv, organisationSrv, shareSrv, FieldsParser[OrganisationShare], idMap)
+ createEdge(organisationSrv.organisationTaxonomySrv, organisationSrv, taxonomySrv, FieldsParser[OrganisationTaxonomy], idMap)
+
+ createEdge(taxonomySrv.taxonomyTagSrv, taxonomySrv, tagSrv, FieldsParser[TaxonomyTag], idMap)
createEdge(roleSrv.userRoleSrv, userSrv, roleSrv, FieldsParser[UserRole], idMap)
@@ -130,6 +139,12 @@ class DatabaseBuilder @Inject() (
createEdge(dashboardSrv.dashboardUserSrv, dashboardSrv, userSrv, FieldsParser[DashboardUser], idMap)
createEdge(dashboardSrv.organisationDashboardSrv, organisationSrv, dashboardSrv, FieldsParser[OrganisationDashboard], idMap)
+
+ createEdge(patternSrv.patternPatternSrv, patternSrv, patternSrv, FieldsParser[PatternPattern], idMap)
+
+ createEdge(procedureSrv.caseProcedureSrv, caseSrv, procedureSrv, FieldsParser[CaseProcedure], idMap)
+ createEdge(procedureSrv.procedurePatternSrv, procedureSrv, patternSrv, FieldsParser[ProcedurePattern], idMap)
+
Success(())
}
}
diff --git a/thehive/test/org/thp/thehive/controllers/v0/ConfigCtrlTest.scala b/thehive/test/org/thp/thehive/controllers/v0/ConfigCtrlTest.scala
index 46caf3274b..17e88bb138 100644
--- a/thehive/test/org/thp/thehive/controllers/v0/ConfigCtrlTest.scala
+++ b/thehive/test/org/thp/thehive/controllers/v0/ConfigCtrlTest.scala
@@ -6,6 +6,9 @@ import play.api.libs.json.{JsObject, Json}
import play.api.test.{FakeRequest, PlaySpecification}
class ConfigCtrlTest extends PlaySpecification with TestAppBuilder {
+
+// TODO leave unused code ?
+//
// def getList = {
// val request = FakeRequest("GET", "/api/config")
// .withHeaders("user" -> "admin@thehive.local")
@@ -36,9 +39,10 @@ class ConfigCtrlTest extends PlaySpecification with TestAppBuilder {
status(result) must equalTo(204).updateMessage(s => s"$s\n${contentAsString(result)}")
- app[TagSrv].defaultColour must beEqualTo(0xff00)
+ app[TagSrv].defaultColour must beEqualTo("#00FF00")
}
-
+// TODO leave unused tests ?
+//
// "get user specific configuration" in testApp { app =>
// val request = FakeRequest("GET", "/api/config/user/organisation")
// .withHeaders("user" -> "admin@thehive.local")
diff --git a/thehive/test/org/thp/thehive/controllers/v0/ObservableCtrlTest.scala b/thehive/test/org/thp/thehive/controllers/v0/ObservableCtrlTest.scala
index c76dc02c1a..b46290e51d 100644
--- a/thehive/test/org/thp/thehive/controllers/v0/ObservableCtrlTest.scala
+++ b/thehive/test/org/thp/thehive/controllers/v0/ObservableCtrlTest.scala
@@ -1,9 +1,5 @@
package org.thp.thehive.controllers.v0
-import java.io.File
-import java.nio.file.{Path, Files => JFiles}
-import java.util.UUID
-
import akka.stream.Materializer
import io.scalaland.chimney.dsl._
import org.thp.scalligraph.AppBuilder
@@ -23,6 +19,10 @@ import play.api.mvc.MultipartFormData.FilePart
import play.api.mvc.{AnyContentAsMultipartFormData, Headers, MultipartFormData}
import play.api.test.{FakeRequest, NoTemporaryFileCreator, PlaySpecification}
+import java.io.File
+import java.nio.file.{Path, Files => JFiles}
+import java.util.UUID
+
case class TestObservable(
dataType: String,
data: Option[String] = None,
@@ -57,7 +57,7 @@ class ObservableCtrlTest extends PlaySpecification with TestAppBuilder {
"data":["multi","line","test"]
}
""".stripMargin))
- val result = app[ObservableCtrl].create("1")(request)
+ val result = app[ObservableCtrl].createInCase("1")(request)
status(result) must equalTo(201).updateMessage(s => s"$s\n${contentAsString(result)}")
val createdObservables = contentAsJson(result).as[Seq[OutputObservable]]
@@ -84,7 +84,7 @@ class ObservableCtrlTest extends PlaySpecification with TestAppBuilder {
"data":["observable", "in", "array"]
}
""".stripMargin))
- val result = app[ObservableCtrl].create("1")(request)
+ val result = app[ObservableCtrl].createInCase("1")(request)
status(result) must beEqualTo(201).updateMessage(s => s"$s\n${contentAsString(result)}")
@@ -139,9 +139,8 @@ class ObservableCtrlTest extends PlaySpecification with TestAppBuilder {
resSearchObservables.flatMap(_.data) must contain(exactly("observable", "in", "array", "h.fr"))
}
- "be able to create and get 2 observables with string data and attachment" in testApp { app =>
+ "be able to create and get 2 observables with string data" in testApp { app =>
WithFakeTemporaryFile { tempFile =>
- val hashes = Hasher(app.apply[Configuration].get[Seq[String]]("attachment.hash"): _*).fromPath(tempFile.path).map(_.toString)
val files = Seq(FilePart("attachment", "myfile.txt", Some("text/plain"), tempFile))
val dataParts = Map("_json" -> Seq("""
{
@@ -160,24 +159,70 @@ class ObservableCtrlTest extends PlaySpecification with TestAppBuilder {
Headers("user" -> "certuser@thehive.local"),
body = AnyContentAsMultipartFormData(MultipartFormData(dataParts, files, Nil))
)
- val result = app[ObservableCtrl].create("1")(request)
+ val result = app[ObservableCtrl].createInCase("1")(request)
status(result) must equalTo(201).updateMessage(s => s"$s\n${contentAsString(result)}")
val createdObservables = contentAsJson(result).as[Seq[OutputObservable]]
- createdObservables must have size 3
+ createdObservables must have size 2
createdObservables.map(_.dataType) must contain(be_==("ip")).forall
createdObservables.flatMap(_.data) must contain(exactly("127.0.0.1", "127.0.0.2"))
createdObservables.map(_.sighted) must contain(beFalse).forall
createdObservables.map(_.message) must contain(beSome("localhost")).forall
createdObservables.map(_.tags) must contain(be_==(Set("local", "host"))).forall
val attachmentOption = createdObservables.flatMap(_.attachment).headOption
- attachmentOption must beSome
- val attachment = attachmentOption.get
- attachment.name must beEqualTo("myfile.txt")
- attachment.hashes must containTheSameElementsAs(hashes)
- attachment.size must beEqualTo(tempFile.length())
- attachment.contentType must beEqualTo("text/plain")
+ attachmentOption must beNone
+ }
+ }
+
+ "be able to create and get 2 observables with string data and attachment" in testApp { app =>
+ WithFakeTemporaryFile { tempFile =>
+ val hasher = Hasher(app.apply[Configuration].get[Seq[String]]("attachment.hash"): _*)
+ val hashes = hasher.fromPath(tempFile.path).map(_.toString)
+ val helloHashes = hasher.fromString("Hello world").map(_.toString)
+ val files = Seq(FilePart("attachment", "myfile.txt", Some("text/plain"), tempFile))
+ val dataParts = Map("_json" -> Seq("""
+ {
+ "dataType":"file",
+ "ioc":false,
+ "sighted":false,
+ "tlp":2,
+ "message":"localhost",
+ "tags":["local", "host"],
+ "data":["hello.txt;text/plain;SGVsbG8gd29ybGQ="]
+ }
+ """))
+ val request = FakeRequest(
+ "POST",
+ s"/api/alert/testType;testSource;ref2/artifact",
+ Headers("user" -> "certuser@thehive.local"),
+ body = AnyContentAsMultipartFormData(MultipartFormData(dataParts, files, Nil))
+ )
+ val result = app[ObservableCtrl].createInAlert("testType;testSource;ref2")(request)
+ status(result) must equalTo(201).updateMessage(s => s"$s\n${contentAsString(result)}")
+ val createdObservables = contentAsJson(result).as[Seq[OutputObservable]]
+ createdObservables must have size 2
+ createdObservables.map(_.dataType) must contain(be_==("file")).forall
+ createdObservables.flatMap(_.data) must beEmpty
+ createdObservables.map(_.sighted) must contain(beFalse).forall
+ createdObservables.map(_.message) must contain(beSome("localhost")).forall
+ createdObservables.map(_.tags) must contain(be_==(Set("local", "host"))).forall
+ val attachments = createdObservables.flatMap(_.attachment)
+ attachments must have size 2
+ attachments must contain(beLike[OutputAttachment] {
+ case attachment =>
+ attachment.name must beEqualTo("myfile.txt")
+ attachment.hashes must containTheSameElementsAs(hashes)
+ attachment.size must beEqualTo(tempFile.length())
+ attachment.contentType must beEqualTo("text/plain")
+ })
+ attachments must contain(beLike[OutputAttachment] {
+ case attachment =>
+ attachment.name must beEqualTo("hello.txt")
+ attachment.hashes must containTheSameElementsAs(helloHashes)
+ attachment.size must beEqualTo(11)
+ attachment.contentType must beEqualTo("text/plain")
+ })
createdObservables.foreach(obs => obs must equalTo(getObservable(obs._id, app[ObservableCtrl])))
ok
}
@@ -219,7 +264,7 @@ class ObservableCtrlTest extends PlaySpecification with TestAppBuilder {
"data":"localhost"
}
"""))
- val result1 = app[ObservableCtrl].create("1")(request1)
+ val result1 = app[ObservableCtrl].createInCase("1")(request1)
status(result1) must beEqualTo(201).updateMessage(s => s"$s\n${contentAsString(result1)}")
getData("localhost", app) must have size 1
@@ -233,7 +278,7 @@ class ObservableCtrlTest extends PlaySpecification with TestAppBuilder {
"data":"localhost"
}
"""))
- val result2 = app[ObservableCtrl].create("2")(request2)
+ val result2 = app[ObservableCtrl].createInCase("2")(request2)
status(result2) must equalTo(201).updateMessage(s => s"$s\n${contentAsString(result2)}")
getData("localhost", app) must have size 1
@@ -273,7 +318,7 @@ class ObservableCtrlTest extends PlaySpecification with TestAppBuilder {
"data":"${UUID.randomUUID()}\\n${UUID.randomUUID()}"
}
"""))
- val result = observableCtrl.create("1")(request)
+ val result = observableCtrl.createInCase("1")(request)
status(result) shouldEqual 201
contentAsJson(result).as[Seq[OutputObservable]]
diff --git a/thehive/test/org/thp/thehive/controllers/v1/PatternCtrlTest.scala b/thehive/test/org/thp/thehive/controllers/v1/PatternCtrlTest.scala
new file mode 100644
index 0000000000..3df975824e
--- /dev/null
+++ b/thehive/test/org/thp/thehive/controllers/v1/PatternCtrlTest.scala
@@ -0,0 +1,99 @@
+package org.thp.thehive.controllers.v1
+
+import io.scalaland.chimney.dsl._
+import org.thp.scalligraph.controllers.FakeTemporaryFile
+import org.thp.thehive.TestAppBuilder
+import org.thp.thehive.dto.v1.OutputPattern
+import play.api.libs.json.JsArray
+import play.api.mvc.MultipartFormData.FilePart
+import play.api.mvc.{AnyContentAsMultipartFormData, MultipartFormData}
+import play.api.test.{FakeRequest, PlaySpecification}
+
+case class TestPattern(
+ patternId: String,
+ name: String,
+ description: Option[String],
+ tactics: Set[String],
+ url: String,
+ patternType: String,
+ platforms: Seq[String],
+ dataSources: Seq[String],
+ version: Option[String]
+)
+
+object TestPattern {
+ def apply(outputPattern: OutputPattern): TestPattern =
+ outputPattern.into[TestPattern].transform
+}
+
+class PatternCtrlTest extends PlaySpecification with TestAppBuilder {
+ "pattern controller" should {
+ "import json patterns" in testApp { app =>
+ val request = FakeRequest("POST", "/api/v1/pattern/import/attack")
+ .withHeaders("user" -> "admin@thehive.local")
+ .withBody(
+ AnyContentAsMultipartFormData(
+ MultipartFormData(
+ dataParts = Map.empty,
+ files = Seq(FilePart("file", "patterns.json", Option("application/json"), FakeTemporaryFile.fromResource("/patterns.json"))),
+ badParts = Seq()
+ )
+ )
+ )
+
+ val result = app[PatternCtrl].importMitre(request)
+ status(result) must beEqualTo(201).updateMessage(s => s"$s\n${contentAsString(result)}")
+
+ contentAsJson(result).as[JsArray].value.size must beEqualTo(8)
+ }
+
+ "get a existing pattern" in testApp { app =>
+ val request = FakeRequest("GET", "/api/v1/pattern/T123")
+ .withHeaders("user" -> "certuser@thehive.local")
+
+ val result = app[PatternCtrl].get("T123")(request)
+ status(result) must beEqualTo(200).updateMessage(s => s"$s\n${contentAsString(result)}")
+ val resultPattern = contentAsJson(result).as[OutputPattern]
+
+ TestPattern(resultPattern) must_=== TestPattern(
+ "T123",
+ "testPattern1",
+ Some("The testPattern 1"),
+ Set("testTactic1", "testTactic2"),
+ "http://test.pattern.url",
+ "unit-test",
+ Seq(),
+ Seq(),
+ Some("1.0")
+ )
+ }
+
+ "get patterns linked to case" in testApp { app =>
+ val request = FakeRequest("GET", "/api/v1/pattern/case/1")
+ .withHeaders("user" -> "certuser@thehive.local")
+
+ val result = app[PatternCtrl].getCasePatterns("1")(request)
+ status(result) must beEqualTo(200).updateMessage(s => s"$s\n${contentAsString(result)}")
+
+ contentAsJson(result).as[JsArray].value.size must beEqualTo(2)
+ }
+
+ "delete a pattern" in testApp { app =>
+ val request1 = FakeRequest("GET", "/api/v1/pattern/testPattern1")
+ .withHeaders("user" -> "certuser@thehive.local")
+ val result1 = app[PatternCtrl].get("T123")(request1)
+ status(result1) must beEqualTo(200).updateMessage(s => s"$s\n${contentAsString(result1)}")
+
+ val request2 = FakeRequest("DELETE", "/api/v1/pattern/testPattern1")
+ .withHeaders("user" -> "admin@thehive.local")
+ val result2 = app[PatternCtrl].delete("T123")(request2)
+ status(result2) must beEqualTo(204).updateMessage(s => s"$s\n${contentAsString(result2)}")
+
+ val request3 = FakeRequest("GET", "/api/v1/pattern/testPattern1")
+ .withHeaders("user" -> "certuser@thehive.local")
+ val result3 = app[PatternCtrl].get("T123")(request3)
+ status(result3) must beEqualTo(404).updateMessage(s => s"$s\n${contentAsString(result3)}")
+ }
+
+ }
+}
diff --git a/thehive/test/org/thp/thehive/controllers/v1/ProcedureCtrlTest.scala b/thehive/test/org/thp/thehive/controllers/v1/ProcedureCtrlTest.scala
new file mode 100644
index 0000000000..0b7d6fdf89
--- /dev/null
+++ b/thehive/test/org/thp/thehive/controllers/v1/ProcedureCtrlTest.scala
@@ -0,0 +1,77 @@
+package org.thp.thehive.controllers.v1
+
+import io.scalaland.chimney.dsl.TransformerOps
+import org.thp.thehive.TestAppBuilder
+import org.thp.thehive.dto.v1.{InputProcedure, OutputProcedure}
+import play.api.libs.json.Json
+import play.api.test.{FakeRequest, PlaySpecification}
+
+import java.util.Date
+
+case class TestProcedure(
+ description: String,
+ occurence: Date,
+ patternId: String
+)
+
+object TestProcedure {
+ def apply(outputProcedure: OutputProcedure): TestProcedure =
+ outputProcedure.into[TestProcedure].transform
+}
+
+class ProcedureCtrlTest extends PlaySpecification with TestAppBuilder {
+ "procedure controller" should {
+ "create a valid procedure" in testApp { app =>
+ val procedureDate = new Date()
+ val inputProcedure = InputProcedure(
+ "testProcedure3",
+ procedureDate,
+ "1",
+ "T123"
+ )
+
+ val request = FakeRequest("POST", "/api/v1/procedure")
+ .withJsonBody(Json.toJson(inputProcedure))
+ .withHeaders("user" -> "certadmin@thehive.local")
+
+ val result = app[ProcedureCtrl].create(request)
+ status(result) must beEqualTo(201).updateMessage(s => s"$s\n${contentAsString(result)}")
+
+ val resultProcedure = contentAsJson(result).as[OutputProcedure]
+
+ TestProcedure(resultProcedure) must_=== TestProcedure(
+ "testProcedure3",
+ procedureDate,
+ "T123"
+ )
+ }
+
+ "delete a procedure" in testApp { app =>
+ val request1 = FakeRequest("POST", "/api/v1/procedure/testProcedure3")
+ .withJsonBody(
+ Json.toJson(
+ InputProcedure(
+ "testProcedure3",
+ new Date(),
+ "1",
+ "T123"
+ )
+ )
+ )
+ .withHeaders("user" -> "certadmin@thehive.local")
+ val result1 = app[ProcedureCtrl].create(request1)
+ val procedureId = contentAsJson(result1).as[OutputProcedure]._id
+ status(result1) must beEqualTo(201).updateMessage(s => s"$s\n${contentAsString(result1)}")
+
+ val request2 = FakeRequest("DELETE", "/api/v1/procedure/testProcedure3")
+ .withHeaders("user" -> "certadmin@thehive.local")
+ val result2 = app[ProcedureCtrl].delete(procedureId)(request2)
+ status(result2) must beEqualTo(204).updateMessage(s => s"$s\n${contentAsString(result2)}")
+
+ val request3 = FakeRequest("GET", "/api/v1/procedure/testProcedure3")
+ .withHeaders("user" -> "certuser@thehive.local")
+ val result3 = app[ProcedureCtrl].get(procedureId)(request3)
+ status(result3) must beEqualTo(404).updateMessage(s => s"$s\n${contentAsString(result3)}")
+ }
+ }
+}
diff --git a/thehive/test/org/thp/thehive/controllers/v1/TaxonomyCtrlTest.scala b/thehive/test/org/thp/thehive/controllers/v1/TaxonomyCtrlTest.scala
new file mode 100644
index 0000000000..21674a24c5
--- /dev/null
+++ b/thehive/test/org/thp/thehive/controllers/v1/TaxonomyCtrlTest.scala
@@ -0,0 +1,258 @@
+package org.thp.thehive.controllers.v1
+
+import org.thp.scalligraph.controllers.FakeTemporaryFile
+import org.thp.thehive.TestAppBuilder
+import org.thp.thehive.dto.v1._
+import play.api.libs.Files
+import play.api.libs.json.{JsArray, Json}
+import play.api.mvc.MultipartFormData.FilePart
+import play.api.mvc.{AnyContentAsMultipartFormData, MultipartFormData}
+import play.api.test.{FakeRequest, PlaySpecification}
+
+case class TestTaxonomy(
+ namespace: String,
+ description: String,
+ version: Int,
+ tags: List[OutputTag]
+)
+
+object TestTaxonomy {
+ def apply(outputTaxonomy: OutputTaxonomy): TestTaxonomy =
+ TestTaxonomy(
+ outputTaxonomy.namespace,
+ outputTaxonomy.description,
+ outputTaxonomy.version,
+ outputTaxonomy.tags.toList
+ )
+}
+
+class TaxonomyCtrlTest extends PlaySpecification with TestAppBuilder {
+ "taxonomy controller" should {
+
+ val inputTaxo = InputTaxonomy(
+ "test-taxo",
+ "A test taxonomy",
+ 1,
+ None,
+ None,
+ List(
+ InputPredicate("pred1", None, None, None),
+ InputPredicate("pred2", None, None, None)
+ ),
+ Some(
+ List(
+ InputValue("pred1", List(InputEntry("entry1", None, None, None, None))),
+ InputValue(
+ "pred2",
+ List(
+ InputEntry("entry2", None, None, None, None),
+ InputEntry("entry21", None, None, None, None)
+ )
+ )
+ )
+ )
+ )
+
+ "create a valid taxonomy" in testApp { app =>
+ val request = FakeRequest("POST", "/api/v1/taxonomy")
+ .withJsonBody(Json.toJson(inputTaxo))
+ .withHeaders("user" -> "admin@thehive.local")
+
+ val result = app[TaxonomyCtrl].create(request)
+ status(result) must beEqualTo(201).updateMessage(s => s"$s\n${contentAsString(result)}")
+
+ val resultCase = contentAsJson(result).as[OutputTaxonomy]
+
+ TestTaxonomy(resultCase) must_=== TestTaxonomy(
+ "test-taxo",
+ "A test taxonomy",
+ 1,
+ List(
+ OutputTag("test-taxo", "pred1", Some("entry1"), None, "#000000"),
+ OutputTag("test-taxo", "pred2", Some("entry2"), None, "#000000"),
+ OutputTag("test-taxo", "pred2", Some("entry21"), None, "#000000")
+ )
+ )
+ }
+
+ "return error if not admin" in testApp { app =>
+ val request = FakeRequest("POST", "/api/v1/taxonomy")
+ .withJsonBody(Json.toJson(inputTaxo))
+ .withHeaders("user" -> "certuser@thehive.local")
+
+ val result = app[TaxonomyCtrl].create(request)
+ status(result) must beEqualTo(403).updateMessage(s => s"$s\n${contentAsString(result)}")
+ (contentAsJson(result) \ "type").as[String] must beEqualTo("AuthorizationError")
+ }
+
+ "return error if namespace is present in database" in testApp { app =>
+ val alreadyInDatabase = inputTaxo.copy(namespace = "taxonomy1")
+
+ val request = FakeRequest("POST", "/api/v1/taxonomy")
+ .withJsonBody(Json.toJson(alreadyInDatabase))
+ .withHeaders("user" -> "admin@thehive.local")
+
+ val result = app[TaxonomyCtrl].create(request)
+ status(result) must beEqualTo(400).updateMessage(s => s"$s\n${contentAsString(result)}")
+ (contentAsJson(result) \ "type").as[String] must beEqualTo("BadRequest")
+ (contentAsJson(result) \ "message").as[String] must contain("already exists")
+ }
+
+ "return error if namespace is empty" in testApp { app =>
+ val emptyNamespace = inputTaxo.copy(namespace = "")
+
+ val request = FakeRequest("POST", "/api/v1/taxonomy")
+ .withJsonBody(Json.toJson(emptyNamespace))
+ .withHeaders("user" -> "admin@thehive.local")
+
+ val result = app[TaxonomyCtrl].create(request)
+ status(result) must beEqualTo(400).updateMessage(s => s"$s\n${contentAsString(result)}")
+ (contentAsJson(result) \ "type").as[String] must beEqualTo("BadRequest")
+
+ }
+
+ "get a taxonomy present" in testApp { app =>
+ val request = FakeRequest("GET", "/api/v1/taxonomy/taxonomy1")
+ .withHeaders("user" -> "certuser@thehive.local")
+
+ val result = app[TaxonomyCtrl].get("taxonomy1")(request)
+ status(result) must beEqualTo(200).updateMessage(s => s"$s\n${contentAsString(result)}")
+ val resultCase = contentAsJson(result).as[OutputTaxonomy]
+
+ TestTaxonomy(resultCase) must_=== TestTaxonomy(
+ "taxonomy1",
+ "The taxonomy 1",
+ 1,
+ List(OutputTag("taxonomy1", "pred1", Some("value1"), None, "#000000"))
+ )
+ }
+
+ "return error if taxonomy is not present in database" in testApp { app =>
+ val request = FakeRequest("GET", "/api/v1/taxonomy/taxonomy404")
+ .withHeaders("user" -> "admin@thehive.local")
+
+ val result = app[TaxonomyCtrl].get("taxonomy404")(request)
+ status(result) must beEqualTo(404).updateMessage(s => s"$s\n${contentAsString(result)}")
+ (contentAsJson(result) \ "type").as[String] must beEqualTo("NotFoundError")
+ }
+
+ "import zip file correctly" in testApp { app =>
+ val request = FakeRequest("POST", "/api/v1/taxonomy/import-zip")
+ .withHeaders("user" -> "admin@thehive.local")
+ .withBody(AnyContentAsMultipartFormData(multipartZipFile("machinetag.zip")))
+
+ val result = app[TaxonomyCtrl].importZip(request)
+ status(result) must beEqualTo(201).updateMessage(s => s"$s\n${contentAsString(result)}")
+
+ contentAsString(result) must not contain "Failure"
+ contentAsJson(result).as[JsArray].value.size must beEqualTo(2)
+ }
+
+ "import zip file with folders correctly" in testApp { app =>
+ val request = FakeRequest("POST", "/api/v1/taxonomy/import-zip")
+ .withHeaders("user" -> "admin@thehive.local")
+ .withBody(AnyContentAsMultipartFormData(multipartZipFile("machinetag-folders.zip")))
+
+ val result = app[TaxonomyCtrl].importZip(request)
+ status(result) must beEqualTo(201).updateMessage(s => s"$s\n${contentAsString(result)}")
+
+ contentAsString(result) must not contain "Failure"
+ contentAsJson(result).as[JsArray].value.size must beEqualTo(2)
+ }
+
+ "return no error if zip file contains other files than taxonomies" in testApp { app =>
+ val request = FakeRequest("POST", "/api/v1/taxonomy/import-zip")
+ .withHeaders("user" -> "admin@thehive.local")
+ .withBody(AnyContentAsMultipartFormData(multipartZipFile("machinetag-otherfiles.zip")))
+
+ val result = app[TaxonomyCtrl].importZip(request)
+ status(result) must beEqualTo(201).updateMessage(s => s"$s\n${contentAsString(result)}")
+
+ contentAsString(result) must not contain "Failure"
+ contentAsJson(result).as[JsArray].value.size must beEqualTo(1)
+ }
+
+ "return error if zip file contains an already present taxonomy" in testApp { app =>
+ val request = FakeRequest("POST", "/api/v1/taxonomy/import-zip")
+ .withHeaders("user" -> "admin@thehive.local")
+ .withBody(AnyContentAsMultipartFormData(multipartZipFile("machinetag-present.zip")))
+
+ val result = app[TaxonomyCtrl].importZip(request)
+ status(result) must beEqualTo(201).updateMessage(s => s"$s\n${contentAsString(result)}")
+ contentAsString(result) must contain("Failure")
+ contentAsJson(result).as[JsArray].value.size must beEqualTo(2)
+ }
+
+ "return error if zip file contains a bad formatted taxonomy" in testApp { app =>
+ val request = FakeRequest("POST", "/api/v1/taxonomy/import-zip")
+ .withHeaders("user" -> "admin@thehive.local")
+ .withBody(AnyContentAsMultipartFormData(multipartZipFile("machinetag-badformat.zip")))
+
+ val result = app[TaxonomyCtrl].importZip(request)
+ status(result) must beEqualTo(400).updateMessage(s => s"$s\n${contentAsString(result)}")
+ (contentAsJson(result) \ "type").as[String] must beEqualTo("BadRequest")
+ (contentAsJson(result) \ "message").as[String] must contain("formatting")
+ }
+
+ "activate a taxonomy" in testApp { app =>
+ val request1 = FakeRequest("GET", "/api/v1/taxonomy/taxonomy2")
+ .withHeaders("user" -> "certuser@thehive.local")
+ val result1 = app[TaxonomyCtrl].get("taxonomy2")(request1)
+ status(result1) must beEqualTo(404).updateMessage(s => s"$s\n${contentAsString(result1)}")
+
+ val request2 = FakeRequest("PUT", "/api/v1/taxonomy/taxonomy2")
+ .withHeaders("user" -> "admin@thehive.local")
+ val result2 = app[TaxonomyCtrl].toggleActivation("taxonomy2", isActive = true)(request2)
+ status(result2) must beEqualTo(204).updateMessage(s => s"$s\n${contentAsString(result2)}")
+
+ val request3 = FakeRequest("GET", "/api/v1/taxonomy/taxonomy2")
+ .withHeaders("user" -> "certuser@thehive.local")
+ val result3 = app[TaxonomyCtrl].get("taxonomy2")(request3)
+ status(result3) must beEqualTo(200).updateMessage(s => s"$s\n${contentAsString(result3)}")
+ }
+
+ "deactivate a taxonomy" in testApp { app =>
+ val request1 = FakeRequest("GET", "/api/v1/taxonomy/taxonomy1")
+ .withHeaders("user" -> "certuser@thehive.local")
+ val result1 = app[TaxonomyCtrl].get("taxonomy1")(request1)
+ status(result1) must beEqualTo(200).updateMessage(s => s"$s\n${contentAsString(result1)}")
+
+ val request2 = FakeRequest("PUT", "/api/v1/taxonomy/taxonomy1/deactivate")
+ .withHeaders("user" -> "admin@thehive.local")
+ val result2 = app[TaxonomyCtrl].toggleActivation("taxonomy1", isActive = false)(request2)
+ status(result2) must beEqualTo(204).updateMessage(s => s"$s\n${contentAsString(result2)}")
+
+ val request3 = FakeRequest("GET", "/api/v1/taxonomy/taxonomy1")
+ .withHeaders("user" -> "certuser@thehive.local")
+ val result3 = app[TaxonomyCtrl].get("taxonomy1")(request3)
+ status(result3) must beEqualTo(404).updateMessage(s => s"$s\n${contentAsString(result3)}")
+ }
+
+ "delete a taxonomy" in testApp { app =>
+ val request1 = FakeRequest("GET", "/api/v1/taxonomy/taxonomy1")
+ .withHeaders("user" -> "certuser@thehive.local")
+ val result1 = app[TaxonomyCtrl].get("taxonomy1")(request1)
+ status(result1) must beEqualTo(200).updateMessage(s => s"$s\n${contentAsString(result1)}")
+
+ val request2 = FakeRequest("DELETE", "/api/v1/taxonomy/taxonomy1")
+ .withHeaders("user" -> "admin@thehive.local")
+ val result2 = app[TaxonomyCtrl].delete("taxonomy1")(request2)
+ status(result2) must beEqualTo(204).updateMessage(s => s"$s\n${contentAsString(result2)}")
+
+ val request3 = FakeRequest("GET", "/api/v1/taxonomy/taxonomy1")
+ .withHeaders("user" -> "certuser@thehive.local")
+ val result3 = app[TaxonomyCtrl].get("taxonomy1")(request3)
+ status(result3) must beEqualTo(404).updateMessage(s => s"$s\n${contentAsString(result3)}")
+ }
+
+ }
+
+ def multipartZipFile(name: String): MultipartFormData[Files.TemporaryFile] =
+    // file must be placed in test/resources/
+ MultipartFormData(
+ dataParts = Map.empty,
+ files = Seq(FilePart("file", name, Option("application/zip"), FakeTemporaryFile.fromResource(s"/$name"))),
+ badParts = Seq()
+ )
+
+}
diff --git a/thehive/test/org/thp/thehive/controllers/v1/UserCtrlTest.scala b/thehive/test/org/thp/thehive/controllers/v1/UserCtrlTest.scala
index 18f3b45ad0..23ee211e62 100644
--- a/thehive/test/org/thp/thehive/controllers/v1/UserCtrlTest.scala
+++ b/thehive/test/org/thp/thehive/controllers/v1/UserCtrlTest.scala
@@ -107,6 +107,7 @@ class UserCtrlTest extends PlaySpecification with TestAppBuilder {
Permissions.manageCase,
Permissions.manageUser,
Permissions.managePage,
+ Permissions.manageProcedure,
Permissions.manageObservable,
Permissions.manageAlert,
Permissions.manageAction,
diff --git a/thehive/test/org/thp/thehive/models/TagTest.scala b/thehive/test/org/thp/thehive/models/TagTest.scala
index 29a9021c47..24a4c59a58 100644
--- a/thehive/test/org/thp/thehive/models/TagTest.scala
+++ b/thehive/test/org/thp/thehive/models/TagTest.scala
@@ -4,43 +4,43 @@ import play.api.test.PlaySpecification
class TagTest extends PlaySpecification {
val defaultNamespace: String = "_default_namespace_"
- val defaultColor: Int = 0xffff00
+ val defaultColour: String = "#ffff00"
- def parseTag(s: String): Tag = Tag.fromString(s, defaultNamespace, defaultColor)
+ def parseTag(s: String): Tag = Tag.fromString(s, defaultNamespace, defaultColour)
"tag" should {
"be parsed from key:value" in {
val tag = parseTag("Module:atest_blah_blah")
- tag must beEqualTo(Tag(defaultNamespace, "Module", Some("atest_blah_blah"), None, defaultColor))
+ tag must beEqualTo(Tag(defaultNamespace, "Module", Some("atest_blah_blah"), None, defaultColour))
tag.toString must beEqualTo("Module=\"atest_blah_blah\"")
}
"be parsed from key:value=" in {
val tag = parseTag("Id:7SeUoB3IBABD+tMh2PjVJYg==")
- tag must beEqualTo(Tag(defaultNamespace, "Id", Some("7SeUoB3IBABD+tMh2PjVJYg=="), None, defaultColor))
+ tag must beEqualTo(Tag(defaultNamespace, "Id", Some("7SeUoB3IBABD+tMh2PjVJYg=="), None, defaultColour))
tag.toString must beEqualTo("Id=\"7SeUoB3IBABD+tMh2PjVJYg==\"")
}
"be parsed from key: value" in {
val tag = parseTag("domain: google.com")
- tag must beEqualTo(Tag(defaultNamespace, "domain", Some("google.com"), None, defaultColor))
+ tag must beEqualTo(Tag(defaultNamespace, "domain", Some("google.com"), None, defaultColour))
tag.toString must beEqualTo("domain=\"google.com\"")
}
"be parsed from key: a.b.c.d" in {
val tag = parseTag("ip: 8.8.8.8")
- tag must beEqualTo(Tag(defaultNamespace, "ip", Some("8.8.8.8"), None, defaultColor))
+ tag must beEqualTo(Tag(defaultNamespace, "ip", Some("8.8.8.8"), None, defaultColour))
tag.toString must beEqualTo("ip=\"8.8.8.8\"")
}
"be parsed with colour" in {
val tag = parseTag("ip:8.8.8.8#FF00FF")
- tag must beEqualTo(Tag(defaultNamespace, "ip", Some("8.8.8.8"), None, 0xFF00FF))
+ tag must beEqualTo(Tag(defaultNamespace, "ip", Some("8.8.8.8"), None, "#FF00FF"))
tag.toString must beEqualTo("ip=\"8.8.8.8\"")
}
"be parsed with hash sign and colour" in {
val tag = parseTag("case:#42#FF00FF")
- tag must beEqualTo(Tag(defaultNamespace, "case", Some("#42"), None, 0xFF00FF))
+ tag must beEqualTo(Tag(defaultNamespace, "case", Some("#42"), None, "#FF00FF"))
tag.toString must beEqualTo("case=\"#42\"")
}
}
diff --git a/thehive/test/resources/data/CaseProcedure.json b/thehive/test/resources/data/CaseProcedure.json
new file mode 100644
index 0000000000..b39e53162c
--- /dev/null
+++ b/thehive/test/resources/data/CaseProcedure.json
@@ -0,0 +1,4 @@
+[
+ {"from": "case1", "to": "testProcedure1"},
+ {"from": "case1", "to": "testProcedure2"}
+]
\ No newline at end of file
diff --git a/thehive/test/resources/data/OrganisationTaxonomy.json b/thehive/test/resources/data/OrganisationTaxonomy.json
new file mode 100644
index 0000000000..df6a1338b2
--- /dev/null
+++ b/thehive/test/resources/data/OrganisationTaxonomy.json
@@ -0,0 +1,5 @@
+[
+ {"from": "admin", "to": "taxonomy1"},
+ {"from": "cert", "to": "taxonomy1"},
+ {"from": "soc", "to": "taxonomy1"}
+]
\ No newline at end of file
diff --git a/thehive/test/resources/data/Pattern.json b/thehive/test/resources/data/Pattern.json
new file mode 100644
index 0000000000..9a4a23e2ef
--- /dev/null
+++ b/thehive/test/resources/data/Pattern.json
@@ -0,0 +1,32 @@
+[
+ {
+ "id": "testPattern1",
+ "patternId": "T123",
+ "name": "testPattern1",
+ "description": "The testPattern 1",
+ "tactics": [
+ "testTactic1",
+ "testTactic2"
+ ],
+ "url": "http://test.pattern.url",
+ "patternType": "unit-test",
+ "platforms": [],
+ "dataSources": [],
+ "revision": "1.0"
+ },
+ {
+ "id": "testPattern2",
+ "patternId": "T234",
+ "name": "testPattern2",
+ "description": "The testPattern 2",
+ "tactics": [
+ "testTactic2",
+ "testTactic3"
+ ],
+ "url": "http://test.pattern2.url",
+ "patternType": "unit-test",
+ "platforms": [],
+ "dataSources": [],
+ "revision": "1.1"
+ }
+]
diff --git a/thehive/test/resources/data/Procedure.json b/thehive/test/resources/data/Procedure.json
new file mode 100644
index 0000000000..d908b1b1ad
--- /dev/null
+++ b/thehive/test/resources/data/Procedure.json
@@ -0,0 +1,12 @@
+[
+ {
+ "id": "testProcedure1",
+ "description": "The testProcedure 1",
+ "occurence": 1531667370000
+ },
+ {
+ "id": "testProcedure2",
+ "description": "The testProcedure 2",
+ "occurence": 1531667370000
+ }
+]
\ No newline at end of file
diff --git a/thehive/test/resources/data/ProcedurePattern.json b/thehive/test/resources/data/ProcedurePattern.json
new file mode 100644
index 0000000000..19541f6f82
--- /dev/null
+++ b/thehive/test/resources/data/ProcedurePattern.json
@@ -0,0 +1,4 @@
+[
+ {"from": "testProcedure1", "to": "testPattern1"},
+ {"from": "testProcedure2", "to": "testPattern2"}
+]
\ No newline at end of file
diff --git a/thehive/test/resources/data/Tag.json b/thehive/test/resources/data/Tag.json
index c6136decb4..30908714a4 100644
--- a/thehive/test/resources/data/Tag.json
+++ b/thehive/test/resources/data/Tag.json
@@ -4,69 +4,76 @@
"namespace": "testNamespace",
"predicate": "testPredicate",
"value": "t1",
- "colour": 0
+ "colour": "#000000"
},
{
"id": "tagt2",
"namespace": "testNamespace",
"predicate": "testPredicate",
"value": "t2",
- "colour": 0
+ "colour": "#000000"
},
{
"id": "tagt3",
"namespace": "testNamespace",
"predicate": "testPredicate",
"value": "t3",
- "colour": 0
+ "colour": "#000000"
},
{
"id": "tagalert",
"namespace": "testNamespace",
"predicate": "testPredicate",
"value": "alert",
- "colour": 0
+ "colour": "#000000"
},
{
"id": "tagtest",
"namespace": "testNamespace",
"predicate": "testPredicate",
"value": "test",
- "colour": 0
+ "colour": "#000000"
},
{
"id": "tagspam",
"namespace": "testNamespace",
"predicate": "testPredicate",
"value": "spam",
- "colour": 0
+ "colour": "#000000"
},
{
"id": "tagsrc:mail",
"namespace": "testNamespace",
"predicate": "testPredicate",
"value": "src:mail",
- "colour": 0
+ "colour": "#000000"
},
{
"id": "tagtestDomain",
"namespace": "testNamespace",
"predicate": "testPredicate",
"value": "testDomain",
- "colour": 0
+ "colour": "#000000"
},
{
"id": "taghello",
"namespace": "testNamespace",
"predicate": "testPredicate",
"value": "hello",
- "colour": 0
+ "colour": "#000000"
},
{
"id": "tagworld",
"namespace": "testNamespace",
"predicate": "testPredicate",
"value": "world",
- "colour": 0
+ "colour": "#000000"
+ },
+ {
+ "id": "taxonomy-tag1",
+ "namespace": "taxonomy1",
+ "predicate": "pred1",
+ "value": "value1",
+ "colour": "#000000"
}
]
\ No newline at end of file
diff --git a/thehive/test/resources/data/Taxonomy.json b/thehive/test/resources/data/Taxonomy.json
new file mode 100644
index 0000000000..5c661448dc
--- /dev/null
+++ b/thehive/test/resources/data/Taxonomy.json
@@ -0,0 +1,14 @@
+[
+ {
+ "id": "taxonomy1",
+ "namespace": "taxonomy1",
+ "description": "The taxonomy 1",
+ "version": 1
+ },
+ {
+ "id": "taxonomy2",
+ "namespace": "taxonomy2",
+ "description": "The taxonomy 2",
+ "version": 1
+ }
+]
\ No newline at end of file
diff --git a/thehive/test/resources/data/TaxonomyTag.json b/thehive/test/resources/data/TaxonomyTag.json
new file mode 100644
index 0000000000..80806c707c
--- /dev/null
+++ b/thehive/test/resources/data/TaxonomyTag.json
@@ -0,0 +1,3 @@
+[
+ {"from": "taxonomy1", "to": "taxonomy-tag1"}
+]
\ No newline at end of file
diff --git a/thehive/test/resources/machinetag-badformat.zip b/thehive/test/resources/machinetag-badformat.zip
new file mode 100644
index 0000000000..f18619f184
Binary files /dev/null and b/thehive/test/resources/machinetag-badformat.zip differ
diff --git a/thehive/test/resources/machinetag-folders.zip b/thehive/test/resources/machinetag-folders.zip
new file mode 100644
index 0000000000..f11bf049db
Binary files /dev/null and b/thehive/test/resources/machinetag-folders.zip differ
diff --git a/thehive/test/resources/machinetag-otherfiles.zip b/thehive/test/resources/machinetag-otherfiles.zip
new file mode 100644
index 0000000000..8150184178
Binary files /dev/null and b/thehive/test/resources/machinetag-otherfiles.zip differ
diff --git a/thehive/test/resources/machinetag-present.zip b/thehive/test/resources/machinetag-present.zip
new file mode 100644
index 0000000000..bcaa2464e5
Binary files /dev/null and b/thehive/test/resources/machinetag-present.zip differ
diff --git a/thehive/test/resources/machinetag.zip b/thehive/test/resources/machinetag.zip
new file mode 100644
index 0000000000..49be7a25c0
Binary files /dev/null and b/thehive/test/resources/machinetag.zip differ
diff --git a/thehive/test/resources/patterns.json b/thehive/test/resources/patterns.json
new file mode 100644
index 0000000000..3338d13fb7
--- /dev/null
+++ b/thehive/test/resources/patterns.json
@@ -0,0 +1,447 @@
+{
+ "type": "bundle",
+ "id": "bundle--ad5f3bce-004b-417e-899d-392f8591ab55",
+ "spec_version": "2.0",
+ "objects": [
+ {
+ "id": "attack-pattern--01df3350-ce05-4bdf-bdf8-0a919a66d4a8",
+ "name": ".bash_profile and .bashrc",
+ "external_references": [
+ {
+ "source_name": "mitre-attack",
+ "external_id": "T1156",
+ "url": "https://attack.mitre.org/techniques/T1156"
+ },
+ {
+ "url": "https://researchcenter.paloaltonetworks.com/2017/04/unit42-new-iotlinux-malware-targets-dvrs-forms-botnet/",
+ "description": "Claud Xiao, Cong Zheng, Yanhui Jia. (2017, April 6). New IoT/Linux Malware Targets DVRs, Forms Botnet. Retrieved February 19, 2018.",
+ "source_name": "amnesia malware"
+ }
+ ],
+ "revoked": true,
+ "type": "attack-pattern",
+ "modified": "2020-01-24T14:14:05.452Z",
+ "created": "2017-12-14T16:46:06.044Z"
+ },
+ {
+ "external_references": [
+ {
+ "source_name": "mitre-attack",
+ "external_id": "T1546.004",
+ "url": "https://attack.mitre.org/techniques/T1546/004"
+ },
+ {
+ "url": "https://researchcenter.paloaltonetworks.com/2017/04/unit42-new-iotlinux-malware-targets-dvrs-forms-botnet/",
+ "description": "Claud Xiao, Cong Zheng, Yanhui Jia. (2017, April 6). New IoT/Linux Malware Targets DVRs, Forms Botnet. Retrieved February 19, 2018.",
+ "source_name": "amnesia malware"
+ }
+ ],
+ "object_marking_refs": [
+ "marking-definition--fa42a846-8d90-4e51-bc29-71d5b4802168"
+ ],
+ "created_by_ref": "identity--c78cb6e5-0c4b-4611-8297-d1b8b55e40b5",
+ "name": ".bash_profile and .bashrc",
+ "description": "Adversaries may establish persistence by executing malicious content triggered by a user’s shell. ~/.bash_profile and ~/.bashrc are shell scripts that contain shell commands. These files are executed in a user's context when a new shell opens or when a user logs in so that their environment is set correctly.\n\n~/.bash_profile is executed for login shells and ~/.bashrc is executed for interactive non-login shells. This means that when a user logs in (via username and password) to the console (either locally or remotely via something like SSH), the ~/.bash_profile script is executed before the initial command prompt is returned to the user. After that, every time a new shell is opened, the ~/.bashrc script is executed. This allows users more fine-grained control over when they want certain commands executed. These shell scripts are meant to be written to by the local user to configure their own environment.\n\nThe macOS Terminal.app is a little different in that it runs a login shell by default each time a new terminal window is opened, thus calling ~/.bash_profile each time instead of ~/.bashrc .\n\nAdversaries may abuse these shell scripts by inserting arbitrary shell commands that may be used to execute other binaries to gain persistence. Every time the user logs in or opens a new shell, the modified ~/.bash_profile and/or ~/.bashrc scripts will be executed.(Citation: amnesia malware)",
+ "id": "attack-pattern--b63a34e8-0a61-4c97-a23b-bf8a2ed812e2",
+ "type": "attack-pattern",
+ "kill_chain_phases": [
+ {
+ "kill_chain_name": "mitre-attack",
+ "phase_name": "privilege-escalation"
+ },
+ {
+ "kill_chain_name": "mitre-attack",
+ "phase_name": "persistence"
+ }
+ ],
+ "modified": "2020-03-24T16:28:04.990Z",
+ "created": "2020-01-24T14:13:45.936Z",
+ "x_mitre_version": "1.0",
+ "x_mitre_is_subtechnique": true,
+ "x_mitre_permissions_required": [
+ "User",
+ "Administrator"
+ ],
+ "x_mitre_detection": "While users may customize their ~/.bashrc and ~/.bash_profile files , there are only certain types of commands that typically appear in these files. Monitor for abnormal commands such as execution of unknown programs, opening network sockets, or reaching out across the network when user profiles are loaded during the login process.",
+ "x_mitre_data_sources": [
+ "Process use of network",
+ "Process command-line parameters",
+ "Process monitoring",
+ "File monitoring"
+ ],
+ "x_mitre_platforms": [
+ "Linux",
+ "macOS"
+ ]
+ },
+ {
+ "external_references": [
+ {
+ "url": "https://attack.mitre.org/techniques/T1003/008",
+ "external_id": "T1003.008",
+ "source_name": "mitre-attack"
+ },
+ {
+ "description": "The Linux Documentation Project. (n.d.). Linux Password and Shadow File Formats. Retrieved February 19, 2020.",
+ "url": "https://www.tldp.org/LDP/lame/LAME/linux-admin-made-easy/shadow-file-formats.html",
+ "source_name": "Linux Password and Shadow File Formats"
+ },
+ {
+ "description": "Vivek Gite. (2014, September 17). Linux Password Cracking: Explain unshadow and john Commands (John the Ripper Tool). Retrieved February 19, 2020.",
+ "url": "https://www.cyberciti.biz/faq/unix-linux-password-cracking-john-the-ripper/",
+ "source_name": "nixCraft - John the Ripper"
+ }
+ ],
+ "object_marking_refs": [
+ "marking-definition--fa42a846-8d90-4e51-bc29-71d5b4802168"
+ ],
+ "created_by_ref": "identity--c78cb6e5-0c4b-4611-8297-d1b8b55e40b5",
+ "name": "/etc/passwd and /etc/shadow",
+ "description": "Adversaries may attempt to dump the contents of /etc/passwd and /etc/shadow to enable offline password cracking. Most modern Linux operating systems use a combination of /etc/passwd and /etc/shadow to store user account information including password hashes in /etc/shadow . By default, /etc/shadow is only readable by the root user.(Citation: Linux Password and Shadow File Formats)\n\nThe Linux utility, unshadow, can be used to combine the two files in a format suited for password cracking utilities such as John the Ripper:(Citation: nixCraft - John the Ripper) # /usr/bin/unshadow /etc/passwd /etc/shadow > /tmp/crack.password.db \n",
+ "id": "attack-pattern--d0b4fcdb-d67d-4ed2-99ce-788b12f8c0f4",
+ "type": "attack-pattern",
+ "kill_chain_phases": [
+ {
+ "kill_chain_name": "mitre-attack",
+ "phase_name": "credential-access"
+ }
+ ],
+ "modified": "2020-03-20T15:56:55.022Z",
+ "created": "2020-02-11T18:46:56.263Z",
+ "x_mitre_detection": "The AuditD monitoring tool, which ships stock in many Linux distributions, can be used to watch for hostile processes attempting to access /etc/passwd and /etc/shadow , alerting on the pid, process name, and arguments of such programs.",
+ "x_mitre_permissions_required": [
+ "root"
+ ],
+ "x_mitre_version": "1.0",
+ "x_mitre_is_subtechnique": true,
+ "x_mitre_platforms": [
+ "Linux"
+ ]
+ },
+ {
+ "external_references": [
+ {
+ "source_name": "mitre-attack",
+ "external_id": "T1557.002",
+ "url": "https://attack.mitre.org/techniques/T1557/002"
+ },
+ {
+ "source_name": "RFC826 ARP",
+ "url": "https://tools.ietf.org/html/rfc826",
+ "description": "Plummer, D. (1982, November). An Ethernet Address Resolution Protocol. Retrieved October 15, 2020."
+ },
+ {
+ "source_name": "Sans ARP Spoofing Aug 2003",
+ "url": "https://pen-testing.sans.org/resources/papers/gcih/real-world-arp-spoofing-105411",
+ "description": "Siles, R. (2003, August). Real World ARP Spoofing. Retrieved October 15, 2020."
+ },
+ {
+ "source_name": "Cylance Cleaver",
+ "description": "Cylance. (2014, December). Operation Cleaver. Retrieved September 14, 2017.",
+ "url": "https://www.cylance.com/content/dam/cylance/pages/operation-cleaver/Cylance_Operation_Cleaver_Report.pdf"
+ }
+ ],
+ "object_marking_refs": [
+ "marking-definition--fa42a846-8d90-4e51-bc29-71d5b4802168"
+ ],
+ "created_by_ref": "identity--c78cb6e5-0c4b-4611-8297-d1b8b55e40b5",
+ "name": "ARP Cache Poisoning",
+      "description": "Adversaries may poison Address Resolution Protocol (ARP) caches to position themselves between the communication of two or more networked devices. This activity may be used to enable follow-on behaviors such as [Network Sniffing](https://attack.mitre.org/techniques/T1040) or [Transmitted Data Manipulation](https://attack.mitre.org/techniques/T1565/002).\n\nThe ARP protocol is used to resolve IPv4 addresses to link layer addresses, such as a media access control (MAC) address.(Citation: RFC826 ARP) Devices in a local network segment communicate with each other by using link layer addresses. If a networked device does not have the link layer address of a particular networked device, it may send out a broadcast ARP request to the local network to translate the IP address to a MAC address. The device with the associated IP address directly replies with its MAC address. The networked device that made the ARP request will then use as well as store that information in its ARP cache.\n\nAn adversary may passively wait for an ARP request to poison the ARP cache of the requesting device. The adversary may reply with their MAC address, thus deceiving the victim by making them believe that they are communicating with the intended networked device. For the adversary to poison the ARP cache, their reply must be faster than the one made by the legitimate IP address owner. Adversaries may also send a gratuitous ARP reply that maliciously announces the ownership of a particular IP address to all the devices in the local network segment.\n\nThe ARP protocol is stateless and does not require authentication. Therefore, devices may wrongly add or update the MAC address of the IP address in their ARP cache.(Citation: Sans ARP Spoofing Aug 2003)(Citation: Cylance Cleaver)\n\nAdversaries may use ARP cache poisoning as a means to man-in-the-middle (MiTM) network traffic. This activity may be used to collect and/or relay data such as credentials, especially those sent over an insecure, unencrypted protocol.(Citation: Sans ARP Spoofing Aug 2003)\n",
+ "id": "attack-pattern--cabe189c-a0e3-4965-a473-dcff00f17213",
+ "type": "attack-pattern",
+ "kill_chain_phases": [
+ {
+ "kill_chain_name": "mitre-attack",
+ "phase_name": "credential-access"
+ },
+ {
+ "kill_chain_name": "mitre-attack",
+ "phase_name": "collection"
+ }
+ ],
+ "modified": "2020-10-16T15:22:11.604Z",
+ "created": "2020-10-15T12:05:58.755Z",
+ "x_mitre_version": "1.0",
+ "x_mitre_is_subtechnique": true,
+ "x_mitre_permissions_required": [
+ "User"
+ ],
+ "x_mitre_detection": "Monitor network traffic for unusual ARP traffic, gratuitous ARP replies may be suspicious. \n\nConsider collecting changes to ARP caches across endpoints for signs of ARP poisoning. For example, if multiple IP addresses map to a single MAC address, this could be an indicator that the ARP cache has been poisoned.",
+ "x_mitre_data_sources": [
+ "Packet capture",
+ "Netflow/Enclave netflow"
+ ],
+ "x_mitre_contributors": [
+ "Jon Sternstein, Stern Security"
+ ],
+ "x_mitre_platforms": [
+ "Linux",
+ "Windows",
+ "macOS"
+ ]
+ },
+ {
+ "external_references": [
+ {
+ "source_name": "mitre-attack",
+ "external_id": "T1558.004",
+ "url": "https://attack.mitre.org/techniques/T1558/004"
+ },
+ {
+ "source_name": "Harmj0y Roasting AS-REPs Jan 2017",
+ "url": "http://www.harmj0y.net/blog/activedirectory/roasting-as-reps/",
+ "description": "HarmJ0y. (2017, January 17). Roasting AS-REPs. Retrieved August 24, 2020."
+ },
+ {
+ "source_name": "Microsoft Kerberos Preauth 2014",
+ "url": "https://social.technet.microsoft.com/wiki/contents/articles/23559.kerberos-pre-authentication-why-it-should-not-be-disabled.aspx",
+ "description": "Sanyal, M.. (2014, March 18). Kerberos Pre-Authentication: Why It Should Not Be Disabled. Retrieved August 25, 2020."
+ },
+ {
+ "source_name": "Stealthbits Cracking AS-REP Roasting Jun 2019",
+ "url": "https://blog.stealthbits.com/cracking-active-directory-passwords-with-as-rep-roasting/",
+ "description": "Jeff Warren. (2019, June 27). Cracking Active Directory Passwords with AS-REP Roasting. Retrieved August 24, 2020."
+ },
+ {
+ "description": "Medin, T. (2014, November). Attacking Kerberos - Kicking the Guard Dog of Hades. Retrieved March 22, 2018.",
+ "source_name": "SANS Attacking Kerberos Nov 2014",
+ "url": "https://redsiege.com/kerberoast-slides"
+ },
+ {
+ "url": "https://adsecurity.org/?p=2293",
+ "description": "Metcalf, S. (2015, December 31). Cracking Kerberos TGS Tickets Using Kerberoast – Exploiting Kerberos to Compromise the Active Directory Domain. Retrieved March 22, 2018.",
+ "source_name": "AdSecurity Cracking Kerberos Dec 2015"
+ },
+ {
+ "url": "https://blogs.technet.microsoft.com/motiba/2018/02/23/detecting-kerberoasting-activity-using-azure-security-center/",
+ "description": "Bani, M. (2018, February 23). Detecting Kerberoasting activity using Azure Security Center. Retrieved March 23, 2018.",
+ "source_name": "Microsoft Detecting Kerberoasting Feb 2018"
+ },
+ {
+ "source_name": "Microsoft 4768 TGT 2017",
+ "url": "https://docs.microsoft.com/en-us/windows/security/threat-protection/auditing/event-4768",
+ "description": "Microsoft. (2017, April 19). 4768(S, F): A Kerberos authentication ticket (TGT) was requested. Retrieved August 24, 2020."
+ }
+ ],
+ "object_marking_refs": [
+ "marking-definition--fa42a846-8d90-4e51-bc29-71d5b4802168"
+ ],
+ "created_by_ref": "identity--c78cb6e5-0c4b-4611-8297-d1b8b55e40b5",
+ "name": "AS-REP Roasting",
+ "description": "Adversaries may reveal credentials of accounts that have disabled Kerberos preauthentication by [Password Cracking](https://attack.mitre.org/techniques/T1110/002) Kerberos messages.(Citation: Harmj0y Roasting AS-REPs Jan 2017) \n\nPreauthentication offers protection against offline [Password Cracking](https://attack.mitre.org/techniques/T1110/002). When enabled, a user requesting access to a resource initiates communication with the Domain Controller (DC) by sending an Authentication Server Request (AS-REQ) message with a timestamp that is encrypted with the hash of their password. If and only if the DC is able to successfully decrypt the timestamp with the hash of the user’s password, it will then send an Authentication Server Response (AS-REP) message that contains the Ticket Granting Ticket (TGT) to the user. Part of the AS-REP message is signed with the user’s password.(Citation: Microsoft Kerberos Preauth 2014)\n\nFor each account found without preauthentication, an adversary may send an AS-REQ message without the encrypted timestamp and receive an AS-REP message with TGT data which may be encrypted with an insecure algorithm such as RC4. The recovered encrypted data may be vulnerable to offline [Password Cracking](https://attack.mitre.org/techniques/T1110/002) attacks similarly to [Kerberoasting](https://attack.mitre.org/techniques/T1558/003) and expose plaintext credentials. (Citation: Harmj0y Roasting AS-REPs Jan 2017)(Citation: Stealthbits Cracking AS-REP Roasting Jun 2019) \n\nAn account registered to a domain, with or without special privileges, can be abused to list all domain accounts that have preauthentication disabled by utilizing Windows tools like [PowerShell](https://attack.mitre.org/techniques/T1059/001) with an LDAP filter. Alternatively, the adversary may send an AS-REQ message for each user. 
If the DC responds without errors, the account does not require preauthentication and the AS-REP message will already contain the encrypted data. (Citation: Harmj0y Roasting AS-REPs Jan 2017)(Citation: Stealthbits Cracking AS-REP Roasting Jun 2019)\n\nCracked hashes may enable [Persistence](https://attack.mitre.org/tactics/TA0003), [Privilege Escalation](https://attack.mitre.org/tactics/TA0004), and [Lateral Movement](https://attack.mitre.org/tactics/TA0008) via access to [Valid Accounts](https://attack.mitre.org/techniques/T1078).(Citation: SANS Attacking Kerberos Nov 2014)",
+ "id": "attack-pattern--3986e7fd-a8e9-4ecb-bfc6-55920855912b",
+ "type": "attack-pattern",
+ "kill_chain_phases": [
+ {
+ "kill_chain_name": "mitre-attack",
+ "phase_name": "credential-access"
+ }
+ ],
+ "modified": "2020-10-20T19:30:11.783Z",
+ "created": "2020-08-24T13:43:00.028Z",
+ "x_mitre_version": "1.0",
+ "x_mitre_is_subtechnique": true,
+ "x_mitre_system_requirements": [
+ "Valid domain account"
+ ],
+ "x_mitre_permissions_required": [
+ "User"
+ ],
+ "x_mitre_detection": "Enable Audit Kerberos Service Ticket Operations to log Kerberos TGS service ticket requests. Particularly investigate irregular patterns of activity (ex: accounts making numerous requests, Event ID 4768 and 4769, within a small time frame, especially if they also request RC4 encryption [Type 0x17], pre-authentication not required [Type: 0x0]).(Citation: AdSecurity Cracking Kerberos Dec 2015)(Citation: Microsoft Detecting Kerberoasting Feb 2018)(Citation: Microsoft 4768 TGT 2017)",
+ "x_mitre_data_sources": [
+ "Windows event logs",
+ "Authentication logs"
+ ],
+ "x_mitre_contributors": [
+ "James Dunn, @jamdunnDFW, EY",
+ "Swapnil Kumbhar",
+ "Jacques Pluviose, @Jacqueswildy_IT",
+ "Dan Nutting, @KerberToast"
+ ],
+ "x_mitre_platforms": [
+ "Windows"
+ ]
+ },
+ {
+ "external_references": [
+ {
+ "source_name": "mitre-attack",
+ "external_id": "T1548",
+ "url": "https://attack.mitre.org/techniques/T1548"
+ }
+ ],
+ "object_marking_refs": [
+ "marking-definition--fa42a846-8d90-4e51-bc29-71d5b4802168"
+ ],
+ "created_by_ref": "identity--c78cb6e5-0c4b-4611-8297-d1b8b55e40b5",
+ "name": "Abuse Elevation Control Mechanism",
+ "description": "Adversaries may circumvent mechanisms designed to control elevate privileges to gain higher-level permissions. Most modern systems contain native elevation control mechanisms that are intended to limit privileges that a user can perform on a machine. Authorization has to be granted to specific users in order to perform tasks that can be considered of higher risk. An adversary can perform several methods to take advantage of built-in control mechanisms in order to escalate privileges on a system.",
+ "id": "attack-pattern--67720091-eee3-4d2d-ae16-8264567f6f5b",
+ "type": "attack-pattern",
+ "kill_chain_phases": [
+ {
+ "kill_chain_name": "mitre-attack",
+ "phase_name": "privilege-escalation"
+ },
+ {
+ "kill_chain_name": "mitre-attack",
+ "phase_name": "defense-evasion"
+ }
+ ],
+ "modified": "2020-07-22T21:36:52.825Z",
+ "created": "2020-01-30T13:58:14.373Z",
+ "x_mitre_data_sources": [
+ "Windows Registry",
+ "File monitoring",
+ "Process command-line parameters",
+ "API monitoring",
+ "Process monitoring"
+ ],
+ "x_mitre_permissions_required": [
+ "Administrator",
+ "User"
+ ],
+ "x_mitre_detection": "Monitor the file system for files that have the setuid or setgid bits set. Also look for any process API calls for behavior that may be indicative of [Process Injection](https://attack.mitre.org/techniques/T1055) and unusual loaded DLLs through [DLL Search Order Hijacking](https://attack.mitre.org/techniques/T1574/001), which indicate attempts to gain access to higher privileged processes. On Linux, auditd can alert every time a user's actual ID and effective ID are different (this is what happens when you sudo).\n\nConsider monitoring for /usr/libexec/security_authtrampoline executions which may indicate that AuthorizationExecuteWithPrivileges is being executed. MacOS system logs may also indicate when AuthorizationExecuteWithPrivileges is being called. Monitoring OS API callbacks for the execution can also be a way to detect this behavior but requires specialized security tooling.\n\nOn Linux, auditd can alert every time a user's actual ID and effective ID are different (this is what happens when you sudo). This technique is abusing normal functionality in macOS and Linux systems, but sudo has the ability to log all input and output based on the LOG_INPUT and LOG_OUTPUT directives in the /etc/sudoers file.\n\nThere are many ways to perform UAC bypasses when a user is in the local administrator group on a system, so it may be difficult to target detection on all variations. Efforts should likely be placed on mitigation and collecting enough information on process launches and actions that could be performed before and after a UAC bypass is performed. Some UAC bypass methods rely on modifying specific, user-accessible Registry settings. Analysts should monitor Registry settings for unauthorized changes.",
+ "x_mitre_version": "1.0",
+ "x_mitre_is_subtechnique": false,
+ "x_mitre_platforms": [
+ "Linux",
+ "macOS",
+ "Windows"
+ ]
+ },
+ {
+ "object_marking_refs": [
+ "marking-definition--fa42a846-8d90-4e51-bc29-71d5b4802168"
+ ],
+ "external_references": [
+ {
+ "source_name": "mitre-attack",
+ "external_id": "T1134",
+ "url": "https://attack.mitre.org/techniques/T1134"
+ },
+ {
+ "external_id": "CAPEC-633",
+ "source_name": "capec",
+ "url": "https://capec.mitre.org/data/definitions/633.html"
+ },
+ {
+ "url": "https://pentestlab.blog/2017/04/03/token-manipulation/",
+ "description": "netbiosX. (2017, April 3). Token Manipulation. Retrieved April 21, 2017.",
+ "source_name": "Pentestlab Token Manipulation"
+ },
+ {
+ "url": "https://technet.microsoft.com/en-us/windows-server-docs/identity/ad-ds/manage/component-updates/command-line-process-auditing",
+ "description": "Mathers, B. (2017, March 7). Command line process auditing. Retrieved April 21, 2017.",
+ "source_name": "Microsoft Command-line Logging"
+ },
+ {
+ "url": "https://msdn.microsoft.com/en-us/library/windows/desktop/aa378184(v=vs.85).aspx",
+ "description": "Microsoft TechNet. (n.d.). Retrieved April 25, 2017.",
+ "source_name": "Microsoft LogonUser"
+ },
+ {
+ "url": "https://msdn.microsoft.com/en-us/library/windows/desktop/aa446617(v=vs.85).aspx",
+ "description": "Microsoft TechNet. (n.d.). Retrieved April 25, 2017.",
+ "source_name": "Microsoft DuplicateTokenEx"
+ },
+ {
+ "url": "https://msdn.microsoft.com/en-us/library/windows/desktop/aa378612(v=vs.85).aspx",
+ "description": "Microsoft TechNet. (n.d.). Retrieved April 25, 2017.",
+ "source_name": "Microsoft ImpersonateLoggedOnUser"
+ },
+ {
+ "url": "https://www.blackhat.com/docs/eu-17/materials/eu-17-Atkinson-A-Process-Is-No-One-Hunting-For-Token-Manipulation.pdf",
+ "description": "Atkinson, J., Winchester, R. (2017, December 7). A Process is No One: Hunting for Token Manipulation. Retrieved December 21, 2017.",
+ "source_name": "BlackHat Atkinson Winchester Token Manipulation"
+ }
+ ],
+ "description": "Adversaries may modify access tokens to operate under a different user or system security context to perform actions and bypass access controls. Windows uses access tokens to determine the ownership of a running process. A user can manipulate access tokens to make a running process appear as though it is the child of a different process or belongs to someone other than the user that started the process. When this occurs, the process also takes on the security context associated with the new token.\n\nAn adversary can use built-in Windows API functions to copy access tokens from existing processes; this is known as token stealing. These token can then be applied to an existing process (i.e. [Token Impersonation/Theft](https://attack.mitre.org/techniques/T1134/001)) or used to spawn a new process (i.e. [Create Process with Token](https://attack.mitre.org/techniques/T1134/002)). An adversary must already be in a privileged user context (i.e. administrator) to steal a token. However, adversaries commonly use token stealing to elevate their security context from the administrator level to the SYSTEM level. An adversary can then use a token to authenticate to a remote system as the account for that token if the account has appropriate permissions on the remote system.(Citation: Pentestlab Token Manipulation)\n\nAny standard user can use the runas command, and the Windows API functions, to create impersonation tokens; it does not require access to an administrator account. There are also other mechanisms, such as Active Directory fields, that can be used to modify access tokens.",
+ "name": "Access Token Manipulation",
+ "created_by_ref": "identity--c78cb6e5-0c4b-4611-8297-d1b8b55e40b5",
+ "id": "attack-pattern--dcaa092b-7de9-4a21-977f-7fcb77e89c48",
+ "type": "attack-pattern",
+ "kill_chain_phases": [
+ {
+ "kill_chain_name": "mitre-attack",
+ "phase_name": "defense-evasion"
+ },
+ {
+ "kill_chain_name": "mitre-attack",
+ "phase_name": "privilege-escalation"
+ }
+ ],
+ "modified": "2020-04-16T19:37:02.355Z",
+ "created": "2017-12-14T16:46:06.044Z",
+ "x_mitre_defense_bypassed": [
+ "Windows User Account Control",
+ "System access controls",
+ "File system access controls",
+ "Heuristic Detection",
+ "Host forensic analysis"
+ ],
+ "x_mitre_is_subtechnique": false,
+ "x_mitre_version": "2.0",
+ "x_mitre_contributors": [
+ "Tom Ueltschi @c_APT_ure",
+ "Travis Smith, Tripwire",
+ "Robby Winchester, @robwinchester3",
+ "Jared Atkinson, @jaredcatkinson"
+ ],
+ "x_mitre_data_sources": [
+ "Authentication logs",
+ "Windows event logs",
+ "API monitoring",
+ "Access tokens",
+ "Process monitoring",
+ "Process command-line parameters"
+ ],
+ "x_mitre_detection": "If an adversary is using a standard command-line shell, analysts can detect token manipulation by auditing command-line activity. Specifically, analysts should look for use of the runas command. Detailed command-line logging is not enabled by default in Windows.(Citation: Microsoft Command-line Logging)\n\nIf an adversary is using a payload that calls the Windows token APIs directly, analysts can detect token manipulation only through careful analysis of user network activity, examination of running processes, and correlation with other endpoint and network behavior. \n\nThere are many Windows API calls a payload can take advantage of to manipulate access tokens (e.g., LogonUser (Citation: Microsoft LogonUser), DuplicateTokenEx (Citation: Microsoft DuplicateTokenEx), and ImpersonateLoggedOnUser (Citation: Microsoft ImpersonateLoggedOnUser)). Please see the referenced Windows API pages for more information.\n\nQuery systems for process and thread token information and look for inconsistencies such as user owns processes impersonating the local SYSTEM account.(Citation: BlackHat Atkinson Winchester Token Manipulation)\n\nLook for inconsistencies between the various fields that store PPID information, such as the EventHeader ProcessId from data collected via Event Tracing for Windows (ETW), Creator Process ID/Name from Windows event logs, and the ProcessID and ParentProcessID (which are also produced from ETW and other utilities such as Task Manager and Process Explorer). The ETW provided EventHeader ProcessId identifies the actual parent process.",
+ "x_mitre_permissions_required": [
+ "User",
+ "Administrator"
+ ],
+ "x_mitre_effective_permissions": [
+ "SYSTEM"
+ ],
+ "x_mitre_platforms": [
+ "Windows"
+ ]
+ },
+ {
+ "external_references": [
+ {
+ "source_name": "mitre-attack",
+ "external_id": "T1015",
+ "url": "https://attack.mitre.org/techniques/T1015"
+ },
+ {
+ "external_id": "CAPEC-558",
+ "source_name": "capec",
+ "url": "https://capec.mitre.org/data/definitions/558.html"
+ },
+ {
+ "url": "https://www.fireeye.com/blog/threat-research/2012/08/hikit-rootkit-advanced-persistent-attack-techniques-part-1.html",
+ "description": "Glyer, C., Kazanciyan, R. (2012, August 20). The “Hikit” Rootkit: Advanced and Persistent Attack Techniques (Part 1). Retrieved June 6, 2016.",
+ "source_name": "FireEye Hikit Rootkit"
+ },
+ {
+ "url": "https://www.slideshare.net/DennisMaldonado5/sticky-keys-to-the-kingdom",
+ "description": "Maldonado, D., McGuffin, T. (2016, August 6). Sticky Keys to the Kingdom. Retrieved July 5, 2017.",
+ "source_name": "DEFCON2016 Sticky Keys"
+ },
+ {
+ "url": "http://blog.crowdstrike.com/registry-analysis-with-crowdresponse/",
+ "description": "Tilbury, C. (2014, August 28). Registry Analysis with CrowdResponse. Retrieved November 12, 2014.",
+ "source_name": "Tilbury 2014"
+ }
+ ],
+ "name": "Accessibility Features",
+ "id": "attack-pattern--9b99b83a-1aac-4e29-b975-b374950551a3",
+ "revoked": true,
+ "type": "attack-pattern",
+ "modified": "2020-05-13T20:37:30.008Z",
+ "created": "2017-05-31T21:30:26.946Z"
+ }
+]
+}
\ No newline at end of file
|