From d1935bc6adbf9c2bc3b7fa9dab99fad0d9cf05e3 Mon Sep 17 00:00:00 2001 From: Robin Riclet Date: Wed, 11 Nov 2020 10:50:39 +0100 Subject: [PATCH 01/93] Added taxonomy to database's schema --- .../controllers/v1/TheHiveQueryExecutor.scala | 2 +- .../org/thp/thehive/models/Organisation.scala | 3 ++ .../models/TheHiveSchemaDefinition.scala | 32 ++++++++++++++++++- 3 files changed, 35 insertions(+), 2 deletions(-) diff --git a/thehive/app/org/thp/thehive/controllers/v1/TheHiveQueryExecutor.scala b/thehive/app/org/thp/thehive/controllers/v1/TheHiveQueryExecutor.scala index 2ee8a82cf2..bbc3b86b81 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/TheHiveQueryExecutor.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/TheHiveQueryExecutor.scala @@ -2,7 +2,7 @@ package org.thp.thehive.controllers.v1 import javax.inject.{Inject, Named, Singleton} import org.thp.scalligraph.controllers.{FObject, FieldsParser} -import org.thp.scalligraph.models.{Database, UMapping} +import org.thp.scalligraph.models.Database import org.thp.scalligraph.query._ case class OutputParam(from: Long, to: Long, extraData: Set[String]) diff --git a/thehive/app/org/thp/thehive/models/Organisation.scala b/thehive/app/org/thp/thehive/models/Organisation.scala index 41ca8dd5c2..b7ad03b1fc 100644 --- a/thehive/app/org/thp/thehive/models/Organisation.scala +++ b/thehive/app/org/thp/thehive/models/Organisation.scala @@ -20,6 +20,9 @@ case class OrganisationShare() @BuildEdgeEntity[Organisation, Organisation] case class OrganisationOrganisation() +@BuildEdgeEntity[Organisation, Taxonomy] +case class OrganisationTaxonomy() + case class RichOrganisation(organisation: Organisation with Entity, links: Seq[Organisation with Entity]) { def name: String = organisation.name def description: String = organisation.description diff --git a/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala b/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala index 62683434d6..38953a7a51 100644 --- a/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala +++ b/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala @@ -9,9 +9,11 @@ import org.janusgraph.graphdb.types.TypeDefinitionCategory import org.reflections.Reflections import org.reflections.scanners.SubTypesScanner import org.reflections.util.ConfigurationBuilder +import org.thp.scalligraph.{EntityId, RichSeq} import org.thp.scalligraph.auth.AuthContext import org.thp.scalligraph.janus.JanusDatabase import org.thp.scalligraph.models._ +import org.thp.scalligraph.traversal.Traversal import org.thp.scalligraph.traversal.TraversalOps._ import play.api.Logger @@ -74,8 +76,36 @@ class TheHiveSchemaDefinition @Inject() extends Schema with UpdatableSchema { Success(()) } .addProperty[Option[Boolean]]("Observable", "ignoreSimilarity") + // Taxonomies + .addVertexModel[String]("Taxonomy", Seq("namespace")) + .dbOperation[Database]("Add Custom taxonomy vertex for each Organisation") { db => + db.tryTransaction { g => + db.labelFilter("Organisation")(Traversal.V()(g)).toIterator.toTry { o => + val taxoVertex = g.addVertex("Taxonomy") + taxoVertex.property("namespace", "Custom") + o.addEdge("OrganisationTaxonomy", taxoVertex) + Success(()) + } + }.map(_ => ()) + } + .dbOperation[Database]("Add each tag to its Organisation's Custom taxonomy") { db => + db.tryTransaction { implicit g => + db.labelFilter("Organisation")(Traversal.V()).toIterator.toTry { o => + val customTaxo = Traversal.V(EntityId(o.id())).out("OrganisationTaxonomy").unsafeHas("namespace", 
"Custom").head + Traversal.V(EntityId(o.id())).unionFlat( + _.out("OrganisationShare").out("ShareCase").out("CaseTag"), + _.out("OrganisationShare").out("ShareObservable").out("ObservableTag"), + _.in("AlertOrganisation").out("AlertTag"), + _.in("CaseTemplateOrganisation").out("CaseTemplateTag") + ).toSeq.foreach(tag => + customTaxo.addEdge("TaxonomyTag", tag) + ) + Success(()) + } + }.map(_ => ()) + } - val reflectionClasses = new Reflections( + val reflectionClasses = new Reflections( new ConfigurationBuilder() .forPackages("org.thp.thehive.models") .addClassLoader(getClass.getClassLoader) From 5f9d4b991f664d02d4c254c40295d04edd5a06a4 Mon Sep 17 00:00:00 2001 From: Robin Riclet Date: Thu, 12 Nov 2020 11:13:26 +0100 Subject: [PATCH 02/93] WIP adding taxonomy routes & mapping --- .../org/thp/thehive/dto/v0/Taxonomy.scala | 43 +++++++++++ .../org/thp/thehive/dto/v1/Taxonomy.scala | 43 +++++++++++ .../thp/thehive/controllers/v0/CaseCtrl.scala | 23 ++++-- .../thehive/controllers/v0/Conversion.scala | 21 ++++++ .../thehive/controllers/v0/TaxonomyCtrl.scala | 74 +++++++++++++++++++ .../thehive/controllers/v1/Conversion.scala | 22 ++++++ .../org/thp/thehive/models/Permissions.scala | 2 + .../app/org/thp/thehive/models/Taxonomy.scala | 36 +++++++++ .../thehive/services/OrganisationSrv.scala | 2 + .../thp/thehive/services/TaxonomySrv.scala | 66 +++++++++++++++++ 10 files changed, 324 insertions(+), 8 deletions(-) create mode 100644 dto/src/main/scala/org/thp/thehive/dto/v0/Taxonomy.scala create mode 100644 dto/src/main/scala/org/thp/thehive/dto/v1/Taxonomy.scala create mode 100644 thehive/app/org/thp/thehive/controllers/v0/TaxonomyCtrl.scala create mode 100644 thehive/app/org/thp/thehive/models/Taxonomy.scala create mode 100644 thehive/app/org/thp/thehive/services/TaxonomySrv.scala diff --git a/dto/src/main/scala/org/thp/thehive/dto/v0/Taxonomy.scala b/dto/src/main/scala/org/thp/thehive/dto/v0/Taxonomy.scala new file mode 100644 index 0000000000..a5af5ff61e --- /dev/null +++ b/dto/src/main/scala/org/thp/thehive/dto/v0/Taxonomy.scala @@ -0,0 +1,43 @@ +package org.thp.thehive.dto.v0 + +import java.util.Date + +import play.api.libs.json.{Json, OFormat, OWrites} + +case class InputTaxonomy ( + namespace: String, + description: String, + version: Int, + predicates: Seq[InputPredicate], + values: Option[Seq[InputValue]] +) + +case class InputPredicate(value: String, expanded: String) + +case class InputValue(predicate: String, entry: Seq[InputPredicate]) + +object InputTaxonomy { + implicit val writes: OWrites[InputTaxonomy] = Json.writes[InputTaxonomy] +} + +case class OutputTaxonomy( + _id: String, + _type: String, + _createdBy: String, + _updatedBy: Option[String] = None, + _createdAt: Date, + _updatedAt: Option[Date] = None, + namespace: String, + description: String, + version: Int, + predicates: Seq[OutputPredicate], + values: Option[Seq[OutputValue]] +) + +case class OutputPredicate(value: String, expanded: String) + +case class OutputValue(predicate: String, entry: Seq[OutputPredicate]) + +object OutputTaxonomy { + implicit val format: OFormat[OutputTaxonomy] = Json.format[OutputTaxonomy] +} \ No newline at end of file diff --git a/dto/src/main/scala/org/thp/thehive/dto/v1/Taxonomy.scala b/dto/src/main/scala/org/thp/thehive/dto/v1/Taxonomy.scala new file mode 100644 index 0000000000..1c6a1b2bc9 --- /dev/null +++ b/dto/src/main/scala/org/thp/thehive/dto/v1/Taxonomy.scala @@ -0,0 +1,43 @@ +package org.thp.thehive.dto.v1 + +import java.util.Date + +import play.api.libs.json.{Json, OFormat, OWrites} 
+ +case class InputTaxonomy ( + namespace: String, + description: String, + version: Int, + predicates: Seq[InputPredicate], + values: Option[Seq[InputValue]] +) + +case class InputPredicate(value: String, expanded: String) + +case class InputValue(predicate: String, entry: Seq[InputPredicate]) + +object InputTaxonomy { + implicit val writes: OWrites[InputTaxonomy] = Json.writes[InputTaxonomy] +} + +case class OutputTaxonomy( + _id: String, + _type: String, + _createdBy: String, + _updatedBy: Option[String] = None, + _createdAt: Date, + _updatedAt: Option[Date] = None, + namespace: String, + description: String, + version: Int, + predicates: Seq[OutputPredicate], + values: Option[Seq[OutputValue]] +) + +case class OutputPredicate(value: String, expanded: String) + +case class OutputValue(predicate: String, entry: Seq[OutputPredicate]) + +object OutputTaxonomy { + implicit val format: OFormat[OutputTaxonomy] = Json.format[OutputTaxonomy] +} \ No newline at end of file diff --git a/thehive/app/org/thp/thehive/controllers/v0/CaseCtrl.scala b/thehive/app/org/thp/thehive/controllers/v0/CaseCtrl.scala index aa94cfbd82..7c6b8db99e 100644 --- a/thehive/app/org/thp/thehive/controllers/v0/CaseCtrl.scala +++ b/thehive/app/org/thp/thehive/controllers/v0/CaseCtrl.scala @@ -195,13 +195,17 @@ class PublicCase @Inject() ( with CaseRenderer { override val entityName: String = "case" override val initialQuery: Query = - Query.init[Traversal.V[Case]]("listCase", (graph, authContext) => organisationSrv.get(authContext.organisation)(graph).cases) - override val getQuery: ParamQuery[EntityIdOrName] = Query.initWithParam[EntityIdOrName, Traversal.V[Case]]( - "getCase", - FieldsParser[EntityIdOrName], - (idOrName, graph, authContext) => caseSrv.get(idOrName)(graph).visible(authContext) - ) - override val pageQuery: ParamQuery[OutputParam] = Query.withParam[OutputParam, Traversal.V[Case], IteratorOutput]( + Query.init[Traversal.V[Case]]("listCase", (graph, authContext) => + organisationSrv.get(authContext.organisation)(graph).cases + ) + override val getQuery: ParamQuery[EntityIdOrName] = + Query.initWithParam[EntityIdOrName, Traversal.V[Case]]( + "getCase", + FieldsParser[EntityIdOrName], + (idOrName, graph, authContext) => caseSrv.get(idOrName)(graph).visible(authContext) + ) + override val pageQuery: ParamQuery[OutputParam] = + Query.withParam[OutputParam, Traversal.V[Case], IteratorOutput]( "page", FieldsParser[OutputParam], { @@ -215,7 +219,10 @@ class PublicCase @Inject() ( } } ) - override val outputQuery: Query = Query.outputWithContext[RichCase, Traversal.V[Case]]((caseSteps, authContext) => caseSteps.richCase(authContext)) + override val outputQuery: + Query = Query.outputWithContext[RichCase, Traversal.V[Case]]((caseSteps, authContext) => + caseSteps.richCase(authContext) + ) override val extraQueries: Seq[ParamQuery[_]] = Seq( Query[Traversal.V[Case], Traversal.V[Observable]]("observables", (caseSteps, authContext) => caseSteps.observables(authContext)), Query[Traversal.V[Case], Traversal.V[Task]]("tasks", (caseSteps, authContext) => caseSteps.tasks(authContext)) diff --git a/thehive/app/org/thp/thehive/controllers/v0/Conversion.scala b/thehive/app/org/thp/thehive/controllers/v0/Conversion.scala index f972afd972..345a2a643e 100644 --- a/thehive/app/org/thp/thehive/controllers/v0/Conversion.scala +++ b/thehive/app/org/thp/thehive/controllers/v0/Conversion.scala @@ -571,6 +571,27 @@ object Conversion { .transform } + implicit class InputTaxonomyOps(inputTaxonomy: InputTaxonomy) { + + def toTaxonomy: 
Taxonomy = + inputTaxonomy + .into[Taxonomy] + .withFieldComputed(_.namespace, _.namespace) + .withFieldComputed(_.description, _.description) + .withFieldComputed(_.version, _.version) + .transform + } + + implicit val taxonomyOutput: Renderer.Aux[RichTaxonomy, OutputTaxonomy] = Renderer.toJson[RichTaxonomy, OutputTaxonomy]( + _.into[OutputTaxonomy] + .withFieldComputed(_.namespace, _.namespace) + .withFieldComputed(_.description, _.description) + .withFieldComputed(_.version, _.version) + .withFieldComputed(_.predicates, _.predicates) + .withFieldComputed(_.values, _.values) + .transform + ) + implicit class InputUserOps(inputUser: InputUser) { def toUser: User = diff --git a/thehive/app/org/thp/thehive/controllers/v0/TaxonomyCtrl.scala b/thehive/app/org/thp/thehive/controllers/v0/TaxonomyCtrl.scala new file mode 100644 index 0000000000..ac6622e6d2 --- /dev/null +++ b/thehive/app/org/thp/thehive/controllers/v0/TaxonomyCtrl.scala @@ -0,0 +1,74 @@ +package org.thp.thehive.controllers.v0 + +import javax.inject.{Inject, Named, Singleton} +import org.thp.scalligraph.EntityIdOrName +import org.thp.scalligraph.controllers.{Entrypoint, FFile, FieldsParser} +import org.thp.scalligraph.models.{Database, UMapping} +import org.thp.scalligraph.query.{ParamQuery, PublicProperties, PublicPropertyListBuilder, Query, QueryExecutor} +import org.thp.scalligraph.traversal.TraversalOps.TraversalOpsDefs +import org.thp.scalligraph.traversal.{IteratorOutput, Traversal} +import org.thp.thehive.controllers.v0.Conversion.taxonomyOutput +import org.thp.thehive.dto.v1.InputTaxonomy +import org.thp.thehive.models.{Permissions, RichTaxonomy, Taxonomy} +import org.thp.thehive.services.{OrganisationSrv, TaxonomySrv} +import org.thp.thehive.services.OrganisationOps._ +import org.thp.thehive.services.TaxonomyOps._ +import play.api.mvc.{Action, AnyContent} + +class TaxonomyCtrl @Inject() ( + override val entrypoint: Entrypoint, + @Named("with-thehive-schema") override val db: Database, + @Named("v0") override val queryExecutor: QueryExecutor, + override val publicData: PublicTaxonomy +) extends QueryCtrl { + def importTaxonomy: Action[AnyContent] = + entrypoint("import taxonomy") + .extract("file", FieldsParser.file.optional.on("file")) + .extract("taxonomy", FieldsParser[InputTaxonomy]) + .authPermittedTransaction(db, Permissions.manageTaxonomy) { implicit request => implicit graph => + val file: Option[FFile] = request.body("file") + val taxonomy: InputTaxonomy = request.body("taxonomy") + + // Create Taxonomy vertex + // Create Tags associated + // Add edge orgaTaxo + + ??? + } + +} + +@Singleton +class PublicTaxonomy @Inject() ( + taxonomySrv: TaxonomySrv, + organisationSrv: OrganisationSrv +) extends PublicData { + override val entityName: String = "taxonomy" + override val initialQuery: Query = + Query.init[Traversal.V[Taxonomy]]("listTaxonomy", (graph, authContext) => + organisationSrv.get(authContext.organisation)(graph).taxonomies + ) + override val pageQuery: ParamQuery[OutputParam] = Query.withParam[OutputParam, Traversal.V[Taxonomy], IteratorOutput]( + "page", + FieldsParser[OutputParam], + (range, taxoSteps, _) => taxoSteps.page(range.from, range.to, withTotal = true)(???) 
+ ) + override val outputQuery: Query = Query.outputWithContext[RichTaxonomy, Traversal.V[Taxonomy]]((taxonomySteps, authContext) => + taxonomySteps.richTaxonomy(authContext) + ) + override val getQuery: ParamQuery[EntityIdOrName] = + Query.initWithParam[EntityIdOrName, Traversal.V[Taxonomy]]( + "getTaxonomy", + FieldsParser[EntityIdOrName], + (idOrName, graph, authContext) => taxonomySrv.get(idOrName)(graph).visible(authContext) + ) + override val publicProperties: PublicProperties = + PublicPropertyListBuilder[Taxonomy] + .property("namespace", UMapping.string)(_.field.readonly) + .property("description", UMapping.string)(_.field.readonly) + .property("version", UMapping.int)(_.field.readonly) + // Predicates ? + // Values ? + .build + +} \ No newline at end of file diff --git a/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala b/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala index 5c569fb0d0..759df983db 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala @@ -5,6 +5,7 @@ import java.util.Date import io.scalaland.chimney.dsl._ import org.thp.scalligraph.controllers.Renderer import org.thp.scalligraph.models.Entity +import org.thp.thehive.dto.v0.{InputTaxonomy, OutputTaxonomy} import org.thp.thehive.dto.v1._ import org.thp.thehive.models._ import play.api.libs.json.{JsObject, JsValue, Json} @@ -251,6 +252,27 @@ object Conversion { .transform } + implicit class InputTaxonomyOps(inputTaxonomy: InputTaxonomy) { + + def toTaxonomy: Taxonomy = + inputTaxonomy + .into[Taxonomy] + .withFieldComputed(_.namespace, _.namespace) + .withFieldComputed(_.description, _.description) + .withFieldComputed(_.version, _.version) + .transform + } + + implicit val taxonomyOutput: Renderer.Aux[RichTaxonomy, OutputTaxonomy] = Renderer.toJson[RichTaxonomy, OutputTaxonomy]( + _.into[OutputTaxonomy] + .withFieldComputed(_.namespace, _.namespace) + .withFieldComputed(_.description, _.description) + .withFieldComputed(_.version, _.version) + .withFieldComputed(_.predicates, _.predicates) + .withFieldComputed(_.values, _.values) + .transform + ) + implicit class InputUserOps(inputUser: InputUser) { def toUser: User = diff --git a/thehive/app/org/thp/thehive/models/Permissions.scala b/thehive/app/org/thp/thehive/models/Permissions.scala index bf10a22dde..81ea621740 100644 --- a/thehive/app/org/thp/thehive/models/Permissions.scala +++ b/thehive/app/org/thp/thehive/models/Permissions.scala @@ -14,6 +14,7 @@ object Permissions extends Perms { lazy val manageAction: PermissionDesc = PermissionDesc("manageAction", "Run Cortex responders ", "organisation") lazy val manageConfig: PermissionDesc = PermissionDesc("manageConfig", "Manage configurations", "organisation", "admin") lazy val manageProfile: PermissionDesc = PermissionDesc("manageProfile", "Manage user profiles", "admin") + lazy val manageTaxonomy: PermissionDesc = PermissionDesc("manageTaxonomy", "Manage taxonomies", "admin") lazy val manageTag: PermissionDesc = PermissionDesc("manageTag", "Manage tags", "admin") lazy val manageCustomField: PermissionDesc = PermissionDesc("manageCustomField", "Manage custom fields", "admin") lazy val manageShare: PermissionDesc = PermissionDesc("manageShare", "Manage shares", "organisation") @@ -34,6 +35,7 @@ object Permissions extends Perms { manageAction, manageConfig, manageProfile, + manageTaxonomy, manageTag, manageCustomField, manageShare, diff --git a/thehive/app/org/thp/thehive/models/Taxonomy.scala 
b/thehive/app/org/thp/thehive/models/Taxonomy.scala new file mode 100644 index 0000000000..3ed3c6f0cb --- /dev/null +++ b/thehive/app/org/thp/thehive/models/Taxonomy.scala @@ -0,0 +1,36 @@ +package org.thp.thehive.models + +import java.util.Date + +import org.thp.scalligraph.models.Entity +import org.thp.scalligraph.{BuildEdgeEntity, BuildVertexEntity, EntityId} + +@BuildVertexEntity +case class Taxonomy( + namespace: String, + description: String, + version: Int +) + +case class Predicate(value: String) + +case class Value(predicate: String, entry: Seq[String]) + +@BuildEdgeEntity[Taxonomy, Tag] +case class TaxonomyTag() + +case class RichTaxonomy( + taxonomy: Taxonomy with Entity, + predicates: Seq[Predicate], + values: Seq[Value] +) { + def _id: EntityId = taxonomy._id + def _createdBy: String = taxonomy._createdBy + def _updatedBy: Option[String] = taxonomy._updatedBy + def _createdAt: Date = taxonomy._createdAt + def _updatedAt: Option[Date] = taxonomy._updatedAt + def namespace: String = taxonomy.namespace + def description: String = taxonomy.description + def version: Int = taxonomy.version + +} diff --git a/thehive/app/org/thp/thehive/services/OrganisationSrv.scala b/thehive/app/org/thp/thehive/services/OrganisationSrv.scala index 69af5f84de..e74b249a54 100644 --- a/thehive/app/org/thp/thehive/services/OrganisationSrv.scala +++ b/thehive/app/org/thp/thehive/services/OrganisationSrv.scala @@ -138,6 +138,8 @@ object OrganisationOps { def shares: Traversal.V[Share] = traversal.out[OrganisationShare].v[Share] + def taxonomies: Traversal.V[Taxonomy] = traversal.out[OrganisationTaxonomy].v[Taxonomy] + def caseTemplates: Traversal.V[CaseTemplate] = traversal.in[CaseTemplateOrganisation].v[CaseTemplate] def users(requiredPermission: Permission): Traversal.V[User] = diff --git a/thehive/app/org/thp/thehive/services/TaxonomySrv.scala b/thehive/app/org/thp/thehive/services/TaxonomySrv.scala new file mode 100644 index 0000000000..7bd16db5c6 --- /dev/null +++ b/thehive/app/org/thp/thehive/services/TaxonomySrv.scala @@ -0,0 +1,66 @@ +package org.thp.thehive.services + +import java.util.{Map => JMap} + +import javax.inject.{Inject, Named} +import org.apache.tinkerpop.gremlin.structure.Graph +import org.thp.scalligraph.{EntityIdOrName, RichSeq} +import org.thp.scalligraph.auth.AuthContext +import org.thp.scalligraph.models.{Database, Entity} +import org.thp.scalligraph.services.{EdgeSrv, VertexSrv} +import org.thp.scalligraph.traversal.{Converter, Traversal} +import org.thp.scalligraph.traversal.TraversalOps.TraversalOpsDefs +import org.thp.thehive.models.{Organisation, OrganisationTaxonomy, Predicate, RichTaxonomy, Tag, Taxonomy, TaxonomyTag, Value} +import org.thp.thehive.services.OrganisationOps._ + +import scala.util.Try + +@Singleton +class TaxonomySrv @Inject() ( +)(implicit + @Named("with-thehive-schema") db: Database +) extends VertexSrv[Taxonomy] { + + val taxonomyTagSrv = new EdgeSrv[TaxonomyTag, Taxonomy, Tag] + + def create(taxo: Taxonomy, tags: Seq[Tag with Entity])(implicit graph: Graph, authContext: AuthContext): Try[RichTaxonomy] = + for { + taxonomy <- createEntity(taxo) + _ <- tags.toTry(t => taxonomyTagSrv.create(TaxonomyTag(), taxonomy, t)) + richTaxonomy <- RichTaxonomy(taxonomy, ???, ???) 
+ } yield richTaxonomy +} + +object TaxonomyOps { + implicit class TaxonomyOpsDefs(traversal: Traversal.V[Taxonomy]) { + + def visible(implicit authContext: AuthContext): Traversal.V[Taxonomy] = visible(authContext.organisation) + + def visible(organisationIdOrName: EntityIdOrName): Traversal.V[Taxonomy] = + traversal.filter(_.organisations.get(organisationIdOrName)) + + def organisations: Traversal.V[Organisation] = traversal.in[OrganisationTaxonomy].v[Organisation] + + def tags: Traversal.V[Tag] = traversal.out[TaxonomyTag].v[Tag] + + def richTaxonomy(implicit authContext: AuthContext): Traversal[RichTaxonomy, JMap[String, Any], Converter[RichTaxonomy, JMap[String, Any]]] = + traversal + .project( + _.by + .by(_.tags.fold) + ) + .domainMap { + case (taxonomy, tags) => + val predicates = tags.map(t => Predicate(t.predicate)).distinct + val values = predicates.map { p => + val tagValues = tags + .filter(_.predicate == p.value) + .filter(_.value.isDefined) + .map(_.value.get) + Value(p.value, tagValues) + } + RichTaxonomy(taxonomy, predicates, values) + } + + } +} From 95a425dea7e76c5ba96d1b68e9ea793c33ab5718 Mon Sep 17 00:00:00 2001 From: Robin Riclet Date: Thu, 12 Nov 2020 18:52:40 +0100 Subject: [PATCH 03/93] WIP Continued mapping for taxonomies --- .../org/thp/thehive/dto/v0/Taxonomy.scala | 43 -------- .../org/thp/thehive/dto/v1/Taxonomy.scala | 17 +-- .../thehive/controllers/v0/Conversion.scala | 21 ---- .../thehive/controllers/v0/TaxonomyCtrl.scala | 74 ------------- .../thehive/controllers/v1/Conversion.scala | 26 +++-- .../thehive/controllers/v1/Properties.scala | 10 ++ .../thehive/controllers/v1/TaxonomyCtrl.scala | 103 ++++++++++++++++++ .../app/org/thp/thehive/models/Taxonomy.scala | 8 +- .../app/org/thp/thehive/services/TagSrv.scala | 12 +- .../thp/thehive/services/TaxonomySrv.scala | 42 +++---- 10 files changed, 168 insertions(+), 188 deletions(-) delete mode 100644 dto/src/main/scala/org/thp/thehive/dto/v0/Taxonomy.scala delete mode 100644 thehive/app/org/thp/thehive/controllers/v0/TaxonomyCtrl.scala create mode 100644 thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala diff --git a/dto/src/main/scala/org/thp/thehive/dto/v0/Taxonomy.scala b/dto/src/main/scala/org/thp/thehive/dto/v0/Taxonomy.scala deleted file mode 100644 index a5af5ff61e..0000000000 --- a/dto/src/main/scala/org/thp/thehive/dto/v0/Taxonomy.scala +++ /dev/null @@ -1,43 +0,0 @@ -package org.thp.thehive.dto.v0 - -import java.util.Date - -import play.api.libs.json.{Json, OFormat, OWrites} - -case class InputTaxonomy ( - namespace: String, - description: String, - version: Int, - predicates: Seq[InputPredicate], - values: Option[Seq[InputValue]] -) - -case class InputPredicate(value: String, expanded: String) - -case class InputValue(predicate: String, entry: Seq[InputPredicate]) - -object InputTaxonomy { - implicit val writes: OWrites[InputTaxonomy] = Json.writes[InputTaxonomy] -} - -case class OutputTaxonomy( - _id: String, - _type: String, - _createdBy: String, - _updatedBy: Option[String] = None, - _createdAt: Date, - _updatedAt: Option[Date] = None, - namespace: String, - description: String, - version: Int, - predicates: Seq[OutputPredicate], - values: Option[Seq[OutputValue]] -) - -case class OutputPredicate(value: String, expanded: String) - -case class OutputValue(predicate: String, entry: Seq[OutputPredicate]) - -object OutputTaxonomy { - implicit val format: OFormat[OutputTaxonomy] = Json.format[OutputTaxonomy] -} \ No newline at end of file diff --git 
a/dto/src/main/scala/org/thp/thehive/dto/v1/Taxonomy.scala b/dto/src/main/scala/org/thp/thehive/dto/v1/Taxonomy.scala index 1c6a1b2bc9..a2d05e879c 100644 --- a/dto/src/main/scala/org/thp/thehive/dto/v1/Taxonomy.scala +++ b/dto/src/main/scala/org/thp/thehive/dto/v1/Taxonomy.scala @@ -4,17 +4,18 @@ import java.util.Date import play.api.libs.json.{Json, OFormat, OWrites} +// TODO make sure of input format case class InputTaxonomy ( namespace: String, description: String, version: Int, - predicates: Seq[InputPredicate], - values: Option[Seq[InputValue]] + predicates: Seq[String], + values: Option[Seq[InputEntry]] ) -case class InputPredicate(value: String, expanded: String) +case class InputEntry(predicate: String, entry: Seq[InputValue]) -case class InputValue(predicate: String, entry: Seq[InputPredicate]) +case class InputValue(value: String, expanded: String, colour: Option[String]) object InputTaxonomy { implicit val writes: OWrites[InputTaxonomy] = Json.writes[InputTaxonomy] @@ -30,13 +31,13 @@ case class OutputTaxonomy( namespace: String, description: String, version: Int, - predicates: Seq[OutputPredicate], - values: Option[Seq[OutputValue]] + predicates: Seq[String], + values: Option[Seq[OutputEntry]] ) -case class OutputPredicate(value: String, expanded: String) +case class OutputEntry(predicate: String, entry: Seq[OutputValue]) -case class OutputValue(predicate: String, entry: Seq[OutputPredicate]) +case class OutputValue(value: String, expanded: String) object OutputTaxonomy { implicit val format: OFormat[OutputTaxonomy] = Json.format[OutputTaxonomy] diff --git a/thehive/app/org/thp/thehive/controllers/v0/Conversion.scala b/thehive/app/org/thp/thehive/controllers/v0/Conversion.scala index 345a2a643e..f972afd972 100644 --- a/thehive/app/org/thp/thehive/controllers/v0/Conversion.scala +++ b/thehive/app/org/thp/thehive/controllers/v0/Conversion.scala @@ -571,27 +571,6 @@ object Conversion { .transform } - implicit class InputTaxonomyOps(inputTaxonomy: InputTaxonomy) { - - def toTaxonomy: Taxonomy = - inputTaxonomy - .into[Taxonomy] - .withFieldComputed(_.namespace, _.namespace) - .withFieldComputed(_.description, _.description) - .withFieldComputed(_.version, _.version) - .transform - } - - implicit val taxonomyOutput: Renderer.Aux[RichTaxonomy, OutputTaxonomy] = Renderer.toJson[RichTaxonomy, OutputTaxonomy]( - _.into[OutputTaxonomy] - .withFieldComputed(_.namespace, _.namespace) - .withFieldComputed(_.description, _.description) - .withFieldComputed(_.version, _.version) - .withFieldComputed(_.predicates, _.predicates) - .withFieldComputed(_.values, _.values) - .transform - ) - implicit class InputUserOps(inputUser: InputUser) { def toUser: User = diff --git a/thehive/app/org/thp/thehive/controllers/v0/TaxonomyCtrl.scala b/thehive/app/org/thp/thehive/controllers/v0/TaxonomyCtrl.scala deleted file mode 100644 index ac6622e6d2..0000000000 --- a/thehive/app/org/thp/thehive/controllers/v0/TaxonomyCtrl.scala +++ /dev/null @@ -1,74 +0,0 @@ -package org.thp.thehive.controllers.v0 - -import javax.inject.{Inject, Named, Singleton} -import org.thp.scalligraph.EntityIdOrName -import org.thp.scalligraph.controllers.{Entrypoint, FFile, FieldsParser} -import org.thp.scalligraph.models.{Database, UMapping} -import org.thp.scalligraph.query.{ParamQuery, PublicProperties, PublicPropertyListBuilder, Query, QueryExecutor} -import org.thp.scalligraph.traversal.TraversalOps.TraversalOpsDefs -import org.thp.scalligraph.traversal.{IteratorOutput, Traversal} -import 
org.thp.thehive.controllers.v0.Conversion.taxonomyOutput -import org.thp.thehive.dto.v1.InputTaxonomy -import org.thp.thehive.models.{Permissions, RichTaxonomy, Taxonomy} -import org.thp.thehive.services.{OrganisationSrv, TaxonomySrv} -import org.thp.thehive.services.OrganisationOps._ -import org.thp.thehive.services.TaxonomyOps._ -import play.api.mvc.{Action, AnyContent} - -class TaxonomyCtrl @Inject() ( - override val entrypoint: Entrypoint, - @Named("with-thehive-schema") override val db: Database, - @Named("v0") override val queryExecutor: QueryExecutor, - override val publicData: PublicTaxonomy -) extends QueryCtrl { - def importTaxonomy: Action[AnyContent] = - entrypoint("import taxonomy") - .extract("file", FieldsParser.file.optional.on("file")) - .extract("taxonomy", FieldsParser[InputTaxonomy]) - .authPermittedTransaction(db, Permissions.manageTaxonomy) { implicit request => implicit graph => - val file: Option[FFile] = request.body("file") - val taxonomy: InputTaxonomy = request.body("taxonomy") - - // Create Taxonomy vertex - // Create Tags associated - // Add edge orgaTaxo - - ??? - } - -} - -@Singleton -class PublicTaxonomy @Inject() ( - taxonomySrv: TaxonomySrv, - organisationSrv: OrganisationSrv -) extends PublicData { - override val entityName: String = "taxonomy" - override val initialQuery: Query = - Query.init[Traversal.V[Taxonomy]]("listTaxonomy", (graph, authContext) => - organisationSrv.get(authContext.organisation)(graph).taxonomies - ) - override val pageQuery: ParamQuery[OutputParam] = Query.withParam[OutputParam, Traversal.V[Taxonomy], IteratorOutput]( - "page", - FieldsParser[OutputParam], - (range, taxoSteps, _) => taxoSteps.page(range.from, range.to, withTotal = true)(???) - ) - override val outputQuery: Query = Query.outputWithContext[RichTaxonomy, Traversal.V[Taxonomy]]((taxonomySteps, authContext) => - taxonomySteps.richTaxonomy(authContext) - ) - override val getQuery: ParamQuery[EntityIdOrName] = - Query.initWithParam[EntityIdOrName, Traversal.V[Taxonomy]]( - "getTaxonomy", - FieldsParser[EntityIdOrName], - (idOrName, graph, authContext) => taxonomySrv.get(idOrName)(graph).visible(authContext) - ) - override val publicProperties: PublicProperties = - PublicPropertyListBuilder[Taxonomy] - .property("namespace", UMapping.string)(_.field.readonly) - .property("description", UMapping.string)(_.field.readonly) - .property("version", UMapping.int)(_.field.readonly) - // Predicates ? - // Values ? 
- .build - -} \ No newline at end of file diff --git a/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala b/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala index 759df983db..27e11ab729 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala @@ -5,7 +5,7 @@ import java.util.Date import io.scalaland.chimney.dsl._ import org.thp.scalligraph.controllers.Renderer import org.thp.scalligraph.models.Entity -import org.thp.thehive.dto.v0.{InputTaxonomy, OutputTaxonomy} +import org.thp.thehive.dto.v1.{InputTaxonomy, OutputTaxonomy} import org.thp.thehive.dto.v1._ import org.thp.thehive.models._ import play.api.libs.json.{JsObject, JsValue, Json} @@ -263,15 +263,20 @@ object Conversion { .transform } - implicit val taxonomyOutput: Renderer.Aux[RichTaxonomy, OutputTaxonomy] = Renderer.toJson[RichTaxonomy, OutputTaxonomy]( - _.into[OutputTaxonomy] - .withFieldComputed(_.namespace, _.namespace) - .withFieldComputed(_.description, _.description) - .withFieldComputed(_.version, _.version) - .withFieldComputed(_.predicates, _.predicates) - .withFieldComputed(_.values, _.values) - .transform - ) + implicit val taxonomyOutput: Renderer.Aux[RichTaxonomy, OutputTaxonomy] = + Renderer.toJson[RichTaxonomy, OutputTaxonomy]( + _.into[OutputTaxonomy] + .withFieldComputed(_.namespace, _.namespace) + .withFieldComputed(_.description, _.description) + .withFieldComputed(_.version, _.version) + .withFieldComputed(_.predicates, _.tags.map(_.predicate).distinct) + .withFieldComputed(_.values, _.tags.foldLeft(Map[String, Seq[OutputValue]]())((entryMap, tag) => { + val outputValues = entryMap.getOrElse(tag.predicate, Seq()) + val value = OutputValue(tag.value.getOrElse(""), tag.description.getOrElse("")) + entryMap + (tag.predicate -> (outputValues :+ value)) + }).map(e => OutputEntry(e._1, e._2))) + .transform + ) implicit class InputUserOps(inputUser: InputUser) { @@ -357,6 +362,7 @@ object Conversion { .withFieldComputed(_.tlp, _.tlp.getOrElse(2)) .transform } + implicit val observableOutput: Renderer.Aux[RichObservable, OutputObservable] = Renderer.toJson[RichObservable, OutputObservable](richObservable => richObservable .into[OutputObservable] diff --git a/thehive/app/org/thp/thehive/controllers/v1/Properties.scala b/thehive/app/org/thp/thehive/controllers/v1/Properties.scala index f435c9b3d3..a7d78bc02a 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/Properties.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/Properties.scala @@ -375,4 +375,14 @@ class Properties @Inject() ( .property("data", UMapping.string.optional)(_.select(_.data.value(_.data)).readonly) // TODO add attachment ? .build + + lazy val taxonomy: PublicProperties = + PublicPropertyListBuilder[Taxonomy] + .property("namespace", UMapping.string)(_.field.readonly) + .property("description", UMapping.string)(_.field.readonly) + .property("version", UMapping.int)(_.field.readonly) + // Predicates ? + // Values ? 
+ .build + } diff --git a/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala b/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala new file mode 100644 index 0000000000..d60db4d141 --- /dev/null +++ b/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala @@ -0,0 +1,103 @@ +package org.thp.thehive.controllers.v1 + +import javax.inject.{Inject, Named} +import org.thp.scalligraph.{EntityIdOrName, RichSeq} +import org.thp.scalligraph.controllers.{Entrypoint, FFile, FieldsParser} +import org.thp.scalligraph.models.Database +import org.thp.scalligraph.query._ +import org.thp.scalligraph.traversal.TraversalOps.TraversalOpsDefs +import org.thp.scalligraph.traversal.{IteratorOutput, Traversal} +import org.thp.thehive.controllers.v1.Conversion._ +import org.thp.thehive.dto.v1.InputTaxonomy +import org.thp.thehive.models.{Permissions, RichTaxonomy, Tag, Taxonomy} +import org.thp.thehive.services.OrganisationOps._ +import org.thp.thehive.services.TaxonomyOps._ +import org.thp.thehive.services.{OrganisationSrv, TagSrv, TaxonomySrv} +import play.api.mvc.{Action, AnyContent, Results} + +import scala.+: + +class TaxonomyCtrl @Inject() ( + entrypoint: Entrypoint, + properties: Properties, + taxonomySrv: TaxonomySrv, + organisationSrv: OrganisationSrv, + tagSrv: TagSrv, + @Named("with-thehive-schema") implicit val db: Database +) extends QueryableCtrl { + + override val entityName: String = "taxonomy" + override val publicProperties: PublicProperties = properties.taxonomy + override val initialQuery: Query = + Query.init[Traversal.V[Taxonomy]]("listTaxonomy", (graph, authContext) => + organisationSrv.get(authContext.organisation)(graph).taxonomies + ) + override val getQuery: ParamQuery[EntityIdOrName] = + Query.initWithParam[EntityIdOrName, Traversal.V[Taxonomy]]( + "getTaxonomy", + FieldsParser[EntityIdOrName], + (idOrName, graph, authContext) => taxonomySrv.get(idOrName)(graph).visible(authContext) + ) + override val pageQuery: ParamQuery[OutputParam] = + Query.withParam[OutputParam, Traversal.V[Taxonomy], IteratorOutput]( + "page", + FieldsParser[OutputParam], + (range, traversal, authContext) => + traversal.richPage(range.from, range.to, range.extraData.contains("total"))(_.richTaxonomy(authContext)) + ) + override val outputQuery: Query = + Query.outputWithContext[RichTaxonomy, Traversal.V[Taxonomy]]((traversal, authContext) => + traversal.richTaxonomy(authContext) + ) + override val extraQueries: Seq[ParamQuery[_]] = Seq( + Query[Traversal.V[Taxonomy], Traversal.V[Tag]]("tags", (traversal, _) => traversal.tags) + ) + + def importTaxonomy: Action[AnyContent] = + entrypoint("import taxonomy") + .extract("file", FieldsParser.file.optional.on("file")) + .extract("taxonomy", FieldsParser[InputTaxonomy]) + .authPermittedTransaction(db, Permissions.manageTaxonomy) { implicit request => implicit graph => + val file: Option[FFile] = request.body("file") + val inputTaxo: InputTaxonomy = request.body("taxonomy") + + // TODO Parse file & combine with body + + val taxonomy = Taxonomy(inputTaxo.namespace, inputTaxo.description, inputTaxo.version) + + // Create tags + val tagValues = inputTaxo.values.getOrElse(Seq()) + val tags = tagValues.foldLeft(Seq[Tag]())((all, value) => { + all ++ value.entry.map(e => + Tag(inputTaxo.namespace, + value.predicate, + Some(e.value), + Some(e.expanded), + e.colour.map(tagSrv.parseTagColour).getOrElse(tagSrv.defaultColour)) + + ) + } + ) + + // Create a tag for predicates with no tags associated + val predicateWithNoTags = 
inputTaxo.predicates.diff(tagValues.map(_.predicate)) + tags ++ predicateWithNoTags.map(p => + Tag(inputTaxo.namespace, p, None, None, tagSrv.defaultColour) + ) + + for { + tagsEntities <- tags.toTry(t => tagSrv.create(t)) + richTaxonomy <- taxonomySrv.create(taxonomy, tagsEntities) + } yield Results.Created(richTaxonomy.toJson) + } + + def delete(namespace: String): Action[AnyContent] = + entrypoint("delete taxonomy") + .authTransaction(db) { implicit request => implicit graph => + for { + t <- taxonomySrv.getByNamespace(namespace) + + } yield Results.Nocontent + } + +} diff --git a/thehive/app/org/thp/thehive/models/Taxonomy.scala b/thehive/app/org/thp/thehive/models/Taxonomy.scala index 3ed3c6f0cb..925956d9cd 100644 --- a/thehive/app/org/thp/thehive/models/Taxonomy.scala +++ b/thehive/app/org/thp/thehive/models/Taxonomy.scala @@ -12,17 +12,12 @@ case class Taxonomy( version: Int ) -case class Predicate(value: String) - -case class Value(predicate: String, entry: Seq[String]) - @BuildEdgeEntity[Taxonomy, Tag] case class TaxonomyTag() case class RichTaxonomy( taxonomy: Taxonomy with Entity, - predicates: Seq[Predicate], - values: Seq[Value] + tags: Seq[Tag with Entity] ) { def _id: EntityId = taxonomy._id def _createdBy: String = taxonomy._createdBy @@ -32,5 +27,4 @@ case class RichTaxonomy( def namespace: String = taxonomy.namespace def description: String = taxonomy.description def version: Int = taxonomy.version - } diff --git a/thehive/app/org/thp/thehive/services/TagSrv.scala b/thehive/app/org/thp/thehive/services/TagSrv.scala index 7b78029f4f..a1ade9e7a3 100644 --- a/thehive/app/org/thp/thehive/services/TagSrv.scala +++ b/thehive/app/org/thp/thehive/services/TagSrv.scala @@ -19,27 +19,31 @@ class TagSrv @Inject() (appConfig: ApplicationConfig, @Named("integrity-check-ac @Named("with-thehive-schema") db: Database ) extends VertexSrv[Tag] { - val autoCreateConfig: ConfigItem[Boolean, Boolean] = + private val autoCreateConfig: ConfigItem[Boolean, Boolean] = appConfig.item[Boolean]("tags.autocreate", "If true, create automatically tag if it doesn't exist") def autoCreate: Boolean = autoCreateConfig.get - val defaultNamespaceConfig: ConfigItem[String, String] = + private val defaultNamespaceConfig: ConfigItem[String, String] = appConfig.item[String]("tags.defaultNamespace", "Default namespace of the automatically created tags") def defaultNamespace: String = defaultNamespaceConfig.get - val defaultColourConfig: ConfigItem[String, Int] = + private val defaultColourConfig: ConfigItem[String, Int] = appConfig.mapItem[String, Int]( "tags.defaultColour", "Default colour of the automatically created tags", { - case s if s(0) == '#' => Try(Integer.parseUnsignedInt(s.tail, 16)).getOrElse(defaultColour) + case s if s(0) == '#' => parseTagColour(s.tail) case _ => defaultColour } ) + def defaultColour: Int = defaultColourConfig.get + // TODO Duplication in Tag.scala + def parseTagColour(c: String) = Try(Integer.parseUnsignedInt(c, 16)).getOrElse(defaultColour) + def parseString(tagName: String): Tag = Tag.fromString(tagName, defaultNamespace, defaultColour) diff --git a/thehive/app/org/thp/thehive/services/TaxonomySrv.scala b/thehive/app/org/thp/thehive/services/TaxonomySrv.scala index 7bd16db5c6..8a7e906979 100644 --- a/thehive/app/org/thp/thehive/services/TaxonomySrv.scala +++ b/thehive/app/org/thp/thehive/services/TaxonomySrv.scala @@ -4,36 +4,48 @@ import java.util.{Map => JMap} import javax.inject.{Inject, Named} import org.apache.tinkerpop.gremlin.structure.Graph -import 
org.thp.scalligraph.{EntityIdOrName, RichSeq} import org.thp.scalligraph.auth.AuthContext import org.thp.scalligraph.models.{Database, Entity} import org.thp.scalligraph.services.{EdgeSrv, VertexSrv} -import org.thp.scalligraph.traversal.{Converter, Traversal} import org.thp.scalligraph.traversal.TraversalOps.TraversalOpsDefs -import org.thp.thehive.models.{Organisation, OrganisationTaxonomy, Predicate, RichTaxonomy, Tag, Taxonomy, TaxonomyTag, Value} +import org.thp.scalligraph.traversal.{Converter, Traversal} +import org.thp.scalligraph.{EntityIdOrName, RichSeq} +import org.thp.thehive.models._ import org.thp.thehive.services.OrganisationOps._ import scala.util.Try @Singleton class TaxonomySrv @Inject() ( -)(implicit - @Named("with-thehive-schema") db: Database + organisationSrv: OrganisationSrv, + tagSrv: TagSrv +)(implicit @Named("with-thehive-schema") db: Database ) extends VertexSrv[Taxonomy] { val taxonomyTagSrv = new EdgeSrv[TaxonomyTag, Taxonomy, Tag] + val organisationTaxonomySrv = new EdgeSrv[OrganisationTaxonomy, Organisation, Taxonomy] def create(taxo: Taxonomy, tags: Seq[Tag with Entity])(implicit graph: Graph, authContext: AuthContext): Try[RichTaxonomy] = for { - taxonomy <- createEntity(taxo) - _ <- tags.toTry(t => taxonomyTagSrv.create(TaxonomyTag(), taxonomy, t)) - richTaxonomy <- RichTaxonomy(taxonomy, ???, ???) + taxonomy <- createEntity(taxo) + organisation <- organisationSrv.getOrFail(authContext.organisation) + _ <- organisationTaxonomySrv.create(OrganisationTaxonomy(), organisation, taxonomy) + _ <- tags.toTry(t => taxonomyTagSrv.create(TaxonomyTag(), taxonomy, t)) + richTaxonomy <- Try(RichTaxonomy(taxonomy, tags)) } yield richTaxonomy +/* + + def getByNamespace(namespace: String)(implicit graph: Graph): Traversal.V[Taxonomy] = + Try(startTraversal.getByNamespace(namespace)).getOrElse(startTraversal.limit(0)) +*/ + } object TaxonomyOps { implicit class TaxonomyOpsDefs(traversal: Traversal.V[Taxonomy]) { + def getByNamespace(namespace: String): Traversal.V[Taxonomy] = traversal.has(_.namespace, namespace) + def visible(implicit authContext: AuthContext): Traversal.V[Taxonomy] = visible(authContext.organisation) def visible(organisationIdOrName: EntityIdOrName): Traversal.V[Taxonomy] = @@ -49,18 +61,6 @@ object TaxonomyOps { _.by .by(_.tags.fold) ) - .domainMap { - case (taxonomy, tags) => - val predicates = tags.map(t => Predicate(t.predicate)).distinct - val values = predicates.map { p => - val tagValues = tags - .filter(_.predicate == p.value) - .filter(_.value.isDefined) - .map(_.value.get) - Value(p.value, tagValues) - } - RichTaxonomy(taxonomy, predicates, values) - } - + .domainMap { case (taxonomy, tags) => RichTaxonomy(taxonomy, tags) } } } From 08c0a5c5280a5fc78cc0b8aa8f94296c5503c6a6 Mon Sep 17 00:00:00 2001 From: Robin Riclet Date: Mon, 16 Nov 2020 15:18:24 +0100 Subject: [PATCH 04/93] WIP create taxonomiy / get works --- .../thp/thehive/dto/v1/CustomFieldValue.scala | 1 + .../org/thp/thehive/dto/v1/Taxonomy.scala | 30 +++++- .../thehive/controllers/v1/Conversion.scala | 10 +- .../thp/thehive/controllers/v1/Router.scala | 8 ++ .../thehive/controllers/v1/TaxonomyCtrl.scala | 93 +++++++++++-------- .../org/thp/thehive/models/Permissions.scala | 2 +- .../models/TheHiveSchemaDefinition.scala | 17 +++- .../thehive/services/OrganisationSrv.scala | 10 +- .../thp/thehive/services/TaxonomySrv.scala | 8 +- 9 files changed, 125 insertions(+), 54 deletions(-) diff --git a/dto/src/main/scala/org/thp/thehive/dto/v1/CustomFieldValue.scala 
b/dto/src/main/scala/org/thp/thehive/dto/v1/CustomFieldValue.scala index 6e72438d06..06d6fb16e4 100644 --- a/dto/src/main/scala/org/thp/thehive/dto/v1/CustomFieldValue.scala +++ b/dto/src/main/scala/org/thp/thehive/dto/v1/CustomFieldValue.scala @@ -70,6 +70,7 @@ object InputCustomFieldValue { } case _ => Good(Nil) } + implicit val writes: Writes[Seq[InputCustomFieldValue]] = Writes[Seq[InputCustomFieldValue]] { icfv => val fields = icfv.map { case InputCustomFieldValue(name, Some(s: String), _) => name -> JsString(s) diff --git a/dto/src/main/scala/org/thp/thehive/dto/v1/Taxonomy.scala b/dto/src/main/scala/org/thp/thehive/dto/v1/Taxonomy.scala index a2d05e879c..f0ebfb9659 100644 --- a/dto/src/main/scala/org/thp/thehive/dto/v1/Taxonomy.scala +++ b/dto/src/main/scala/org/thp/thehive/dto/v1/Taxonomy.scala @@ -2,7 +2,11 @@ package org.thp.thehive.dto.v1 import java.util.Date -import play.api.libs.json.{Json, OFormat, OWrites} +import org.scalactic.Accumulation.convertGenTraversableOnceToValidatable +import org.scalactic.{Bad, Good, One} +import org.thp.scalligraph.InvalidFormatAttributeError +import org.thp.scalligraph.controllers.{FObject, FSeq, FieldsParser, WithParser} +import play.api.libs.json.{JsArray, JsObject, JsString, Json, OFormat, OWrites, Writes} // TODO make sure of input format case class InputTaxonomy ( @@ -17,6 +21,20 @@ case class InputEntry(predicate: String, entry: Seq[InputValue]) case class InputValue(value: String, expanded: String, colour: Option[String]) +object InputEntry { + implicitly[FieldsParser[Option[Seq[InputEntry]]]] + + implicit val parser: FieldsParser[InputEntry] = FieldsParser[InputEntry] + + implicit val writes: Writes[InputEntry] = Json.writes[InputEntry] +} + +object InputValue { + implicit val parser: FieldsParser[InputValue] = FieldsParser[InputValue] + + implicit val writes: Writes[InputValue] = Json.writes[InputValue] +} + object InputTaxonomy { implicit val writes: OWrites[InputTaxonomy] = Json.writes[InputTaxonomy] } @@ -32,7 +50,7 @@ case class OutputTaxonomy( description: String, version: Int, predicates: Seq[String], - values: Option[Seq[OutputEntry]] + values: Seq[OutputEntry] ) case class OutputEntry(predicate: String, entry: Seq[OutputValue]) @@ -41,4 +59,12 @@ case class OutputValue(value: String, expanded: String) object OutputTaxonomy { implicit val format: OFormat[OutputTaxonomy] = Json.format[OutputTaxonomy] +} + +object OutputEntry { + implicit val format: OFormat[OutputEntry] = Json.format[OutputEntry] +} + +object OutputValue { + implicit val format: OFormat[OutputValue] = Json.format[OutputValue] } \ No newline at end of file diff --git a/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala b/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala index 27e11ab729..2ee833e562 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala @@ -266,15 +266,19 @@ object Conversion { implicit val taxonomyOutput: Renderer.Aux[RichTaxonomy, OutputTaxonomy] = Renderer.toJson[RichTaxonomy, OutputTaxonomy]( _.into[OutputTaxonomy] + .withFieldComputed(_._id, _._id.toString) + .withFieldConst(_._type, "Taxonomy") .withFieldComputed(_.namespace, _.namespace) .withFieldComputed(_.description, _.description) .withFieldComputed(_.version, _.version) .withFieldComputed(_.predicates, _.tags.map(_.predicate).distinct) .withFieldComputed(_.values, _.tags.foldLeft(Map[String, Seq[OutputValue]]())((entryMap, tag) => { val outputValues = 
entryMap.getOrElse(tag.predicate, Seq()) - val value = OutputValue(tag.value.getOrElse(""), tag.description.getOrElse("")) - entryMap + (tag.predicate -> (outputValues :+ value)) - }).map(e => OutputEntry(e._1, e._2))) + if (tag.value.isDefined) + entryMap + (tag.predicate -> (outputValues :+ OutputValue(tag.value.get, tag.description.getOrElse("")))) + else + entryMap + (tag.predicate -> outputValues) + }).map(e => OutputEntry(e._1, e._2)).toSeq) .transform ) diff --git a/thehive/app/org/thp/thehive/controllers/v1/Router.scala b/thehive/app/org/thp/thehive/controllers/v1/Router.scala index feffe865bb..1683c010ad 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/Router.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/Router.scala @@ -14,6 +14,7 @@ class Router @Inject() ( taskCtrl: TaskCtrl, customFieldCtrl: CustomFieldCtrl, alertCtrl: AlertCtrl, + taxonomyCtrl: TaxonomyCtrl, auditCtrl: AuditCtrl, statusCtrl: StatusCtrl, authenticationCtrl: AuthenticationCtrl, @@ -90,6 +91,13 @@ class Router @Inject() ( // DELETE /alert/:alertId controllers.AlertCtrl.delete(alertId) // POST /alert/:alertId/merge/:caseId controllers.AlertCtrl.mergeWithCase(alertId, caseId) + case GET(p"/taxonomy") => taxonomyCtrl.list + case GET(p"/taxonomy/$taxoId") => taxonomyCtrl.get(taxoId) + case POST(p"/taxonomy") => taxonomyCtrl.create + // case POST(p"/taxonomy/import-zip") => taxonomyCtrl.importZip + // case PUT(p"/taxonomy/$taxoId/activate") => taxonomyCtrl.activate + // case PUT(p"/taxonomy/$taxoId/deactivate") => taxonomyCtrl.deactivate + case GET(p"/audit") => auditCtrl.flow // GET /flow controllers.AuditCtrl.flow(rootId: Option[String], count: Option[Int]) // GET /audit controllers.AuditCtrl.find() diff --git a/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala b/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala index d60db4d141..6a8220514c 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala @@ -2,7 +2,7 @@ package org.thp.thehive.controllers.v1 import javax.inject.{Inject, Named} import org.thp.scalligraph.{EntityIdOrName, RichSeq} -import org.thp.scalligraph.controllers.{Entrypoint, FFile, FieldsParser} +import org.thp.scalligraph.controllers.{Entrypoint, FieldsParser} import org.thp.scalligraph.models.Database import org.thp.scalligraph.query._ import org.thp.scalligraph.traversal.TraversalOps.TraversalOpsDefs @@ -15,7 +15,7 @@ import org.thp.thehive.services.TaxonomyOps._ import org.thp.thehive.services.{OrganisationSrv, TagSrv, TaxonomySrv} import play.api.mvc.{Action, AnyContent, Results} -import scala.+: +import scala.util.Success class TaxonomyCtrl @Inject() ( entrypoint: Entrypoint, @@ -42,55 +42,73 @@ class TaxonomyCtrl @Inject() ( Query.withParam[OutputParam, Traversal.V[Taxonomy], IteratorOutput]( "page", FieldsParser[OutputParam], - (range, traversal, authContext) => - traversal.richPage(range.from, range.to, range.extraData.contains("total"))(_.richTaxonomy(authContext)) + (range, traversal, _) => + traversal.richPage(range.from, range.to, range.extraData.contains("total"))(_.richTaxonomy) ) override val outputQuery: Query = - Query.outputWithContext[RichTaxonomy, Traversal.V[Taxonomy]]((traversal, authContext) => - traversal.richTaxonomy(authContext) + Query.outputWithContext[RichTaxonomy, Traversal.V[Taxonomy]]((traversal, _) => + traversal.richTaxonomy ) override val extraQueries: Seq[ParamQuery[_]] = Seq( Query[Traversal.V[Taxonomy], Traversal.V[Tag]]("tags", 
(traversal, _) => traversal.tags) ) - def importTaxonomy: Action[AnyContent] = - entrypoint("import taxonomy") - .extract("file", FieldsParser.file.optional.on("file")) - .extract("taxonomy", FieldsParser[InputTaxonomy]) - .authPermittedTransaction(db, Permissions.manageTaxonomy) { implicit request => implicit graph => - val file: Option[FFile] = request.body("file") - val inputTaxo: InputTaxonomy = request.body("taxonomy") - - // TODO Parse file & combine with body + def list: Action[AnyContent] = + entrypoint("list taxonomies") + .authRoTransaction(db) { implicit request => implicit graph => + val taxos = taxonomySrv + .startTraversal + .visible + .richTaxonomy + .toSeq + Success(Results.Ok(taxos.toJson)) + } - val taxonomy = Taxonomy(inputTaxo.namespace, inputTaxo.description, inputTaxo.version) + def create: Action[AnyContent] = + entrypoint("import taxonomy") + .extract("taxonomy", FieldsParser[InputTaxonomy]) + .authPermittedTransaction(db, Permissions.manageTaxonomy) { implicit request => implicit graph => + val inputTaxo: InputTaxonomy = request.body("taxonomy") - // Create tags - val tagValues = inputTaxo.values.getOrElse(Seq()) - val tags = tagValues.foldLeft(Seq[Tag]())((all, value) => { - all ++ value.entry.map(e => - Tag(inputTaxo.namespace, - value.predicate, - Some(e.value), - Some(e.expanded), - e.colour.map(tagSrv.parseTagColour).getOrElse(tagSrv.defaultColour)) + val taxonomy = Taxonomy(inputTaxo.namespace, inputTaxo.description, inputTaxo.version) - ) - } - ) + // Create tags + val tagValues = inputTaxo.values.getOrElse(Seq()) + val tags = tagValues.foldLeft(Seq[Tag]())((all, value) => { + all ++ value.entry.map(e => + Tag(inputTaxo.namespace, + value.predicate, + Some(e.value), + Some(e.expanded), + e.colour.map(tagSrv.parseTagColour).getOrElse(tagSrv.defaultColour) + ) + ) + }) - // Create a tag for predicates with no tags associated - val predicateWithNoTags = inputTaxo.predicates.diff(tagValues.map(_.predicate)) - tags ++ predicateWithNoTags.map(p => - Tag(inputTaxo.namespace, p, None, None, tagSrv.defaultColour) - ) + // Create a tag for predicates with no tags associated + val predicateWithNoTags = inputTaxo.predicates.diff(tagValues.map(_.predicate)) + val allTags = tags ++ predicateWithNoTags.map(p => + Tag(inputTaxo.namespace, p, None, None, tagSrv.defaultColour) + ) - for { - tagsEntities <- tags.toTry(t => tagSrv.create(t)) - richTaxonomy <- taxonomySrv.create(taxonomy, tagsEntities) - } yield Results.Created(richTaxonomy.toJson) + for { + tagsEntities <- allTags.toTry(t => tagSrv.create(t)) + richTaxonomy <- taxonomySrv.create(taxonomy, tagsEntities) + } yield Results.Created(richTaxonomy.toJson) } + def get(taxonomyId: String): Action[AnyContent] = + entrypoint("get taxonomy") + .authRoTransaction(db) { implicit request => implicit graph => + taxonomySrv + .get(EntityIdOrName(taxonomyId)) + .visible + .richTaxonomy + .getOrFail("Taxonomy") + .map(taxonomy => Results.Ok(taxonomy.toJson)) + } + +/* def delete(namespace: String): Action[AnyContent] = entrypoint("delete taxonomy") .authTransaction(db) { implicit request => implicit graph => @@ -99,5 +117,6 @@ class TaxonomyCtrl @Inject() ( } yield Results.Nocontent } +*/ } diff --git a/thehive/app/org/thp/thehive/models/Permissions.scala b/thehive/app/org/thp/thehive/models/Permissions.scala index 81ea621740..7c079860de 100644 --- a/thehive/app/org/thp/thehive/models/Permissions.scala +++ b/thehive/app/org/thp/thehive/models/Permissions.scala @@ -14,7 +14,7 @@ object Permissions extends Perms { lazy val 
manageAction: PermissionDesc = PermissionDesc("manageAction", "Run Cortex responders ", "organisation") lazy val manageConfig: PermissionDesc = PermissionDesc("manageConfig", "Manage configurations", "organisation", "admin") lazy val manageProfile: PermissionDesc = PermissionDesc("manageProfile", "Manage user profiles", "admin") - lazy val manageTaxonomy: PermissionDesc = PermissionDesc("manageTaxonomy", "Manage taxonomies", "admin") + lazy val manageTaxonomy: PermissionDesc = PermissionDesc("manageTaxonomy", "Manage taxonomies", "organisation", "admin") lazy val manageTag: PermissionDesc = PermissionDesc("manageTag", "Manage tags", "admin") lazy val manageCustomField: PermissionDesc = PermissionDesc("manageCustomField", "Manage custom fields", "admin") lazy val manageShare: PermissionDesc = PermissionDesc("manageShare", "Manage shares", "organisation") diff --git a/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala b/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala index 38953a7a51..0baa6d6961 100644 --- a/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala +++ b/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala @@ -1,6 +1,7 @@ package org.thp.thehive.models import java.lang.reflect.Modifier +import java.util.Date import javax.inject.{Inject, Singleton} import org.apache.tinkerpop.gremlin.structure.Graph @@ -82,7 +83,12 @@ class TheHiveSchemaDefinition @Inject() extends Schema with UpdatableSchema { db.tryTransaction { g => db.labelFilter("Organisation")(Traversal.V()(g)).toIterator.toTry { o => val taxoVertex = g.addVertex("Taxonomy") - taxoVertex.property("namespace", "Custom") + taxoVertex.property("_label", "Taxonomy") + taxoVertex.property("_createdBy", "???") // TODO What user should be used ? 
+ taxoVertex.property("_createdAt", new Date()) + taxoVertex.property("namespace", "custom") + taxoVertex.property("description", "Custom taxonomy") + taxoVertex.property("version", 1) o.addEdge("OrganisationTaxonomy", taxoVertex) Success(()) } @@ -97,13 +103,18 @@ class TheHiveSchemaDefinition @Inject() extends Schema with UpdatableSchema { _.out("OrganisationShare").out("ShareObservable").out("ObservableTag"), _.in("AlertOrganisation").out("AlertTag"), _.in("CaseTemplateOrganisation").out("CaseTemplateTag") - ).toSeq.foreach(tag => + ).toSeq.foreach { tag => + tag.property("namespace", "custom") customTaxo.addEdge("TaxonomyTag", tag) - ) + } Success(()) } }.map(_ => ()) } + .updateGraph("Add manageTaxonomy to org-admin profile", "Profile") { traversal => + Try(traversal.unsafeHas("name", "org-admin").raw.property("permissions", "manageTaxonomy").iterate()) + Success(()) + } val reflectionClasses = new Reflections( new ConfigurationBuilder() diff --git a/thehive/app/org/thp/thehive/services/OrganisationSrv.scala b/thehive/app/org/thp/thehive/services/OrganisationSrv.scala index e74b249a54..026f8a9554 100644 --- a/thehive/app/org/thp/thehive/services/OrganisationSrv.scala +++ b/thehive/app/org/thp/thehive/services/OrganisationSrv.scala @@ -3,7 +3,7 @@ package org.thp.thehive.services import java.util.{Map => JMap} import akka.actor.ActorRef -import javax.inject.{Inject, Named, Singleton} +import javax.inject.{Inject, Named, Provider, Singleton} import org.apache.tinkerpop.gremlin.structure.Graph import org.thp.scalligraph.auth.{AuthContext, Permission} import org.thp.scalligraph.models._ @@ -23,6 +23,7 @@ import scala.util.{Failure, Success, Try} @Singleton class OrganisationSrv @Inject() ( + taxonomySrvProvider: Provider[TaxonomySrv], roleSrv: RoleSrv, profileSrv: ProfileSrv, auditSrv: AuditSrv, @@ -31,9 +32,9 @@ class OrganisationSrv @Inject() ( )(implicit @Named("with-thehive-schema") db: Database ) extends VertexSrv[Organisation] { - - val organisationOrganisationSrv = new EdgeSrv[OrganisationOrganisation, Organisation, Organisation] - val organisationShareSrv = new EdgeSrv[OrganisationShare, Organisation, Share] + lazy val taxonomySrv: TaxonomySrv = taxonomySrvProvider.get + val organisationOrganisationSrv = new EdgeSrv[OrganisationOrganisation, Organisation, Organisation] + val organisationShareSrv = new EdgeSrv[OrganisationShare, Organisation, Share] override def createEntity(e: Organisation)(implicit graph: Graph, authContext: AuthContext): Try[Organisation with Entity] = { integrityCheckActor ! 
IntegrityCheckActor.EntityAdded("Organisation") @@ -51,6 +52,7 @@ class OrganisationSrv @Inject() ( def create(e: Organisation)(implicit graph: Graph, authContext: AuthContext): Try[Organisation with Entity] = for { createdOrganisation <- createEntity(e) + _ <- taxonomySrv.create(Taxonomy("custom", "Custom taxonomy", 1), Seq()) _ <- auditSrv.organisation.create(createdOrganisation, createdOrganisation.toJson) } yield createdOrganisation diff --git a/thehive/app/org/thp/thehive/services/TaxonomySrv.scala b/thehive/app/org/thp/thehive/services/TaxonomySrv.scala index 8a7e906979..55117defea 100644 --- a/thehive/app/org/thp/thehive/services/TaxonomySrv.scala +++ b/thehive/app/org/thp/thehive/services/TaxonomySrv.scala @@ -2,7 +2,7 @@ package org.thp.thehive.services import java.util.{Map => JMap} -import javax.inject.{Inject, Named} +import javax.inject.{Inject, Named, Singleton} import org.apache.tinkerpop.gremlin.structure.Graph import org.thp.scalligraph.auth.AuthContext import org.thp.scalligraph.models.{Database, Entity} @@ -17,8 +17,7 @@ import scala.util.Try @Singleton class TaxonomySrv @Inject() ( - organisationSrv: OrganisationSrv, - tagSrv: TagSrv + organisationSrv: OrganisationSrv )(implicit @Named("with-thehive-schema") db: Database ) extends VertexSrv[Taxonomy] { @@ -33,6 +32,7 @@ class TaxonomySrv @Inject() ( _ <- tags.toTry(t => taxonomyTagSrv.create(TaxonomyTag(), taxonomy, t)) richTaxonomy <- Try(RichTaxonomy(taxonomy, tags)) } yield richTaxonomy + /* def getByNamespace(namespace: String)(implicit graph: Graph): Traversal.V[Taxonomy] = @@ -55,7 +55,7 @@ object TaxonomyOps { def tags: Traversal.V[Tag] = traversal.out[TaxonomyTag].v[Tag] - def richTaxonomy(implicit authContext: AuthContext): Traversal[RichTaxonomy, JMap[String, Any], Converter[RichTaxonomy, JMap[String, Any]]] = + def richTaxonomy: Traversal[RichTaxonomy, JMap[String, Any], Converter[RichTaxonomy, JMap[String, Any]]] = traversal .project( _.by From a6cfdb3eb970e6f6a2cd2313976aae480c7f4891 Mon Sep 17 00:00:00 2001 From: Robin Riclet Date: Mon, 16 Nov 2020 16:47:17 +0100 Subject: [PATCH 05/93] WIP Custom taxonomy when new organisation is created --- .../main/scala/org/thp/thehive/dto/v1/Taxonomy.scala | 2 -- .../thp/thehive/models/TheHiveSchemaDefinition.scala | 2 +- .../app/org/thp/thehive/services/OrganisationSrv.scala | 2 +- thehive/app/org/thp/thehive/services/TaxonomySrv.scala | 10 +++++++++- 4 files changed, 11 insertions(+), 5 deletions(-) diff --git a/dto/src/main/scala/org/thp/thehive/dto/v1/Taxonomy.scala b/dto/src/main/scala/org/thp/thehive/dto/v1/Taxonomy.scala index f0ebfb9659..a73243a59e 100644 --- a/dto/src/main/scala/org/thp/thehive/dto/v1/Taxonomy.scala +++ b/dto/src/main/scala/org/thp/thehive/dto/v1/Taxonomy.scala @@ -22,8 +22,6 @@ case class InputEntry(predicate: String, entry: Seq[InputValue]) case class InputValue(value: String, expanded: String, colour: Option[String]) object InputEntry { - implicitly[FieldsParser[Option[Seq[InputEntry]]]] - implicit val parser: FieldsParser[InputEntry] = FieldsParser[InputEntry] implicit val writes: Writes[InputEntry] = Json.writes[InputEntry] diff --git a/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala b/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala index 0baa6d6961..5bf2b0ec70 100644 --- a/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala +++ b/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala @@ -84,7 +84,7 @@ class TheHiveSchemaDefinition @Inject() extends Schema with UpdatableSchema { 
db.labelFilter("Organisation")(Traversal.V()(g)).toIterator.toTry { o => val taxoVertex = g.addVertex("Taxonomy") taxoVertex.property("_label", "Taxonomy") - taxoVertex.property("_createdBy", "???") // TODO What user should be used ? + taxoVertex.property("_createdBy", "system@thehive.local") taxoVertex.property("_createdAt", new Date()) taxoVertex.property("namespace", "custom") taxoVertex.property("description", "Custom taxonomy") diff --git a/thehive/app/org/thp/thehive/services/OrganisationSrv.scala b/thehive/app/org/thp/thehive/services/OrganisationSrv.scala index 026f8a9554..e2a8c61068 100644 --- a/thehive/app/org/thp/thehive/services/OrganisationSrv.scala +++ b/thehive/app/org/thp/thehive/services/OrganisationSrv.scala @@ -52,7 +52,7 @@ class OrganisationSrv @Inject() ( def create(e: Organisation)(implicit graph: Graph, authContext: AuthContext): Try[Organisation with Entity] = for { createdOrganisation <- createEntity(e) - _ <- taxonomySrv.create(Taxonomy("custom", "Custom taxonomy", 1), Seq()) + _ <- taxonomySrv.createWithOrg(Taxonomy("custom", "Custom taxonomy", 1), Seq(), createdOrganisation) _ <- auditSrv.organisation.create(createdOrganisation, createdOrganisation.toJson) } yield createdOrganisation diff --git a/thehive/app/org/thp/thehive/services/TaxonomySrv.scala b/thehive/app/org/thp/thehive/services/TaxonomySrv.scala index 55117defea..28734aefb9 100644 --- a/thehive/app/org/thp/thehive/services/TaxonomySrv.scala +++ b/thehive/app/org/thp/thehive/services/TaxonomySrv.scala @@ -26,8 +26,16 @@ class TaxonomySrv @Inject() ( def create(taxo: Taxonomy, tags: Seq[Tag with Entity])(implicit graph: Graph, authContext: AuthContext): Try[RichTaxonomy] = for { - taxonomy <- createEntity(taxo) organisation <- organisationSrv.getOrFail(authContext.organisation) + richTaxonomy <- createWithOrg(taxo, tags, organisation) + } yield richTaxonomy + + def createWithOrg(taxo: Taxonomy, + tags: Seq[Tag with Entity], + organisation: Organisation with Entity) + (implicit graph: Graph, authContext: AuthContext): Try[RichTaxonomy] = + for { + taxonomy <- createEntity(taxo) _ <- organisationTaxonomySrv.create(OrganisationTaxonomy(), organisation, taxonomy) _ <- tags.toTry(t => taxonomyTagSrv.create(TaxonomyTag(), taxonomy, t)) richTaxonomy <- Try(RichTaxonomy(taxonomy, tags)) From 80ffdd2aa17888fb295b5c0d9efd5f56375f769b Mon Sep 17 00:00:00 2001 From: Robin Riclet Date: Mon, 16 Nov 2020 19:02:21 +0100 Subject: [PATCH 06/93] WIP Added taxonomy activate / deactivate --- .../main/scala/org/thp/thehive/dto/v1/Taxonomy.scala | 2 +- .../org/thp/thehive/controllers/v1/Conversion.scala | 2 ++ .../app/org/thp/thehive/controllers/v1/Router.scala | 6 +++--- .../org/thp/thehive/controllers/v1/TaxonomyCtrl.scala | 10 +++++++++- thehive/app/org/thp/thehive/models/Taxonomy.scala | 4 +++- .../thp/thehive/models/TheHiveSchemaDefinition.scala | 3 ++- .../app/org/thp/thehive/services/OrganisationSrv.scala | 6 ++++-- thehive/app/org/thp/thehive/services/TaxonomySrv.scala | 10 +++++++++- 8 files changed, 33 insertions(+), 10 deletions(-) diff --git a/dto/src/main/scala/org/thp/thehive/dto/v1/Taxonomy.scala b/dto/src/main/scala/org/thp/thehive/dto/v1/Taxonomy.scala index a73243a59e..fe8eaa467c 100644 --- a/dto/src/main/scala/org/thp/thehive/dto/v1/Taxonomy.scala +++ b/dto/src/main/scala/org/thp/thehive/dto/v1/Taxonomy.scala @@ -8,7 +8,6 @@ import org.thp.scalligraph.InvalidFormatAttributeError import org.thp.scalligraph.controllers.{FObject, FSeq, FieldsParser, WithParser} import play.api.libs.json.{JsArray, JsObject, 
JsString, Json, OFormat, OWrites, Writes} -// TODO make sure of input format case class InputTaxonomy ( namespace: String, description: String, @@ -47,6 +46,7 @@ case class OutputTaxonomy( namespace: String, description: String, version: Int, + enabled: Boolean, predicates: Seq[String], values: Seq[OutputEntry] ) diff --git a/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala b/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala index 2ee833e562..78f235c031 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala @@ -260,6 +260,7 @@ object Conversion { .withFieldComputed(_.namespace, _.namespace) .withFieldComputed(_.description, _.description) .withFieldComputed(_.version, _.version) + .withFieldConst(_.enabled, false) // TODO always false when importing a taxonomy ? .transform } @@ -271,6 +272,7 @@ object Conversion { .withFieldComputed(_.namespace, _.namespace) .withFieldComputed(_.description, _.description) .withFieldComputed(_.version, _.version) + .withFieldComputed(_.enabled, _.enabled) .withFieldComputed(_.predicates, _.tags.map(_.predicate).distinct) .withFieldComputed(_.values, _.tags.foldLeft(Map[String, Seq[OutputValue]]())((entryMap, tag) => { val outputValues = entryMap.getOrElse(tag.predicate, Seq()) diff --git a/thehive/app/org/thp/thehive/controllers/v1/Router.scala b/thehive/app/org/thp/thehive/controllers/v1/Router.scala index 1683c010ad..7fd69f6291 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/Router.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/Router.scala @@ -94,9 +94,9 @@ class Router @Inject() ( case GET(p"/taxonomy") => taxonomyCtrl.list case GET(p"/taxonomy/$taxoId") => taxonomyCtrl.get(taxoId) case POST(p"/taxonomy") => taxonomyCtrl.create - // case POST(p"/taxonomy/import-zip") => taxonomyCtrl.importZip - // case PUT(p"/taxonomy/$taxoId/activate") => taxonomyCtrl.activate - // case PUT(p"/taxonomy/$taxoId/deactivate") => taxonomyCtrl.deactivate + // case POST(p"/taxonomy/import-zip") => taxonomyCtrl.importZip< + case PUT(p"/taxonomy/$taxoId/activate") => taxonomyCtrl.setEnabled(taxoId, isEnabled = true) + case PUT(p"/taxonomy/$taxoId/deactivate") => taxonomyCtrl.setEnabled(taxoId, isEnabled = false) case GET(p"/audit") => auditCtrl.flow // GET /flow controllers.AuditCtrl.flow(rootId: Option[String], count: Option[Int]) diff --git a/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala b/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala index 6a8220514c..b2b3ff7136 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala @@ -70,7 +70,7 @@ class TaxonomyCtrl @Inject() ( .authPermittedTransaction(db, Permissions.manageTaxonomy) { implicit request => implicit graph => val inputTaxo: InputTaxonomy = request.body("taxonomy") - val taxonomy = Taxonomy(inputTaxo.namespace, inputTaxo.description, inputTaxo.version) + val taxonomy = Taxonomy(inputTaxo.namespace, inputTaxo.description, inputTaxo.version, enabled = false) // Create tags val tagValues = inputTaxo.values.getOrElse(Seq()) @@ -108,6 +108,14 @@ class TaxonomyCtrl @Inject() ( .map(taxonomy => Results.Ok(taxonomy.toJson)) } + def setEnabled(taxonomyId: String, isEnabled: Boolean): Action[AnyContent] = + entrypoint("toggle taxonomy") + .authPermittedTransaction(db, Permissions.manageTaxonomy) { implicit request => implicit graph => + taxonomySrv + .setEnabled(EntityIdOrName(taxonomyId), 
isEnabled) + .map(_ => Results.NoContent) + } + /* def delete(namespace: String): Action[AnyContent] = entrypoint("delete taxonomy") diff --git a/thehive/app/org/thp/thehive/models/Taxonomy.scala b/thehive/app/org/thp/thehive/models/Taxonomy.scala index 925956d9cd..7a8f9a46c2 100644 --- a/thehive/app/org/thp/thehive/models/Taxonomy.scala +++ b/thehive/app/org/thp/thehive/models/Taxonomy.scala @@ -9,7 +9,8 @@ import org.thp.scalligraph.{BuildEdgeEntity, BuildVertexEntity, EntityId} case class Taxonomy( namespace: String, description: String, - version: Int + version: Int, + enabled: Boolean ) @BuildEdgeEntity[Taxonomy, Tag] @@ -27,4 +28,5 @@ case class RichTaxonomy( def namespace: String = taxonomy.namespace def description: String = taxonomy.description def version: Int = taxonomy.version + def enabled: Boolean = taxonomy.enabled } diff --git a/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala b/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala index 5bf2b0ec70..3cca9b9b9f 100644 --- a/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala +++ b/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala @@ -89,6 +89,7 @@ class TheHiveSchemaDefinition @Inject() extends Schema with UpdatableSchema { taxoVertex.property("namespace", "custom") taxoVertex.property("description", "Custom taxonomy") taxoVertex.property("version", 1) + taxoVertex.property("enabled", true) o.addEdge("OrganisationTaxonomy", taxoVertex) Success(()) } @@ -97,7 +98,7 @@ class TheHiveSchemaDefinition @Inject() extends Schema with UpdatableSchema { .dbOperation[Database]("Add each tag to its Organisation's Custom taxonomy") { db => db.tryTransaction { implicit g => db.labelFilter("Organisation")(Traversal.V()).toIterator.toTry { o => - val customTaxo = Traversal.V(EntityId(o.id())).out("OrganisationTaxonomy").unsafeHas("namespace", "Custom").head + val customTaxo = Traversal.V(EntityId(o.id())).out("OrganisationTaxonomy").unsafeHas("namespace", "custom").head Traversal.V(EntityId(o.id())).unionFlat( _.out("OrganisationShare").out("ShareCase").out("CaseTag"), _.out("OrganisationShare").out("ShareObservable").out("ObservableTag"), diff --git a/thehive/app/org/thp/thehive/services/OrganisationSrv.scala b/thehive/app/org/thp/thehive/services/OrganisationSrv.scala index e2a8c61068..05f3889499 100644 --- a/thehive/app/org/thp/thehive/services/OrganisationSrv.scala +++ b/thehive/app/org/thp/thehive/services/OrganisationSrv.scala @@ -49,12 +49,14 @@ class OrganisationSrv @Inject() ( _ <- roleSrv.create(user, createdOrganisation, profileSrv.orgAdmin) } yield createdOrganisation - def create(e: Organisation)(implicit graph: Graph, authContext: AuthContext): Try[Organisation with Entity] = + def create(e: Organisation)(implicit graph: Graph, authContext: AuthContext): Try[Organisation with Entity] = { + val customTaxo = Taxonomy("custom", "Custom taxonomy", 1, enabled = true) for { createdOrganisation <- createEntity(e) - _ <- taxonomySrv.createWithOrg(Taxonomy("custom", "Custom taxonomy", 1), Seq(), createdOrganisation) + _ <- taxonomySrv.createWithOrg(customTaxo, Seq(), createdOrganisation) _ <- auditSrv.organisation.create(createdOrganisation, createdOrganisation.toJson) } yield createdOrganisation + } def current(implicit graph: Graph, authContext: AuthContext): Traversal.V[Organisation] = get(authContext.organisation) diff --git a/thehive/app/org/thp/thehive/services/TaxonomySrv.scala b/thehive/app/org/thp/thehive/services/TaxonomySrv.scala index 28734aefb9..42003e4f54 100644 --- 
a/thehive/app/org/thp/thehive/services/TaxonomySrv.scala +++ b/thehive/app/org/thp/thehive/services/TaxonomySrv.scala @@ -9,7 +9,7 @@ import org.thp.scalligraph.models.{Database, Entity} import org.thp.scalligraph.services.{EdgeSrv, VertexSrv} import org.thp.scalligraph.traversal.TraversalOps.TraversalOpsDefs import org.thp.scalligraph.traversal.{Converter, Traversal} -import org.thp.scalligraph.{EntityIdOrName, RichSeq} +import org.thp.scalligraph.{EntityId, EntityIdOrName, RichSeq} import org.thp.thehive.models._ import org.thp.thehive.services.OrganisationOps._ @@ -41,6 +41,11 @@ class TaxonomySrv @Inject() ( richTaxonomy <- Try(RichTaxonomy(taxonomy, tags)) } yield richTaxonomy + def setEnabled(taxonomyId: EntityIdOrName, isEnabled: Boolean)(implicit graph: Graph): Try[Unit] = + for { + _ <- get(taxonomyId).update(_.enabled, isEnabled).getOrFail("Taxonomy") + } yield () + /* def getByNamespace(namespace: String)(implicit graph: Graph): Traversal.V[Taxonomy] = @@ -52,6 +57,9 @@ class TaxonomySrv @Inject() ( object TaxonomyOps { implicit class TaxonomyOpsDefs(traversal: Traversal.V[Taxonomy]) { + def get(idOrName: EntityId): Traversal.V[Taxonomy] = + traversal.getByIds(idOrName) + def getByNamespace(namespace: String): Traversal.V[Taxonomy] = traversal.has(_.namespace, namespace) def visible(implicit authContext: AuthContext): Traversal.V[Taxonomy] = visible(authContext.organisation) From 279f8af88c8410dc692814c93664c7de844366c5 Mon Sep 17 00:00:00 2001 From: Robin Riclet Date: Mon, 16 Nov 2020 19:08:08 +0100 Subject: [PATCH 07/93] Fixed taxonomy values parsing --- .../main/scala/org/thp/thehive/dto/v1/Taxonomy.scala | 10 +--------- 1 file changed, 1 insertion(+), 9 deletions(-) diff --git a/dto/src/main/scala/org/thp/thehive/dto/v1/Taxonomy.scala b/dto/src/main/scala/org/thp/thehive/dto/v1/Taxonomy.scala index fe8eaa467c..20890915a5 100644 --- a/dto/src/main/scala/org/thp/thehive/dto/v1/Taxonomy.scala +++ b/dto/src/main/scala/org/thp/thehive/dto/v1/Taxonomy.scala @@ -2,11 +2,7 @@ package org.thp.thehive.dto.v1 import java.util.Date -import org.scalactic.Accumulation.convertGenTraversableOnceToValidatable -import org.scalactic.{Bad, Good, One} -import org.thp.scalligraph.InvalidFormatAttributeError -import org.thp.scalligraph.controllers.{FObject, FSeq, FieldsParser, WithParser} -import play.api.libs.json.{JsArray, JsObject, JsString, Json, OFormat, OWrites, Writes} +import play.api.libs.json.{Json, OFormat, OWrites, Writes} case class InputTaxonomy ( namespace: String, @@ -21,14 +17,10 @@ case class InputEntry(predicate: String, entry: Seq[InputValue]) case class InputValue(value: String, expanded: String, colour: Option[String]) object InputEntry { - implicit val parser: FieldsParser[InputEntry] = FieldsParser[InputEntry] - implicit val writes: Writes[InputEntry] = Json.writes[InputEntry] } object InputValue { - implicit val parser: FieldsParser[InputValue] = FieldsParser[InputValue] - implicit val writes: Writes[InputValue] = Json.writes[InputValue] } From 2e25d591ade69eadf3f3081ea338f4849fa7a6e7 Mon Sep 17 00:00:00 2001 From: Robin Riclet Date: Tue, 17 Nov 2020 11:42:50 +0100 Subject: [PATCH 08/93] Idempotent TheHive schema --- .../thp/thehive/controllers/v1/Router.scala | 2 +- .../models/TheHiveSchemaDefinition.scala | 27 +++++++++++-------- .../thehive/controllers/v1/UserCtrlTest.scala | 1 + 3 files changed, 18 insertions(+), 12 deletions(-) diff --git a/thehive/app/org/thp/thehive/controllers/v1/Router.scala b/thehive/app/org/thp/thehive/controllers/v1/Router.scala index 
7fd69f6291..f3184dffdb 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/Router.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/Router.scala @@ -94,7 +94,7 @@ class Router @Inject() ( case GET(p"/taxonomy") => taxonomyCtrl.list case GET(p"/taxonomy/$taxoId") => taxonomyCtrl.get(taxoId) case POST(p"/taxonomy") => taxonomyCtrl.create - // case POST(p"/taxonomy/import-zip") => taxonomyCtrl.importZip< + // case POST(p"/taxonomy/import-zip") => taxonomyCtrl.importZip case PUT(p"/taxonomy/$taxoId/activate") => taxonomyCtrl.setEnabled(taxoId, isEnabled = true) case PUT(p"/taxonomy/$taxoId/deactivate") => taxonomyCtrl.setEnabled(taxoId, isEnabled = false) diff --git a/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala b/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala index 3cca9b9b9f..9edc5c41f1 100644 --- a/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala +++ b/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala @@ -81,17 +81,22 @@ class TheHiveSchemaDefinition @Inject() extends Schema with UpdatableSchema { .addVertexModel[String]("Taxonomy", Seq("namespace")) .dbOperation[Database]("Add Custom taxonomy vertex for each Organisation") { db => db.tryTransaction { g => - db.labelFilter("Organisation")(Traversal.V()(g)).toIterator.toTry { o => - val taxoVertex = g.addVertex("Taxonomy") - taxoVertex.property("_label", "Taxonomy") - taxoVertex.property("_createdBy", "system@thehive.local") - taxoVertex.property("_createdAt", new Date()) - taxoVertex.property("namespace", "custom") - taxoVertex.property("description", "Custom taxonomy") - taxoVertex.property("version", 1) - taxoVertex.property("enabled", true) - o.addEdge("OrganisationTaxonomy", taxoVertex) - Success(()) + // If there are no taxonomies in database, add a custom one for each organisation + db.labelFilter("Taxonomy")(Traversal.V()(g)).headOption match { + case None => + db.labelFilter("Organisation")(Traversal.V()(g)).toIterator.toTry { o => + val taxoVertex = g.addVertex("Taxonomy") + taxoVertex.property("_label", "Taxonomy") + taxoVertex.property("_createdBy", "system@thehive.local") + taxoVertex.property("_createdAt", new Date()) + taxoVertex.property("namespace", "custom") + taxoVertex.property("description", "Custom taxonomy") + taxoVertex.property("version", 1) + taxoVertex.property("enabled", true) + o.addEdge("OrganisationTaxonomy", taxoVertex) + Success(()) + } + case _ => Success(()) } }.map(_ => ()) } diff --git a/thehive/test/org/thp/thehive/controllers/v1/UserCtrlTest.scala b/thehive/test/org/thp/thehive/controllers/v1/UserCtrlTest.scala index e7ac8f762c..dd68b7d3a9 100644 --- a/thehive/test/org/thp/thehive/controllers/v1/UserCtrlTest.scala +++ b/thehive/test/org/thp/thehive/controllers/v1/UserCtrlTest.scala @@ -109,6 +109,7 @@ class UserCtrlTest extends PlaySpecification with TestAppBuilder { Permissions.managePage, Permissions.manageObservable, Permissions.manageAlert, + Permissions.manageTaxonomy, Permissions.manageAction, Permissions.manageConfig ), From 49e9e0f65edbd43cf3defa91f0c1cc2229c1336b Mon Sep 17 00:00:00 2001 From: Robin Riclet Date: Tue, 17 Nov 2020 14:23:35 +0100 Subject: [PATCH 09/93] Checked if taxonomy namespace is present before creating --- .../thp/thehive/controllers/v1/TaxonomyCtrl.scala | 15 +++++++++------ .../org/thp/thehive/services/TaxonomySrv.scala | 9 +++++++++ 2 files changed, 18 insertions(+), 6 deletions(-) diff --git a/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala 
b/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala index b2b3ff7136..844dbc311b 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala @@ -1,7 +1,7 @@ package org.thp.thehive.controllers.v1 import javax.inject.{Inject, Named} -import org.thp.scalligraph.{EntityIdOrName, RichSeq} +import org.thp.scalligraph.{CreateError, EntityIdOrName, RichSeq} import org.thp.scalligraph.controllers.{Entrypoint, FieldsParser} import org.thp.scalligraph.models.Database import org.thp.scalligraph.query._ @@ -15,7 +15,7 @@ import org.thp.thehive.services.TaxonomyOps._ import org.thp.thehive.services.{OrganisationSrv, TagSrv, TaxonomySrv} import play.api.mvc.{Action, AnyContent, Results} -import scala.util.Success +import scala.util.{Failure, Success} class TaxonomyCtrl @Inject() ( entrypoint: Entrypoint, @@ -91,10 +91,13 @@ class TaxonomyCtrl @Inject() ( Tag(inputTaxo.namespace, p, None, None, tagSrv.defaultColour) ) - for { - tagsEntities <- allTags.toTry(t => tagSrv.create(t)) - richTaxonomy <- taxonomySrv.create(taxonomy, tagsEntities) - } yield Results.Created(richTaxonomy.toJson) + if (taxonomySrv.existsInOrganisation(inputTaxo.namespace)) + Failure(CreateError("A taxonomy with this namespace already exists in this organisation")) + else + for { + tagsEntities <- allTags.toTry(t => tagSrv.create(t)) + richTaxonomy <- taxonomySrv.create(taxonomy, tagsEntities) + } yield Results.Created(richTaxonomy.toJson) } def get(taxonomyId: String): Action[AnyContent] = diff --git a/thehive/app/org/thp/thehive/services/TaxonomySrv.scala b/thehive/app/org/thp/thehive/services/TaxonomySrv.scala index 42003e4f54..49ee30d3d3 100644 --- a/thehive/app/org/thp/thehive/services/TaxonomySrv.scala +++ b/thehive/app/org/thp/thehive/services/TaxonomySrv.scala @@ -24,6 +24,15 @@ class TaxonomySrv @Inject() ( val taxonomyTagSrv = new EdgeSrv[TaxonomyTag, Taxonomy, Tag] val organisationTaxonomySrv = new EdgeSrv[OrganisationTaxonomy, Organisation, Taxonomy] + def existsInOrganisation(namespace: String)(implicit graph: Graph, authContext: AuthContext): Boolean = { + startTraversal + .has(_.namespace, namespace) + .in[OrganisationTaxonomy] + .v[Organisation] + .has(_.name, authContext.organisation.toString) // TODO not great + .exists + } + def create(taxo: Taxonomy, tags: Seq[Tag with Entity])(implicit graph: Graph, authContext: AuthContext): Try[RichTaxonomy] = for { organisation <- organisationSrv.getOrFail(authContext.organisation) From cda163ddd0545697f814f2817119aa2ed3438742 Mon Sep 17 00:00:00 2001 From: Robin Riclet Date: Tue, 17 Nov 2020 15:09:28 +0100 Subject: [PATCH 10/93] Correct output format for taxonomies --- .../scala/org/thp/thehive/dto/v1/Tag.scala | 15 ++++++++++++++ .../org/thp/thehive/dto/v1/Taxonomy.scala | 15 +------------- .../thehive/controllers/v1/Conversion.scala | 20 +++++++++++-------- thehive/app/org/thp/thehive/models/Tag.scala | 19 ++++++++++++++++-- .../app/org/thp/thehive/models/Taxonomy.scala | 2 +- .../thp/thehive/services/TaxonomySrv.scala | 10 ++-------- 6 files changed, 48 insertions(+), 33 deletions(-) create mode 100644 dto/src/main/scala/org/thp/thehive/dto/v1/Tag.scala diff --git a/dto/src/main/scala/org/thp/thehive/dto/v1/Tag.scala b/dto/src/main/scala/org/thp/thehive/dto/v1/Tag.scala new file mode 100644 index 0000000000..3b536c867c --- /dev/null +++ b/dto/src/main/scala/org/thp/thehive/dto/v1/Tag.scala @@ -0,0 +1,15 @@ +package org.thp.thehive.dto.v1 + +import play.api.libs.json.{Json, 
OFormat} + +case class OutputTag( + namespace: String, + predicate: String, + value: Option[String], + description: Option[String], + colour: Int +) + +object OutputTag { + implicit val format: OFormat[OutputTag] = Json.format[OutputTag] +} diff --git a/dto/src/main/scala/org/thp/thehive/dto/v1/Taxonomy.scala b/dto/src/main/scala/org/thp/thehive/dto/v1/Taxonomy.scala index 20890915a5..70f0b23208 100644 --- a/dto/src/main/scala/org/thp/thehive/dto/v1/Taxonomy.scala +++ b/dto/src/main/scala/org/thp/thehive/dto/v1/Taxonomy.scala @@ -39,22 +39,9 @@ case class OutputTaxonomy( description: String, version: Int, enabled: Boolean, - predicates: Seq[String], - values: Seq[OutputEntry] + tags: Seq[OutputTag] ) -case class OutputEntry(predicate: String, entry: Seq[OutputValue]) - -case class OutputValue(value: String, expanded: String) - object OutputTaxonomy { implicit val format: OFormat[OutputTaxonomy] = Json.format[OutputTaxonomy] } - -object OutputEntry { - implicit val format: OFormat[OutputEntry] = Json.format[OutputEntry] -} - -object OutputValue { - implicit val format: OFormat[OutputValue] = Json.format[OutputValue] -} \ No newline at end of file diff --git a/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala b/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala index 78f235c031..05e63d0bdf 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala @@ -273,14 +273,18 @@ object Conversion { .withFieldComputed(_.description, _.description) .withFieldComputed(_.version, _.version) .withFieldComputed(_.enabled, _.enabled) - .withFieldComputed(_.predicates, _.tags.map(_.predicate).distinct) - .withFieldComputed(_.values, _.tags.foldLeft(Map[String, Seq[OutputValue]]())((entryMap, tag) => { - val outputValues = entryMap.getOrElse(tag.predicate, Seq()) - if (tag.value.isDefined) - entryMap + (tag.predicate -> (outputValues :+ OutputValue(tag.value.get, tag.description.getOrElse("")))) - else - entryMap + (tag.predicate -> outputValues) - }).map(e => OutputEntry(e._1, e._2)).toSeq) + .withFieldComputed(_.tags, _.tags.map(_.toOutput)) + .transform + ) + + implicit val tagOutput: Renderer.Aux[RichTag, OutputTag] = + Renderer.toJson[RichTag, OutputTag]( + _.into[OutputTag] + .withFieldComputed(_.namespace, _.namespace) + .withFieldComputed(_.predicate, _.predicate) + .withFieldComputed(_.value, _.value) + .withFieldComputed(_.description, _.description) + .withFieldComputed(_.colour, _.colour) .transform ) diff --git a/thehive/app/org/thp/thehive/models/Tag.scala b/thehive/app/org/thp/thehive/models/Tag.scala index e188ee45c2..cc97dc317e 100644 --- a/thehive/app/org/thp/thehive/models/Tag.scala +++ b/thehive/app/org/thp/thehive/models/Tag.scala @@ -1,7 +1,9 @@ package org.thp.thehive.models -import org.thp.scalligraph.BuildVertexEntity -import org.thp.scalligraph.models.{DefineIndex, IndexType} +import java.util.Date + +import org.thp.scalligraph.{BuildVertexEntity, EntityId} +import org.thp.scalligraph.models.{DefineIndex, Entity, IndexType} import play.api.Logger import scala.util.Try @@ -54,3 +56,16 @@ object Tag { } } } + +case class RichTag(tag: Tag with Entity) { + def _id: EntityId = tag._id + def _createdBy: String = tag._createdBy + def _updatedBy: Option[String] = tag._updatedBy + def _createdAt: Date = tag._createdAt + def _updatedAt: Option[Date] = tag._updatedAt + def namespace: String = tag.namespace + def predicate: String = tag.predicate + def value: Option[String] = tag.value + def 
description: Option[String] = tag.description + def colour: Int = tag.colour +} diff --git a/thehive/app/org/thp/thehive/models/Taxonomy.scala b/thehive/app/org/thp/thehive/models/Taxonomy.scala index 7a8f9a46c2..a7815963e6 100644 --- a/thehive/app/org/thp/thehive/models/Taxonomy.scala +++ b/thehive/app/org/thp/thehive/models/Taxonomy.scala @@ -18,7 +18,7 @@ case class TaxonomyTag() case class RichTaxonomy( taxonomy: Taxonomy with Entity, - tags: Seq[Tag with Entity] + tags: Seq[RichTag] ) { def _id: EntityId = taxonomy._id def _createdBy: String = taxonomy._createdBy diff --git a/thehive/app/org/thp/thehive/services/TaxonomySrv.scala b/thehive/app/org/thp/thehive/services/TaxonomySrv.scala index 49ee30d3d3..1b29f20081 100644 --- a/thehive/app/org/thp/thehive/services/TaxonomySrv.scala +++ b/thehive/app/org/thp/thehive/services/TaxonomySrv.scala @@ -47,7 +47,7 @@ class TaxonomySrv @Inject() ( taxonomy <- createEntity(taxo) _ <- organisationTaxonomySrv.create(OrganisationTaxonomy(), organisation, taxonomy) _ <- tags.toTry(t => taxonomyTagSrv.create(TaxonomyTag(), taxonomy, t)) - richTaxonomy <- Try(RichTaxonomy(taxonomy, tags)) + richTaxonomy <- Try(RichTaxonomy(taxonomy, tags.map(RichTag))) } yield richTaxonomy def setEnabled(taxonomyId: EntityIdOrName, isEnabled: Boolean)(implicit graph: Graph): Try[Unit] = @@ -55,12 +55,6 @@ class TaxonomySrv @Inject() ( _ <- get(taxonomyId).update(_.enabled, isEnabled).getOrFail("Taxonomy") } yield () -/* - - def getByNamespace(namespace: String)(implicit graph: Graph): Traversal.V[Taxonomy] = - Try(startTraversal.getByNamespace(namespace)).getOrElse(startTraversal.limit(0)) -*/ - } object TaxonomyOps { @@ -86,6 +80,6 @@ object TaxonomyOps { _.by .by(_.tags.fold) ) - .domainMap { case (taxonomy, tags) => RichTaxonomy(taxonomy, tags) } + .domainMap { case (taxonomy, tags) => RichTaxonomy(taxonomy, tags.map(RichTag)) } } } From eb91a5b1df0d231e98b7c64f240b19fd84fd5907 Mon Sep 17 00:00:00 2001 From: Robin Riclet Date: Tue, 17 Nov 2020 15:31:29 +0100 Subject: [PATCH 11/93] Query for taxonomies --- thehive/app/org/thp/thehive/controllers/v1/Router.scala | 3 +-- .../org/thp/thehive/controllers/v1/TheHiveQueryExecutor.scala | 4 +++- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/thehive/app/org/thp/thehive/controllers/v1/Router.scala b/thehive/app/org/thp/thehive/controllers/v1/Router.scala index f3184dffdb..1d49363c75 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/Router.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/Router.scala @@ -91,10 +91,9 @@ class Router @Inject() ( // DELETE /alert/:alertId controllers.AlertCtrl.delete(alertId) // POST /alert/:alertId/merge/:caseId controllers.AlertCtrl.mergeWithCase(alertId, caseId) - case GET(p"/taxonomy") => taxonomyCtrl.list - case GET(p"/taxonomy/$taxoId") => taxonomyCtrl.get(taxoId) case POST(p"/taxonomy") => taxonomyCtrl.create // case POST(p"/taxonomy/import-zip") => taxonomyCtrl.importZip + case GET(p"/taxonomy/$taxoId") => taxonomyCtrl.get(taxoId) case PUT(p"/taxonomy/$taxoId/activate") => taxonomyCtrl.setEnabled(taxoId, isEnabled = true) case PUT(p"/taxonomy/$taxoId/deactivate") => taxonomyCtrl.setEnabled(taxoId, isEnabled = false) diff --git a/thehive/app/org/thp/thehive/controllers/v1/TheHiveQueryExecutor.scala b/thehive/app/org/thp/thehive/controllers/v1/TheHiveQueryExecutor.scala index bbc3b86b81..27fed93b12 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/TheHiveQueryExecutor.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/TheHiveQueryExecutor.scala @@ 
-32,6 +32,7 @@ class TheHiveQueryExecutor @Inject() ( profileCtrl: ProfileCtrl, taskCtrl: TaskCtrl, userCtrl: UserCtrl, + taxonomyCtrl: TaxonomyCtrl, // dashboardCtrl: DashboardCtrl, properties: Properties, @Named("with-thehive-schema") implicit val db: Database @@ -53,7 +54,8 @@ class TheHiveQueryExecutor @Inject() ( profileCtrl, // tagCtrl, taskCtrl, - userCtrl + userCtrl, + taxonomyCtrl ) override val version: (Int, Int) = 1 -> 1 From 858ba48a0b6e206800f1e4b32c029bfc474880c5 Mon Sep 17 00:00:00 2001 From: Robin Riclet Date: Wed, 18 Nov 2020 12:13:01 +0100 Subject: [PATCH 12/93] Basic zip import --- .../org/thp/thehive/dto/v1/Taxonomy.scala | 49 +++++++--- .../thp/thehive/controllers/v1/Router.scala | 2 +- .../thehive/controllers/v1/TaxonomyCtrl.scala | 91 +++++++++++-------- 3 files changed, 90 insertions(+), 52 deletions(-) diff --git a/dto/src/main/scala/org/thp/thehive/dto/v1/Taxonomy.scala b/dto/src/main/scala/org/thp/thehive/dto/v1/Taxonomy.scala index 70f0b23208..576683127a 100644 --- a/dto/src/main/scala/org/thp/thehive/dto/v1/Taxonomy.scala +++ b/dto/src/main/scala/org/thp/thehive/dto/v1/Taxonomy.scala @@ -2,30 +2,57 @@ package org.thp.thehive.dto.v1 import java.util.Date -import play.api.libs.json.{Json, OFormat, OWrites, Writes} +import play.api.libs.json.{Json, OFormat} -case class InputTaxonomy ( +/* +Format based on : +https://tools.ietf.org/id/draft-dulaunoy-misp-taxonomy-format-04.html +*/ + +case class InputTaxonomy( namespace: String, description: String, version: Int, - predicates: Seq[String], - values: Option[Seq[InputEntry]] + `type`: Option[Seq[String]], + exclusive: Option[Boolean], + predicates: Seq[InputPredicate], + values: Option[Seq[InputValue]] +) + +case class InputPredicate( + value: String, + expanded: Option[String], + exclusive: Option[Boolean], + description: Option[String] ) -case class InputEntry(predicate: String, entry: Seq[InputValue]) +case class InputValue( + predicate: String, + entry: Seq[InputEntry] +) -case class InputValue(value: String, expanded: String, colour: Option[String]) +case class InputEntry( + value: String, + expanded: Option[String], + colour: Option[String], + description: Option[String], + numerical_value: Option[Int] +) -object InputEntry { - implicit val writes: Writes[InputEntry] = Json.writes[InputEntry] +object InputTaxonomy { + implicit val format: OFormat[InputTaxonomy] = Json.format[InputTaxonomy] +} + +object InputPredicate { + implicit val format: OFormat[InputPredicate] = Json.format[InputPredicate] } object InputValue { - implicit val writes: Writes[InputValue] = Json.writes[InputValue] + implicit val format: OFormat[InputValue] = Json.format[InputValue] } -object InputTaxonomy { - implicit val writes: OWrites[InputTaxonomy] = Json.writes[InputTaxonomy] +object InputEntry { + implicit val format: OFormat[InputEntry] = Json.format[InputEntry] } case class OutputTaxonomy( diff --git a/thehive/app/org/thp/thehive/controllers/v1/Router.scala b/thehive/app/org/thp/thehive/controllers/v1/Router.scala index 1d49363c75..324df8eb8d 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/Router.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/Router.scala @@ -92,7 +92,7 @@ class Router @Inject() ( // POST /alert/:alertId/merge/:caseId controllers.AlertCtrl.mergeWithCase(alertId, caseId) case POST(p"/taxonomy") => taxonomyCtrl.create - // case POST(p"/taxonomy/import-zip") => taxonomyCtrl.importZip + case POST(p"/taxonomy/import-zip") => taxonomyCtrl.importZip case GET(p"/taxonomy/$taxoId") => taxonomyCtrl.get(taxoId) 
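The restructured InputTaxonomy above follows the MISP taxonomy (machinetag) document format referenced in the dto comment. As a rough, self-contained illustration, the case classes below are local stand-ins rather than the real dto package, and the sample document is invented; it decodes with play-json as follows.

    import play.api.libs.json.{Json, OFormat}

    // Stand-in case classes mirroring the MISP machinetag shape used by InputTaxonomy.
    case class MispEntry(value: String, expanded: Option[String], colour: Option[String],
                         description: Option[String], numerical_value: Option[Int])
    case class MispValue(predicate: String, entry: Seq[MispEntry])
    case class MispPredicate(value: String, expanded: Option[String],
                             exclusive: Option[Boolean], description: Option[String])
    case class MispTaxonomy(namespace: String, description: String, version: Int,
                            predicates: Seq[MispPredicate], values: Option[Seq[MispValue]])

    object MispEntry     { implicit val format: OFormat[MispEntry]     = Json.format[MispEntry] }
    object MispValue     { implicit val format: OFormat[MispValue]     = Json.format[MispValue] }
    object MispPredicate { implicit val format: OFormat[MispPredicate] = Json.format[MispPredicate] }
    object MispTaxonomy  { implicit val format: OFormat[MispTaxonomy]  = Json.format[MispTaxonomy] }

    object MispParseSketch extends App {
      // Abridged, invented sample in the machinetag.json layout.
      val doc = Json.parse(
        """{
          |  "namespace": "tlp",
          |  "description": "Traffic Light Protocol",
          |  "version": 3,
          |  "predicates": [ { "value": "red" }, { "value": "amber" } ]
          |}""".stripMargin)
      println(doc.as[MispTaxonomy].predicates.map(_.value)) // List(red, amber)
    }
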
case PUT(p"/taxonomy/$taxoId/activate") => taxonomyCtrl.setEnabled(taxoId, isEnabled = true) case PUT(p"/taxonomy/$taxoId/deactivate") => taxonomyCtrl.setEnabled(taxoId, isEnabled = false) diff --git a/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala b/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala index 844dbc311b..5946240540 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala @@ -1,21 +1,25 @@ package org.thp.thehive.controllers.v1 import javax.inject.{Inject, Named} -import org.thp.scalligraph.{CreateError, EntityIdOrName, RichSeq} -import org.thp.scalligraph.controllers.{Entrypoint, FieldsParser} +import net.lingala.zip4j.ZipFile +import org.apache.tinkerpop.gremlin.structure.Graph +import org.thp.scalligraph.auth.AuthContext +import org.thp.scalligraph.controllers.{Entrypoint, FFile, FieldsParser} import org.thp.scalligraph.models.Database import org.thp.scalligraph.query._ -import org.thp.scalligraph.traversal.TraversalOps.TraversalOpsDefs +import org.thp.scalligraph.traversal.TraversalOps.{TraversalOpsDefs, logger} import org.thp.scalligraph.traversal.{IteratorOutput, Traversal} +import org.thp.scalligraph.{CreateError, EntityIdOrName, RichSeq} import org.thp.thehive.controllers.v1.Conversion._ import org.thp.thehive.dto.v1.InputTaxonomy import org.thp.thehive.models.{Permissions, RichTaxonomy, Tag, Taxonomy} import org.thp.thehive.services.OrganisationOps._ import org.thp.thehive.services.TaxonomyOps._ import org.thp.thehive.services.{OrganisationSrv, TagSrv, TaxonomySrv} +import play.api.libs.json.Json import play.api.mvc.{Action, AnyContent, Results} -import scala.util.{Failure, Success} +import scala.util.{Failure, Success, Try} class TaxonomyCtrl @Inject() ( entrypoint: Entrypoint, @@ -68,36 +72,54 @@ class TaxonomyCtrl @Inject() ( entrypoint("import taxonomy") .extract("taxonomy", FieldsParser[InputTaxonomy]) .authPermittedTransaction(db, Permissions.manageTaxonomy) { implicit request => implicit graph => - val inputTaxo: InputTaxonomy = request.body("taxonomy") + for { + richTaxonomy <- createFromInput(request.body("taxonomy")) + } yield Results.Created(richTaxonomy.toJson) + } - val taxonomy = Taxonomy(inputTaxo.namespace, inputTaxo.description, inputTaxo.version, enabled = false) + def importZip: Action[AnyContent] = + entrypoint("import taxonomies zip") + .extract("file", FieldsParser.file.on("file")) + .authPermittedTransaction(db, Permissions.manageTaxonomy) { implicit request => implicit graph => + val file: FFile = request.body("file") + val zipFile = new ZipFile(file.filepath.toString) + zipFile.getFileHeaders.stream.forEach { fileHeader => + val json = Json.parse(zipFile.getInputStream(fileHeader)) + createFromInput(json.as[InputTaxonomy]) + } + + Success(Results.NoContent) + } - // Create tags - val tagValues = inputTaxo.values.getOrElse(Seq()) - val tags = tagValues.foldLeft(Seq[Tag]())((all, value) => { - all ++ value.entry.map(e => - Tag(inputTaxo.namespace, - value.predicate, - Some(e.value), - Some(e.expanded), - e.colour.map(tagSrv.parseTagColour).getOrElse(tagSrv.defaultColour) - ) - ) - }) + private def createFromInput(inputTaxo: InputTaxonomy)(implicit graph: Graph, authContext: AuthContext): Try[RichTaxonomy] = { + val taxonomy = Taxonomy(inputTaxo.namespace, inputTaxo.description, inputTaxo.version, enabled = false) - // Create a tag for predicates with no tags associated - val predicateWithNoTags = 
inputTaxo.predicates.diff(tagValues.map(_.predicate)) - val allTags = tags ++ predicateWithNoTags.map(p => - Tag(inputTaxo.namespace, p, None, None, tagSrv.defaultColour) + // Create tags + val tagValues = inputTaxo.values.getOrElse(Seq()) + val tags = tagValues.foldLeft(Seq[Tag]())((all, value) => { + all ++ value.entry.map(e => + Tag(inputTaxo.namespace, + value.predicate, + Some(e.value), + e.expanded, + e.colour.map(tagSrv.parseTagColour).getOrElse(tagSrv.defaultColour) ) + ) + }) - if (taxonomySrv.existsInOrganisation(inputTaxo.namespace)) - Failure(CreateError("A taxonomy with this namespace already exists in this organisation")) - else - for { - tagsEntities <- allTags.toTry(t => tagSrv.create(t)) - richTaxonomy <- taxonomySrv.create(taxonomy, tagsEntities) - } yield Results.Created(richTaxonomy.toJson) + // Create a tag for predicates with no tags associated + val predicateWithNoTags = inputTaxo.predicates.diff(tagValues.map(_.predicate)) + val allTags = tags ++ predicateWithNoTags.map(p => + Tag(inputTaxo.namespace, p.value, None, None, tagSrv.defaultColour) + ) + + if (taxonomySrv.existsInOrganisation(inputTaxo.namespace)) + Failure(CreateError("A taxonomy with this namespace already exists in this organisation")) + else + for { + tagsEntities <- allTags.toTry(t => tagSrv.create(t)) + richTaxonomy <- taxonomySrv.create(taxonomy, tagsEntities) + } yield richTaxonomy } def get(taxonomyId: String): Action[AnyContent] = @@ -119,15 +141,4 @@ class TaxonomyCtrl @Inject() ( .map(_ => Results.NoContent) } -/* - def delete(namespace: String): Action[AnyContent] = - entrypoint("delete taxonomy") - .authTransaction(db) { implicit request => implicit graph => - for { - t <- taxonomySrv.getByNamespace(namespace) - - } yield Results.Nocontent - } -*/ - } From 366ef7b0e0ec857f6fc83fd4463926bbd9e94f57 Mon Sep 17 00:00:00 2001 From: Robin Riclet Date: Wed, 18 Nov 2020 17:32:51 +0100 Subject: [PATCH 13/93] Handled zip import errors --- .../thehive/controllers/v1/Conversion.scala | 14 +-------- .../thehive/controllers/v1/Properties.scala | 3 +- .../thehive/controllers/v1/TaxonomyCtrl.scala | 31 +++++++++++++------ .../models/TheHiveSchemaDefinition.scala | 2 +- .../thp/thehive/services/TaxonomySrv.scala | 2 +- 5 files changed, 26 insertions(+), 26 deletions(-) diff --git a/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala b/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala index 05e63d0bdf..154f7cb790 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala @@ -257,10 +257,7 @@ object Conversion { def toTaxonomy: Taxonomy = inputTaxonomy .into[Taxonomy] - .withFieldComputed(_.namespace, _.namespace) - .withFieldComputed(_.description, _.description) - .withFieldComputed(_.version, _.version) - .withFieldConst(_.enabled, false) // TODO always false when importing a taxonomy ? 
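Dropping the explicit withFieldComputed lines works because the chimney transformer maps fields that share a name and type between source and target on its own; only a field missing from the source, such as enabled, needs an explicit rule. A minimal sketch, assuming the chimney library and invented case classes:

    import io.scalaland.chimney.dsl._

    // Same-named, same-typed fields are copied automatically by chimney;
    // only `enabled`, which the source lacks, needs an explicit constant.
    case class TaxonomyInput(namespace: String, description: String, version: Int)
    case class TaxonomyModel(namespace: String, description: String, version: Int, enabled: Boolean)

    object ChimneySketch extends App {
      val model = TaxonomyInput("tlp", "Traffic Light Protocol", 3)
        .into[TaxonomyModel]
        .withFieldConst(_.enabled, false)
        .transform
      println(model) // TaxonomyModel(tlp,Traffic Light Protocol,3,false)
    }
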
+ .withFieldConst(_.enabled, false) .transform } @@ -269,10 +266,6 @@ object Conversion { _.into[OutputTaxonomy] .withFieldComputed(_._id, _._id.toString) .withFieldConst(_._type, "Taxonomy") - .withFieldComputed(_.namespace, _.namespace) - .withFieldComputed(_.description, _.description) - .withFieldComputed(_.version, _.version) - .withFieldComputed(_.enabled, _.enabled) .withFieldComputed(_.tags, _.tags.map(_.toOutput)) .transform ) @@ -280,11 +273,6 @@ object Conversion { implicit val tagOutput: Renderer.Aux[RichTag, OutputTag] = Renderer.toJson[RichTag, OutputTag]( _.into[OutputTag] - .withFieldComputed(_.namespace, _.namespace) - .withFieldComputed(_.predicate, _.predicate) - .withFieldComputed(_.value, _.value) - .withFieldComputed(_.description, _.description) - .withFieldComputed(_.colour, _.colour) .transform ) diff --git a/thehive/app/org/thp/thehive/controllers/v1/Properties.scala b/thehive/app/org/thp/thehive/controllers/v1/Properties.scala index a7d78bc02a..f3b2bb997d 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/Properties.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/Properties.scala @@ -381,8 +381,7 @@ class Properties @Inject() ( .property("namespace", UMapping.string)(_.field.readonly) .property("description", UMapping.string)(_.field.readonly) .property("version", UMapping.int)(_.field.readonly) - // Predicates ? - // Values ? + .property("enabled", UMapping.boolean)(_.field.readonly) .build } diff --git a/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala b/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala index 5946240540..5a48e28d8a 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala @@ -2,14 +2,15 @@ package org.thp.thehive.controllers.v1 import javax.inject.{Inject, Named} import net.lingala.zip4j.ZipFile +import net.lingala.zip4j.model.FileHeader import org.apache.tinkerpop.gremlin.structure.Graph import org.thp.scalligraph.auth.AuthContext import org.thp.scalligraph.controllers.{Entrypoint, FFile, FieldsParser} import org.thp.scalligraph.models.Database import org.thp.scalligraph.query._ -import org.thp.scalligraph.traversal.TraversalOps.{TraversalOpsDefs, logger} +import org.thp.scalligraph.traversal.TraversalOps.TraversalOpsDefs import org.thp.scalligraph.traversal.{IteratorOutput, Traversal} -import org.thp.scalligraph.{CreateError, EntityIdOrName, RichSeq} +import org.thp.scalligraph.{BadRequestError, EntityIdOrName, RichSeq} import org.thp.thehive.controllers.v1.Conversion._ import org.thp.thehive.dto.v1.InputTaxonomy import org.thp.thehive.models.{Permissions, RichTaxonomy, Tag, Taxonomy} @@ -19,6 +20,7 @@ import org.thp.thehive.services.{OrganisationSrv, TagSrv, TaxonomySrv} import play.api.libs.json.Json import play.api.mvc.{Action, AnyContent, Results} +import scala.collection.JavaConverters._ import scala.util.{Failure, Success, Try} class TaxonomyCtrl @Inject() ( @@ -80,17 +82,28 @@ class TaxonomyCtrl @Inject() ( def importZip: Action[AnyContent] = entrypoint("import taxonomies zip") .extract("file", FieldsParser.file.on("file")) - .authPermittedTransaction(db, Permissions.manageTaxonomy) { implicit request => implicit graph => + .authPermitted(Permissions.manageTaxonomy) { implicit request => val file: FFile = request.body("file") val zipFile = new ZipFile(file.filepath.toString) - zipFile.getFileHeaders.stream.forEach { fileHeader => - val json = Json.parse(zipFile.getInputStream(fileHeader)) - 
createFromInput(json.as[InputTaxonomy]) - } + val headers = zipFile + .getFileHeaders + .iterator() + .asScala - Success(Results.NoContent) + for { + inputTaxos <- headers.toTry(h => parseJsonFile(zipFile, h)) + richTaxos <- db.tryTransaction { implicit graph => + inputTaxos.toTry(inputTaxo => createFromInput(inputTaxo)).map(_.toJson) + } + } yield Results.Created(richTaxos) } + private def parseJsonFile(zipFile: ZipFile, h: FileHeader): Try[InputTaxonomy] = { + Try(Json.parse(zipFile.getInputStream(h)).as[InputTaxonomy]).recoverWith { + case _ => Failure(BadRequestError(s"File '${h.getFileName}' does not comply with the MISP taxonomy formatting")) + } + } + private def createFromInput(inputTaxo: InputTaxonomy)(implicit graph: Graph, authContext: AuthContext): Try[RichTaxonomy] = { val taxonomy = Taxonomy(inputTaxo.namespace, inputTaxo.description, inputTaxo.version, enabled = false) @@ -114,7 +127,7 @@ class TaxonomyCtrl @Inject() ( ) if (taxonomySrv.existsInOrganisation(inputTaxo.namespace)) - Failure(CreateError("A taxonomy with this namespace already exists in this organisation")) + Failure(BadRequestError(s"A taxonomy with namespace '${inputTaxo.namespace}' already exists in this organisation")) else for { tagsEntities <- allTags.toTry(t => tagSrv.create(t)) diff --git a/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala b/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala index 9edc5c41f1..e109b6ffd7 100644 --- a/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala +++ b/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala @@ -122,7 +122,7 @@ class TheHiveSchemaDefinition @Inject() extends Schema with UpdatableSchema { Success(()) } - val reflectionClasses = new Reflections( + val reflectionClasses = new Reflections( new ConfigurationBuilder() .forPackages("org.thp.thehive.models") .addClassLoader(getClass.getClassLoader) diff --git a/thehive/app/org/thp/thehive/services/TaxonomySrv.scala b/thehive/app/org/thp/thehive/services/TaxonomySrv.scala index 1b29f20081..062d96b4e4 100644 --- a/thehive/app/org/thp/thehive/services/TaxonomySrv.scala +++ b/thehive/app/org/thp/thehive/services/TaxonomySrv.scala @@ -29,7 +29,7 @@ class TaxonomySrv @Inject() ( .has(_.namespace, namespace) .in[OrganisationTaxonomy] .v[Organisation] - .has(_.name, authContext.organisation.toString) // TODO not great + .current .exists } From 2c7e887ea369b43a2d3b000edff53b6b215159bc Mon Sep 17 00:00:00 2001 From: Robin Riclet Date: Thu, 19 Nov 2020 11:18:59 +0100 Subject: [PATCH 14/93] Review changes --- .../thp/thehive/controllers/v1/Conversion.scala | 4 ++-- .../thp/thehive/controllers/v1/TaxonomyCtrl.scala | 8 +++----- thehive/app/org/thp/thehive/models/Tag.scala | 13 ------------- thehive/app/org/thp/thehive/models/Taxonomy.scala | 2 +- .../thehive/models/TheHiveSchemaDefinition.scala | 14 +++++++------- .../app/org/thp/thehive/services/TaxonomySrv.scala | 4 ++-- 6 files changed, 15 insertions(+), 30 deletions(-) diff --git a/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala b/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala index 154f7cb790..5ef735620f 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala @@ -270,8 +270,8 @@ object Conversion { .transform ) - implicit val tagOutput: Renderer.Aux[RichTag, OutputTag] = - Renderer.toJson[RichTag, OutputTag]( + implicit val tagOutput: Renderer.Aux[Tag, OutputTag] = + Renderer.toJson[Tag, OutputTag]( 
_.into[OutputTag] .transform ) diff --git a/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala b/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala index 5a48e28d8a..10c91d6974 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala @@ -105,12 +105,10 @@ class TaxonomyCtrl @Inject() ( } private def createFromInput(inputTaxo: InputTaxonomy)(implicit graph: Graph, authContext: AuthContext): Try[RichTaxonomy] = { - val taxonomy = Taxonomy(inputTaxo.namespace, inputTaxo.description, inputTaxo.version, enabled = false) - // Create tags val tagValues = inputTaxo.values.getOrElse(Seq()) - val tags = tagValues.foldLeft(Seq[Tag]())((all, value) => { - all ++ value.entry.map(e => + val tags = tagValues.flatMap(value => { + value.entry.map(e => Tag(inputTaxo.namespace, value.predicate, Some(e.value), @@ -131,7 +129,7 @@ class TaxonomyCtrl @Inject() ( else for { tagsEntities <- allTags.toTry(t => tagSrv.create(t)) - richTaxonomy <- taxonomySrv.create(taxonomy, tagsEntities) + richTaxonomy <- taxonomySrv.create(inputTaxo.toTaxonomy, tagsEntities) } yield richTaxonomy } diff --git a/thehive/app/org/thp/thehive/models/Tag.scala b/thehive/app/org/thp/thehive/models/Tag.scala index cc97dc317e..3ad58979a5 100644 --- a/thehive/app/org/thp/thehive/models/Tag.scala +++ b/thehive/app/org/thp/thehive/models/Tag.scala @@ -56,16 +56,3 @@ object Tag { } } } - -case class RichTag(tag: Tag with Entity) { - def _id: EntityId = tag._id - def _createdBy: String = tag._createdBy - def _updatedBy: Option[String] = tag._updatedBy - def _createdAt: Date = tag._createdAt - def _updatedAt: Option[Date] = tag._updatedAt - def namespace: String = tag.namespace - def predicate: String = tag.predicate - def value: Option[String] = tag.value - def description: Option[String] = tag.description - def colour: Int = tag.colour -} diff --git a/thehive/app/org/thp/thehive/models/Taxonomy.scala b/thehive/app/org/thp/thehive/models/Taxonomy.scala index a7815963e6..bc4fb1a6d4 100644 --- a/thehive/app/org/thp/thehive/models/Taxonomy.scala +++ b/thehive/app/org/thp/thehive/models/Taxonomy.scala @@ -18,7 +18,7 @@ case class TaxonomyTag() case class RichTaxonomy( taxonomy: Taxonomy with Entity, - tags: Seq[RichTag] + tags: Seq[Tag] ) { def _id: EntityId = taxonomy._id def _createdBy: String = taxonomy._createdBy diff --git a/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala b/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala index e109b6ffd7..100ce9967b 100644 --- a/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala +++ b/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala @@ -80,11 +80,11 @@ class TheHiveSchemaDefinition @Inject() extends Schema with UpdatableSchema { // Taxonomies .addVertexModel[String]("Taxonomy", Seq("namespace")) .dbOperation[Database]("Add Custom taxonomy vertex for each Organisation") { db => - db.tryTransaction { g => - // If there are no taxonomies in database, add a custom one for each organisation - db.labelFilter("Taxonomy")(Traversal.V()(g)).headOption match { - case None => - db.labelFilter("Organisation")(Traversal.V()(g)).toIterator.toTry { o => + db.tryTransaction { implicit g => + // For each organisation, if there is no custom taxonomy, create it + db.labelFilter("Organisation")(Traversal.V()).toIterator.toTry { o => + Traversal.V(EntityId(o.id)).out[OrganisationTaxonomy].v[Taxonomy].unsafeHas("namespace", "custom").headOption match { + case 
None => val taxoVertex = g.addVertex("Taxonomy") taxoVertex.property("_label", "Taxonomy") taxoVertex.property("_createdBy", "system@thehive.local") @@ -95,8 +95,8 @@ class TheHiveSchemaDefinition @Inject() extends Schema with UpdatableSchema { taxoVertex.property("enabled", true) o.addEdge("OrganisationTaxonomy", taxoVertex) Success(()) - } - case _ => Success(()) + case _ => Success(()) + } } }.map(_ => ()) } diff --git a/thehive/app/org/thp/thehive/services/TaxonomySrv.scala b/thehive/app/org/thp/thehive/services/TaxonomySrv.scala index 062d96b4e4..4b4b7c28a2 100644 --- a/thehive/app/org/thp/thehive/services/TaxonomySrv.scala +++ b/thehive/app/org/thp/thehive/services/TaxonomySrv.scala @@ -47,7 +47,7 @@ class TaxonomySrv @Inject() ( taxonomy <- createEntity(taxo) _ <- organisationTaxonomySrv.create(OrganisationTaxonomy(), organisation, taxonomy) _ <- tags.toTry(t => taxonomyTagSrv.create(TaxonomyTag(), taxonomy, t)) - richTaxonomy <- Try(RichTaxonomy(taxonomy, tags.map(RichTag))) + richTaxonomy <- Try(RichTaxonomy(taxonomy, tags)) } yield richTaxonomy def setEnabled(taxonomyId: EntityIdOrName, isEnabled: Boolean)(implicit graph: Graph): Try[Unit] = @@ -80,6 +80,6 @@ object TaxonomyOps { _.by .by(_.tags.fold) ) - .domainMap { case (taxonomy, tags) => RichTaxonomy(taxonomy, tags.map(RichTag)) } + .domainMap { case (taxonomy, tags) => RichTaxonomy(taxonomy, tags) } } } From 114601067ef9da32209e2806ebd4f6304599ce9e Mon Sep 17 00:00:00 2001 From: Robin Riclet Date: Tue, 24 Nov 2020 09:46:17 +0100 Subject: [PATCH 15/93] Added (de)activation & deletion --- .../org/thp/thehive/dto/v1/Taxonomy.scala | 1 - .../thehive/controllers/v1/Conversion.scala | 1 - .../thehive/controllers/v1/Properties.scala | 1 - .../thp/thehive/controllers/v1/Router.scala | 5 +-- .../thehive/controllers/v1/TaxonomyCtrl.scala | 31 ++++++++++++++----- .../org/thp/thehive/models/Permissions.scala | 2 +- .../app/org/thp/thehive/models/Taxonomy.scala | 4 +-- .../models/TheHiveSchemaDefinition.scala | 19 ++++++++++-- .../thehive/services/OrganisationSrv.scala | 13 +++++--- .../thp/thehive/services/TaxonomySrv.scala | 27 +++++++++++++--- 10 files changed, 76 insertions(+), 28 deletions(-) diff --git a/dto/src/main/scala/org/thp/thehive/dto/v1/Taxonomy.scala b/dto/src/main/scala/org/thp/thehive/dto/v1/Taxonomy.scala index 576683127a..7081347184 100644 --- a/dto/src/main/scala/org/thp/thehive/dto/v1/Taxonomy.scala +++ b/dto/src/main/scala/org/thp/thehive/dto/v1/Taxonomy.scala @@ -65,7 +65,6 @@ case class OutputTaxonomy( namespace: String, description: String, version: Int, - enabled: Boolean, tags: Seq[OutputTag] ) diff --git a/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala b/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala index 5ef735620f..850521f156 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala @@ -257,7 +257,6 @@ object Conversion { def toTaxonomy: Taxonomy = inputTaxonomy .into[Taxonomy] - .withFieldConst(_.enabled, false) .transform } diff --git a/thehive/app/org/thp/thehive/controllers/v1/Properties.scala b/thehive/app/org/thp/thehive/controllers/v1/Properties.scala index f3b2bb997d..e8af8ccd96 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/Properties.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/Properties.scala @@ -381,7 +381,6 @@ class Properties @Inject() ( .property("namespace", UMapping.string)(_.field.readonly) .property("description", UMapping.string)(_.field.readonly) 
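The guard added in the 'Review changes' patch above (create the per-organisation custom taxonomy only when that organisation has none yet) is what keeps the migration idempotent. A standalone sketch of the same existence check with plain Gremlin on an in-memory TinkerGraph; labels and property keys mirror the migration, everything else is invented:

    import org.apache.tinkerpop.gremlin.tinkergraph.structure.TinkerGraph

    // The custom taxonomy is created only if the organisation is not yet linked
    // to a Taxonomy vertex with namespace "custom", so running the operation
    // twice leaves a single vertex in place.
    object IdempotentMigrationSketch extends App {
      val graph = TinkerGraph.open()
      val g     = graph.traversal()
      val org   = graph.addVertex("Organisation")

      def ensureCustomTaxonomy(): Unit =
        if (!g.V(org.id()).out("OrganisationTaxonomy").has("namespace", "custom").hasNext()) {
          val taxo = graph.addVertex("Taxonomy")
          taxo.property("namespace", "custom")
          org.addEdge("OrganisationTaxonomy", taxo)
        }

      ensureCustomTaxonomy()
      ensureCustomTaxonomy() // second run finds the existing vertex and does nothing
      println(g.V().hasLabel("Taxonomy").count().next()) // 1
      graph.close()
    }
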
.property("version", UMapping.int)(_.field.readonly) - .property("enabled", UMapping.boolean)(_.field.readonly) .build } diff --git a/thehive/app/org/thp/thehive/controllers/v1/Router.scala b/thehive/app/org/thp/thehive/controllers/v1/Router.scala index 324df8eb8d..f3bd9e882b 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/Router.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/Router.scala @@ -94,8 +94,9 @@ class Router @Inject() ( case POST(p"/taxonomy") => taxonomyCtrl.create case POST(p"/taxonomy/import-zip") => taxonomyCtrl.importZip case GET(p"/taxonomy/$taxoId") => taxonomyCtrl.get(taxoId) - case PUT(p"/taxonomy/$taxoId/activate") => taxonomyCtrl.setEnabled(taxoId, isEnabled = true) - case PUT(p"/taxonomy/$taxoId/deactivate") => taxonomyCtrl.setEnabled(taxoId, isEnabled = false) + case PUT(p"/taxonomy/$taxoId/activate") => taxonomyCtrl.toggleActivation(taxoId, isActive = true) + case PUT(p"/taxonomy/$taxoId/deactivate") => taxonomyCtrl.toggleActivation(taxoId, isActive = false) + case DELETE(p"/taxonomy/$taxoId") => taxonomyCtrl.delete(taxoId) case GET(p"/audit") => auditCtrl.flow // GET /flow controllers.AuditCtrl.flow(rootId: Option[String], count: Option[Int]) diff --git a/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala b/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala index 10c91d6974..e81c47a098 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala @@ -119,12 +119,16 @@ class TaxonomyCtrl @Inject() ( }) // Create a tag for predicates with no tags associated - val predicateWithNoTags = inputTaxo.predicates.diff(tagValues.map(_.predicate)) + val predicateWithNoTags = inputTaxo.predicates.map(_.value).diff(tagValues.map(_.predicate)) val allTags = tags ++ predicateWithNoTags.map(p => - Tag(inputTaxo.namespace, p.value, None, None, tagSrv.defaultColour) + Tag(inputTaxo.namespace, p, None, None, tagSrv.defaultColour) ) - if (taxonomySrv.existsInOrganisation(inputTaxo.namespace)) + if (inputTaxo.namespace.isEmpty) + Failure(BadRequestError(s"A taxonomy with no namespace cannot be imported")) + else if (inputTaxo.namespace == "_freetags") + Failure(BadRequestError(s"Namespace _freetags is restricted for TheHive")) + else if (taxonomySrv.existsInOrganisation(inputTaxo.namespace)) Failure(BadRequestError(s"A taxonomy with namespace '${inputTaxo.namespace}' already exists in this organisation")) else for { @@ -144,12 +148,25 @@ class TaxonomyCtrl @Inject() ( .map(taxonomy => Results.Ok(taxonomy.toJson)) } - def setEnabled(taxonomyId: String, isEnabled: Boolean): Action[AnyContent] = + def toggleActivation(taxonomyId: String, isActive: Boolean): Action[AnyContent] = entrypoint("toggle taxonomy") .authPermittedTransaction(db, Permissions.manageTaxonomy) { implicit request => implicit graph => - taxonomySrv - .setEnabled(EntityIdOrName(taxonomyId), isEnabled) - .map(_ => Results.NoContent) + val toggleF = if (isActive) taxonomySrv.activate _ else taxonomySrv.deactivate _ + toggleF(EntityIdOrName(taxonomyId)).map(_ => Results.NoContent) + } + + def delete(taxoId: String): Action[AnyContent] = + entrypoint("delete taxonomy") + .authPermittedTransaction(db, Permissions.manageTaxonomy) { implicit request => implicit graph => + for { + taxo <- taxonomySrv + .get(EntityIdOrName(taxoId)) + .visible + .getOrFail("Taxonomy") + tags <- Try(taxonomySrv.get(taxo).tags.toSeq) + _ <- tags.toTry(t => tagSrv.delete(t)) + _ <- taxonomySrv.delete(taxo) + } yield Results.NoContent 
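The delete action above chains three fallible steps (fetch the visible taxonomy, delete each attached tag, delete the taxonomy itself) in a Try comprehension, so the first Failure aborts the whole request. A self-contained sketch of that control flow, with plain functions standing in for taxonomySrv and tagSrv:

    import scala.util.{Success, Try}

    // Invented stand-ins for the service calls; any Failure short-circuits the comprehension.
    object DeleteCascadeSketch extends App {
      def getTaxonomy(id: String): Try[String]  = Success(s"taxonomy-$id")
      def tagsOf(taxo: String): Seq[String]     = Seq("tlp:red", "tlp:amber")
      def deleteTag(tag: String): Try[Unit]     = Success(println(s"deleted tag $tag"))
      def deleteTaxonomy(t: String): Try[Unit]  = Success(println(s"deleted $t"))

      val result: Try[Unit] =
        for {
          taxo <- getTaxonomy("42")
          // sequential stand-in for scalligraph's `toTry` over the tag list
          _    <- tagsOf(taxo).foldLeft[Try[Unit]](Success(()))((acc, t) => acc.flatMap(_ => deleteTag(t)))
          _    <- deleteTaxonomy(taxo)
        } yield ()

      println(result) // Success(())
    }
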
} } diff --git a/thehive/app/org/thp/thehive/models/Permissions.scala b/thehive/app/org/thp/thehive/models/Permissions.scala index 7c079860de..81ea621740 100644 --- a/thehive/app/org/thp/thehive/models/Permissions.scala +++ b/thehive/app/org/thp/thehive/models/Permissions.scala @@ -14,7 +14,7 @@ object Permissions extends Perms { lazy val manageAction: PermissionDesc = PermissionDesc("manageAction", "Run Cortex responders ", "organisation") lazy val manageConfig: PermissionDesc = PermissionDesc("manageConfig", "Manage configurations", "organisation", "admin") lazy val manageProfile: PermissionDesc = PermissionDesc("manageProfile", "Manage user profiles", "admin") - lazy val manageTaxonomy: PermissionDesc = PermissionDesc("manageTaxonomy", "Manage taxonomies", "organisation", "admin") + lazy val manageTaxonomy: PermissionDesc = PermissionDesc("manageTaxonomy", "Manage taxonomies", "admin") lazy val manageTag: PermissionDesc = PermissionDesc("manageTag", "Manage tags", "admin") lazy val manageCustomField: PermissionDesc = PermissionDesc("manageCustomField", "Manage custom fields", "admin") lazy val manageShare: PermissionDesc = PermissionDesc("manageShare", "Manage shares", "organisation") diff --git a/thehive/app/org/thp/thehive/models/Taxonomy.scala b/thehive/app/org/thp/thehive/models/Taxonomy.scala index bc4fb1a6d4..e5fcdb0c03 100644 --- a/thehive/app/org/thp/thehive/models/Taxonomy.scala +++ b/thehive/app/org/thp/thehive/models/Taxonomy.scala @@ -9,8 +9,7 @@ import org.thp.scalligraph.{BuildEdgeEntity, BuildVertexEntity, EntityId} case class Taxonomy( namespace: String, description: String, - version: Int, - enabled: Boolean + version: Int ) @BuildEdgeEntity[Taxonomy, Tag] @@ -28,5 +27,4 @@ case class RichTaxonomy( def namespace: String = taxonomy.namespace def description: String = taxonomy.description def version: Int = taxonomy.version - def enabled: Boolean = taxonomy.enabled } diff --git a/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala b/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala index 100ce9967b..246d149474 100644 --- a/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala +++ b/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala @@ -89,7 +89,7 @@ class TheHiveSchemaDefinition @Inject() extends Schema with UpdatableSchema { taxoVertex.property("_label", "Taxonomy") taxoVertex.property("_createdBy", "system@thehive.local") taxoVertex.property("_createdAt", new Date()) - taxoVertex.property("namespace", "custom") + taxoVertex.property("namespace", "_freetags") taxoVertex.property("description", "Custom taxonomy") taxoVertex.property("version", 1) taxoVertex.property("enabled", true) @@ -103,14 +103,22 @@ class TheHiveSchemaDefinition @Inject() extends Schema with UpdatableSchema { .dbOperation[Database]("Add each tag to its Organisation's Custom taxonomy") { db => db.tryTransaction { implicit g => db.labelFilter("Organisation")(Traversal.V()).toIterator.toTry { o => - val customTaxo = Traversal.V(EntityId(o.id())).out("OrganisationTaxonomy").unsafeHas("namespace", "custom").head + val customTaxo = Traversal.V(EntityId(o.id())).out("OrganisationTaxonomy").unsafeHas("namespace", "_freetags").head Traversal.V(EntityId(o.id())).unionFlat( _.out("OrganisationShare").out("ShareCase").out("CaseTag"), _.out("OrganisationShare").out("ShareObservable").out("ObservableTag"), _.in("AlertOrganisation").out("AlertTag"), _.in("CaseTemplateOrganisation").out("CaseTemplateTag") ).toSeq.foreach { tag => - tag.property("namespace", 
"custom") + // Create a freetext tag and store it into predicate + val tagStr = tagString( + tag.property("namespace").value().toString, + tag.property("predicate").value().toString, + tag.property("value").value().toString + ) + tag.property("namespace", "_freetags") + tag.property("predicate", tagStr) + tag.property("value").remove() customTaxo.addEdge("TaxonomyTag", tag) } Success(()) @@ -148,5 +156,10 @@ class TheHiveSchemaDefinition @Inject() extends Schema with UpdatableSchema { case vertexModel: VertexModel => vertexModel.getInitialValues }.flatten + private def tagString(namespace: String, predicate: String, value: String): String = + (if (namespace.headOption.getOrElse('_') == '_') "" else namespace + ':') + + (if (predicate.headOption.getOrElse('_') == '_') "" else predicate) + + (if (value.isEmpty) "" else f"""="$value"""") + override def init(db: Database)(implicit graph: Graph, authContext: AuthContext): Try[Unit] = Success(()) } diff --git a/thehive/app/org/thp/thehive/services/OrganisationSrv.scala b/thehive/app/org/thp/thehive/services/OrganisationSrv.scala index 05f3889499..0a30d74c74 100644 --- a/thehive/app/org/thp/thehive/services/OrganisationSrv.scala +++ b/thehive/app/org/thp/thehive/services/OrganisationSrv.scala @@ -35,6 +35,7 @@ class OrganisationSrv @Inject() ( lazy val taxonomySrv: TaxonomySrv = taxonomySrvProvider.get val organisationOrganisationSrv = new EdgeSrv[OrganisationOrganisation, Organisation, Organisation] val organisationShareSrv = new EdgeSrv[OrganisationShare, Organisation, Share] + val organisationTaxonomySrv = new EdgeSrv[OrganisationTaxonomy, Organisation, Taxonomy] override def createEntity(e: Organisation)(implicit graph: Graph, authContext: AuthContext): Try[Organisation with Entity] = { integrityCheckActor ! 
IntegrityCheckActor.EntityAdded("Organisation") @@ -50,12 +51,14 @@ class OrganisationSrv @Inject() ( } yield createdOrganisation def create(e: Organisation)(implicit graph: Graph, authContext: AuthContext): Try[Organisation with Entity] = { - val customTaxo = Taxonomy("custom", "Custom taxonomy", 1, enabled = true) + val customTaxo = Taxonomy("_freetags", "Custom taxonomy", 1) + val activeTaxos = getByName("admin").taxonomies.toSeq for { - createdOrganisation <- createEntity(e) - _ <- taxonomySrv.createWithOrg(customTaxo, Seq(), createdOrganisation) - _ <- auditSrv.organisation.create(createdOrganisation, createdOrganisation.toJson) - } yield createdOrganisation + newOrga <- createEntity(e) + _ <- taxonomySrv.createWithOrg(customTaxo, Seq(), newOrga) + _ <- activeTaxos.toTry(t => organisationTaxonomySrv.create(OrganisationTaxonomy(), newOrga, t)) + _ <- auditSrv.organisation.create(newOrga, newOrga.toJson) + } yield newOrga } def current(implicit graph: Graph, authContext: AuthContext): Traversal.V[Organisation] = get(authContext.organisation) diff --git a/thehive/app/org/thp/thehive/services/TaxonomySrv.scala b/thehive/app/org/thp/thehive/services/TaxonomySrv.scala index 4b4b7c28a2..b172b44c6b 100644 --- a/thehive/app/org/thp/thehive/services/TaxonomySrv.scala +++ b/thehive/app/org/thp/thehive/services/TaxonomySrv.scala @@ -12,8 +12,9 @@ import org.thp.scalligraph.traversal.{Converter, Traversal} import org.thp.scalligraph.{EntityId, EntityIdOrName, RichSeq} import org.thp.thehive.models._ import org.thp.thehive.services.OrganisationOps._ +import org.thp.thehive.services.TaxonomyOps._ -import scala.util.Try +import scala.util.{Success, Try} @Singleton class TaxonomySrv @Inject() ( @@ -50,10 +51,28 @@ class TaxonomySrv @Inject() ( richTaxonomy <- Try(RichTaxonomy(taxonomy, tags)) } yield richTaxonomy - def setEnabled(taxonomyId: EntityIdOrName, isEnabled: Boolean)(implicit graph: Graph): Try[Unit] = + override def getByName(name: String)(implicit graph: Graph): Traversal.V[Taxonomy] = + Try(startTraversal.getByNamespace(name)).getOrElse(startTraversal.limit(0)) + + def activate(taxonomyId: EntityIdOrName)(implicit graph: Graph, authContext: AuthContext): Try[Unit] = + for { + taxo <- get(taxonomyId).getOrFail("Taxonomy") + organisations <- Try(organisationSrv.startTraversal.filter(_ + .out[OrganisationTaxonomy] + .filter(_.unsafeHas("namespace", taxo.namespace)) + ).toSeq) + _ <- organisations.toTry(o => organisationTaxonomySrv.create(OrganisationTaxonomy(), o, taxo)) + } yield Success(()) + + def deactivate(taxonomyId: EntityIdOrName)(implicit graph: Graph, authContext: AuthContext): Try[Unit] = for { - _ <- get(taxonomyId).update(_.enabled, isEnabled).getOrFail("Taxonomy") - } yield () + taxo <- get(taxonomyId).getOrFail("Taxonomy") + _ <- Try(organisationSrv + .get(authContext.organisation) + .outE[OrganisationTaxonomy] + .filter(_.otherV().unsafeHas("namespace", taxo.namespace)) + .remove()) + } yield Success(()) } From 5e88dba40ab07158a1964e80c44ec2420b412e9b Mon Sep 17 00:00:00 2001 From: Robin Riclet Date: Tue, 24 Nov 2020 10:26:59 +0100 Subject: [PATCH 16/93] Used correct Scalligraph commit --- ScalliGraph | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ScalliGraph b/ScalliGraph index e95b44aafa..1346ea3588 160000 --- a/ScalliGraph +++ b/ScalliGraph @@ -1 +1 @@ -Subproject commit e95b44aafa9269a723903204f9a1676fbbfab698 +Subproject commit 1346ea3588009e8393c7f79180af35be01efa097 From bc1b507880bbd51aea2695d1b8d5030464a31890 Mon Sep 17 00:00:00 2001 From: Robin 
Riclet Date: Tue, 24 Nov 2020 10:39:38 +0100 Subject: [PATCH 17/93] Edit drone.yml --- .drone.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.drone.yml b/.drone.yml index 16354f6ca6..2b2ebfffeb 100644 --- a/.drone.yml +++ b/.drone.yml @@ -8,7 +8,7 @@ steps: - name: submodules image: alpine/git commands: - - git submodule update --recursive --init --remote + - git submodule update --recursive --init # Restore cache of downloaded dependencies - name: restore-cache From e4d43cbe29271fdf56a38a0b2aaa351252a3dbde Mon Sep 17 00:00:00 2001 From: Robin Riclet Date: Tue, 24 Nov 2020 17:07:18 +0100 Subject: [PATCH 18/93] Fixed schema errors & (de)activation --- ScalliGraph | 2 +- .../models/TheHiveSchemaDefinition.scala | 11 +- .../thehive/services/OrganisationSrv.scala | 3 +- .../thp/thehive/services/TaxonomySrv.scala | 24 +-- .../org/thp/thehive/DatabaseBuilder.scala | 5 + .../controllers/v1/TaxonomyCtrlTest.scala | 204 ++++++++++++++++++ .../resources/data/OrganisationTaxonomy.json | 5 + thehive/test/resources/data/Tag.json | 7 + thehive/test/resources/data/Taxonomy.json | 8 + thehive/test/resources/data/TaxonomyTag.json | 3 + .../test/resources/machinetag-badformat.zip | Bin 0 -> 4274 bytes .../test/resources/machinetag-otherfiles.zip | Bin 0 -> 3841 bytes thehive/test/resources/machinetag-present.zip | Bin 0 -> 3941 bytes thehive/test/resources/machinetag.zip | Bin 0 -> 4076 bytes 14 files changed, 252 insertions(+), 20 deletions(-) create mode 100644 thehive/test/org/thp/thehive/controllers/v1/TaxonomyCtrlTest.scala create mode 100644 thehive/test/resources/data/OrganisationTaxonomy.json create mode 100644 thehive/test/resources/data/Taxonomy.json create mode 100644 thehive/test/resources/data/TaxonomyTag.json create mode 100644 thehive/test/resources/machinetag-badformat.zip create mode 100644 thehive/test/resources/machinetag-otherfiles.zip create mode 100644 thehive/test/resources/machinetag-present.zip create mode 100644 thehive/test/resources/machinetag.zip diff --git a/ScalliGraph b/ScalliGraph index 1346ea3588..1a55a0db73 160000 --- a/ScalliGraph +++ b/ScalliGraph @@ -1 +1 @@ -Subproject commit 1346ea3588009e8393c7f79180af35be01efa097 +Subproject commit 1a55a0db730460c6f548695251248934196b6ecc diff --git a/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala b/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala index 246d149474..a509a89059 100644 --- a/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala +++ b/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala @@ -4,6 +4,7 @@ import java.lang.reflect.Modifier import java.util.Date import javax.inject.{Inject, Singleton} +import org.apache.tinkerpop.gremlin.process.traversal.P import org.apache.tinkerpop.gremlin.structure.Graph import org.janusgraph.core.schema.ConsistencyModifier import org.janusgraph.graphdb.types.TypeDefinitionCategory @@ -82,8 +83,8 @@ class TheHiveSchemaDefinition @Inject() extends Schema with UpdatableSchema { .dbOperation[Database]("Add Custom taxonomy vertex for each Organisation") { db => db.tryTransaction { implicit g => // For each organisation, if there is no custom taxonomy, create it - db.labelFilter("Organisation")(Traversal.V()).toIterator.toTry { o => - Traversal.V(EntityId(o.id)).out[OrganisationTaxonomy].v[Taxonomy].unsafeHas("namespace", "custom").headOption match { + db.labelFilter("Organisation")(Traversal.V()).unsafeHas("name", P.neq("admin")).toIterator.toTry { o => +
Traversal.V(EntityId(o.id)).out[OrganisationTaxonomy].v[Taxonomy].unsafeHas("namespace", "_freetags").headOption match { case None => val taxoVertex = g.addVertex("Taxonomy") taxoVertex.property("_label", "Taxonomy") @@ -114,7 +115,7 @@ class TheHiveSchemaDefinition @Inject() extends Schema with UpdatableSchema { val tagStr = tagString( tag.property("namespace").value().toString, tag.property("predicate").value().toString, - tag.property("value").value().toString + tag.property ("value").orElse("") ) tag.property("namespace", "_freetags") tag.property("predicate", tagStr) @@ -125,8 +126,8 @@ class TheHiveSchemaDefinition @Inject() extends Schema with UpdatableSchema { } }.map(_ => ()) } - .updateGraph("Add manageTaxonomy to org-admin profile", "Profile") { traversal => - Try(traversal.unsafeHas("name", "org-admin").raw.property("permissions", "manageTaxonomy").iterate()) + .updateGraph("Add manageTaxonomy to admin profile", "Profile") { traversal => + Try(traversal.unsafeHas("name", "admin").raw.property("permissions", "manageTaxonomy").iterate()) Success(()) } diff --git a/thehive/app/org/thp/thehive/services/OrganisationSrv.scala b/thehive/app/org/thp/thehive/services/OrganisationSrv.scala index 0a30d74c74..c567696848 100644 --- a/thehive/app/org/thp/thehive/services/OrganisationSrv.scala +++ b/thehive/app/org/thp/thehive/services/OrganisationSrv.scala @@ -51,11 +51,10 @@ class OrganisationSrv @Inject() ( } yield createdOrganisation def create(e: Organisation)(implicit graph: Graph, authContext: AuthContext): Try[Organisation with Entity] = { - val customTaxo = Taxonomy("_freetags", "Custom taxonomy", 1) val activeTaxos = getByName("admin").taxonomies.toSeq for { newOrga <- createEntity(e) - _ <- taxonomySrv.createWithOrg(customTaxo, Seq(), newOrga) + _ <- taxonomySrv.createFreetag(newOrga) _ <- activeTaxos.toTry(t => organisationTaxonomySrv.create(OrganisationTaxonomy(), newOrga, t)) _ <- auditSrv.organisation.create(newOrga, newOrga.toJson) } yield newOrga diff --git a/thehive/app/org/thp/thehive/services/TaxonomySrv.scala b/thehive/app/org/thp/thehive/services/TaxonomySrv.scala index b172b44c6b..2051c64930 100644 --- a/thehive/app/org/thp/thehive/services/TaxonomySrv.scala +++ b/thehive/app/org/thp/thehive/services/TaxonomySrv.scala @@ -36,20 +36,20 @@ class TaxonomySrv @Inject() ( def create(taxo: Taxonomy, tags: Seq[Tag with Entity])(implicit graph: Graph, authContext: AuthContext): Try[RichTaxonomy] = for { - organisation <- organisationSrv.getOrFail(authContext.organisation) - richTaxonomy <- createWithOrg(taxo, tags, organisation) + taxonomy <- createEntity(taxo) + _ <- tags.toTry(t => taxonomyTagSrv.create(TaxonomyTag(), taxonomy, t)) + richTaxonomy <- Try(RichTaxonomy(taxonomy, tags)) + _ <- activate(richTaxonomy._id) } yield richTaxonomy - def createWithOrg(taxo: Taxonomy, - tags: Seq[Tag with Entity], - organisation: Organisation with Entity) - (implicit graph: Graph, authContext: AuthContext): Try[RichTaxonomy] = + def createFreetag(organisation: Organisation with Entity)(implicit graph: Graph, authContext: AuthContext): Try[RichTaxonomy] = { + val customTaxo = Taxonomy("_freetags", "Custom taxonomy", 1) for { - taxonomy <- createEntity(taxo) + taxonomy <- createEntity(customTaxo) + richTaxonomy <- Try(RichTaxonomy(taxonomy, Seq())) _ <- organisationTaxonomySrv.create(OrganisationTaxonomy(), organisation, taxonomy) - _ <- tags.toTry(t => taxonomyTagSrv.create(TaxonomyTag(), taxonomy, t)) - richTaxonomy <- Try(RichTaxonomy(taxonomy, tags)) } yield richTaxonomy + } override 
def getByName(name: String)(implicit graph: Graph): Traversal.V[Taxonomy] = Try(startTraversal.getByNamespace(name)).getOrElse(startTraversal.limit(0)) @@ -57,7 +57,7 @@ class TaxonomySrv @Inject() ( def activate(taxonomyId: EntityIdOrName)(implicit graph: Graph, authContext: AuthContext): Try[Unit] = for { taxo <- get(taxonomyId).getOrFail("Taxonomy") - organisations <- Try(organisationSrv.startTraversal.filter(_ + organisations <- Try(organisationSrv.startTraversal.filterNot(_ .out[OrganisationTaxonomy] .filter(_.unsafeHas("namespace", taxo.namespace)) ).toSeq) @@ -67,8 +67,8 @@ class TaxonomySrv @Inject() ( def deactivate(taxonomyId: EntityIdOrName)(implicit graph: Graph, authContext: AuthContext): Try[Unit] = for { taxo <- get(taxonomyId).getOrFail("Taxonomy") - _ <- Try(organisationSrv - .get(authContext.organisation) + _ <- Try(organisationSrv.startTraversal + .filterNot(_.unsafeHas("name", "admin")) .outE[OrganisationTaxonomy] .filter(_.otherV().unsafeHas("namespace", taxo.namespace)) .remove()) diff --git a/thehive/test/org/thp/thehive/DatabaseBuilder.scala b/thehive/test/org/thp/thehive/DatabaseBuilder.scala index 51767a822f..52094c2147 100644 --- a/thehive/test/org/thp/thehive/DatabaseBuilder.scala +++ b/thehive/test/org/thp/thehive/DatabaseBuilder.scala @@ -35,6 +35,7 @@ class DatabaseBuilder @Inject() ( observableSrv: ObservableSrv, observableTypeSrv: ObservableTypeSrv, taskSrv: TaskSrv, + taxonomySrv: TaxonomySrv, tagSrv: TagSrv, keyValueSrv: KeyValueSrv, dataSrv: DataSrv, @@ -82,11 +83,15 @@ class DatabaseBuilder @Inject() ( createVertex(impactStatusSrv, FieldsParser[ImpactStatus]) ++ createVertex(attachmentSrv, FieldsParser[Attachment]) ++ createVertex(tagSrv, FieldsParser[Tag]) ++ + createVertex(taxonomySrv, FieldsParser[Taxonomy]) ++ createVertex(pageSrv, FieldsParser[Page]) ++ createVertex(dashboardSrv, FieldsParser[Dashboard]) createEdge(organisationSrv.organisationOrganisationSrv, organisationSrv, organisationSrv, FieldsParser[OrganisationOrganisation], idMap) createEdge(organisationSrv.organisationShareSrv, organisationSrv, shareSrv, FieldsParser[OrganisationShare], idMap) + createEdge(organisationSrv.organisationTaxonomySrv, organisationSrv, taxonomySrv, FieldsParser[OrganisationTaxonomy], idMap) + + createEdge(taxonomySrv.taxonomyTagSrv, taxonomySrv, tagSrv, FieldsParser[TaxonomyTag], idMap) createEdge(roleSrv.userRoleSrv, userSrv, roleSrv, FieldsParser[UserRole], idMap) diff --git a/thehive/test/org/thp/thehive/controllers/v1/TaxonomyCtrlTest.scala b/thehive/test/org/thp/thehive/controllers/v1/TaxonomyCtrlTest.scala new file mode 100644 index 0000000000..d08034f2c9 --- /dev/null +++ b/thehive/test/org/thp/thehive/controllers/v1/TaxonomyCtrlTest.scala @@ -0,0 +1,204 @@ +package org.thp.thehive.controllers.v1 + +import org.thp.scalligraph.controllers.FakeTemporaryFile +import org.thp.thehive.TestAppBuilder +import org.thp.thehive.dto.v1.{InputEntry, InputPredicate, InputTaxonomy, InputValue, OutputTag, OutputTaxonomy} +import play.api.libs.Files +import play.api.libs.json.Json +import play.api.mvc.{AnyContentAsMultipartFormData, MultipartFormData} +import play.api.mvc.MultipartFormData.FilePart +import play.api.test.{FakeRequest, PlaySpecification} + +case class TestTaxonomy( + namespace: String, + description: String, + version: Int, + tags: List[OutputTag] +) + +object TestTaxonomy { + def apply(outputTaxonomy: OutputTaxonomy): TestTaxonomy = + TestTaxonomy( + outputTaxonomy.namespace, + outputTaxonomy.description, + outputTaxonomy.version, + 
outputTaxonomy.tags.toList, + ) +} + +class TaxonomyCtrlTest extends PlaySpecification with TestAppBuilder { + "taxonomy controller" should { + + val inputTaxo = InputTaxonomy( + "test-taxo", + "A test taxonomy", + 1, + None, + None, + List( + InputPredicate("pred1", None, None, None), + InputPredicate("pred2", None, None, None) + ), + Some(List( + InputValue("pred1", List( + InputEntry("entry1", None, None, None, None)) + ), + InputValue("pred2", List( + InputEntry("entry2", None, None, None, None), + InputEntry("entry21", None, None, None, None) + )) + )) + ) + + "create a valid taxonomy" in testApp { app => + val request = FakeRequest("POST", "/api/v1/taxonomy") + .withJsonBody(Json.toJson(inputTaxo)) + .withHeaders("user" -> "admin@thehive.local") + + val result = app[TaxonomyCtrl].create(request) + status(result) must beEqualTo(201).updateMessage(s => s"$s\n${contentAsString(result)}") + + val resultCase = contentAsJson(result).as[OutputTaxonomy] + + TestTaxonomy(resultCase) must_=== TestTaxonomy( + "test-taxo", + "A test taxonomy", + 1, + List( + OutputTag("test-taxo", "pred1", Some("entry1"), None, 0), + OutputTag("test-taxo", "pred2", Some("entry2"), None, 0), + OutputTag("test-taxo", "pred2", Some("entry21"), None, 0) + ) + ) + } + + "return error if not admin" in testApp { app => + val request = FakeRequest("POST", "/api/v1/taxonomy") + .withJsonBody(Json.toJson(inputTaxo)) + .withHeaders("user" -> "certuser@thehive.local") + + val result = app[TaxonomyCtrl].create(request) + status(result) must beEqualTo(403).updateMessage(s => s"$s\n${contentAsString(result)}") + (contentAsJson(result) \ "type").as[String] must beEqualTo("AuthorizationError") + } + + "return error if namespace is present in database" in testApp { app => + val alreadyInDatabase = inputTaxo.copy(namespace = "taxonomy1") + + val request = FakeRequest("POST", "/api/v1/taxonomy") + .withJsonBody(Json.toJson(alreadyInDatabase)) + .withHeaders("user" -> "admin@thehive.local") + + val result = app[TaxonomyCtrl].create(request) + status(result) must beEqualTo(400).updateMessage(s => s"$s\n${contentAsString(result)}") + (contentAsJson(result) \ "type").as[String] must beEqualTo("BadRequest") + (contentAsJson(result) \ "message").as[String] must contain("already exists") + } + + "return error if namespace is empty" in testApp { app => + val emptyNamespace = inputTaxo.copy(namespace = "") + + val request = FakeRequest("POST", "/api/v1/taxonomy") + .withJsonBody(Json.toJson(emptyNamespace)) + .withHeaders("user" -> "admin@thehive.local") + + val result = app[TaxonomyCtrl].create(request) + status(result) must beEqualTo(400).updateMessage(s => s"$s\n${contentAsString(result)}") + (contentAsJson(result) \ "type").as[String] must beEqualTo("BadRequest") + + } + + "get a taxonomy present" in testApp { app => + val request = FakeRequest("GET", "/api/v1/taxonomy/taxonomy1") + .withHeaders("user" -> "certuser@thehive.local") + + val result = app[TaxonomyCtrl].get("taxonomy1")(request) + status(result) must beEqualTo(201).updateMessage(s => s"$s\n${contentAsString(result)}") + val resultCase = contentAsJson(result).as[OutputTaxonomy] + + TestTaxonomy(resultCase) must_=== TestTaxonomy( + "taxonomy1", + "The taxonomy 1", + 1, + List(OutputTag("taxonomy1", "pred1", Some("value1"), None, 0)) + ) + } + + "return error if taxonomy is not present in database" in testApp { app => + val request = FakeRequest("GET", "/api/v1/taxonomy/taxonomy404") + .withHeaders("user" -> "admin@thehive.local") + + val result = 
app[TaxonomyCtrl].get("taxonomy404")(request) + status(result) must beEqualTo(404).updateMessage(s => s"$s\n${contentAsString(result)}") + (contentAsJson(result) \ "type").as[String] must beEqualTo("NotFoundError") + } + + "import zip file correctly" in testApp { app => + val request = FakeRequest("POST", "/api/v1/taxonomy/import-zip") + .withHeaders("user" -> "admin@thehive.local") + .withBody(AnyContentAsMultipartFormData(multipartZipFile("machinetag.zip"))) + + val result = app[TaxonomyCtrl].importZip(request) + status(result) must beEqualTo(201).updateMessage(s => s"$s\n${contentAsString(result)}") + + val zipTaxos = contentAsJson(result).as[Seq[OutputTaxonomy]] + zipTaxos.size must beEqualTo(2) + } + + "return error if zip file contains other files than taxonomies" in testApp { app => + val request = FakeRequest("POST", "/api/v1/taxonomy/import-zip") + .withHeaders("user" -> "admin@thehive.local") + .withBody(AnyContentAsMultipartFormData(multipartZipFile("machinetag-otherfiles.zip"))) + + val result = app[TaxonomyCtrl].importZip(request) + status(result) must beEqualTo(400).updateMessage(s => s"$s\n${contentAsString(result)}") + (contentAsJson(result) \ "type").as[String] must beEqualTo("BadRequest") + (contentAsJson(result) \ "message").as[String] must contain("formatting") + } + + "return error if zip file contains an already present taxonomy" in testApp { app => + val request = FakeRequest("POST", "/api/v1/taxonomy/import-zip") + .withHeaders("user" -> "admin@thehive.local") + .withBody(AnyContentAsMultipartFormData(multipartZipFile("machinetag-present.zip"))) + + val result = app[TaxonomyCtrl].importZip(request) + status(result) must beEqualTo(400).updateMessage(s => s"$s\n${contentAsString(result)}") + (contentAsJson(result) \ "type").as[String] must beEqualTo("BadRequest") + (contentAsJson(result) \ "message").as[String] must contain("already exists") + } + + "return error if zip file contains a bad formatted taxonomy" in testApp { app => + val request = FakeRequest("POST", "/api/v1/taxonomy/import-zip") + .withHeaders("user" -> "admin@thehive.local") + .withBody(AnyContentAsMultipartFormData(multipartZipFile("machinetag-badformat.zip"))) + + val result = app[TaxonomyCtrl].importZip(request) + status(result) must beEqualTo(400).updateMessage(s => s"$s\n${contentAsString(result)}") + (contentAsJson(result) \ "type").as[String] must beEqualTo("BadRequest") + (contentAsJson(result) \ "message").as[String] must contain("formatting") + } + + /* + "activate a taxonomy" in testApp { app => + + } + + "deactivate a taxonomy" in testApp { app => + + } + + "delete a taxonomy" in testApp { app => + + } + + */ + } + + def multipartZipFile(name: String): MultipartFormData[Files.TemporaryFile] = MultipartFormData( + // file must be place in test/resources/ + dataParts = Map.empty, + files = Seq(FilePart("file", name, Option("application/zip"), FakeTemporaryFile.fromResource(s"/$name"))), + badParts = Seq() + ) + +} diff --git a/thehive/test/resources/data/OrganisationTaxonomy.json b/thehive/test/resources/data/OrganisationTaxonomy.json new file mode 100644 index 0000000000..df6a1338b2 --- /dev/null +++ b/thehive/test/resources/data/OrganisationTaxonomy.json @@ -0,0 +1,5 @@ +[ + {"from": "admin", "to": "taxonomy1"}, + {"from": "cert", "to": "taxonomy1"}, + {"from": "soc", "to": "taxonomy1"} +] \ No newline at end of file diff --git a/thehive/test/resources/data/Tag.json b/thehive/test/resources/data/Tag.json index c6136decb4..094be1895a 100644 --- a/thehive/test/resources/data/Tag.json +++ 
b/thehive/test/resources/data/Tag.json @@ -68,5 +68,12 @@ "predicate": "testPredicate", "value": "world", "colour": 0 + }, + { + "id": "taxonomy-tag1", + "namespace": "taxonomy1", + "predicate": "pred1", + "value": "value1", + "colour": 0 } ] \ No newline at end of file diff --git a/thehive/test/resources/data/Taxonomy.json b/thehive/test/resources/data/Taxonomy.json new file mode 100644 index 0000000000..500c39c010 --- /dev/null +++ b/thehive/test/resources/data/Taxonomy.json @@ -0,0 +1,8 @@ +[ + { + "id": "taxonomy1", + "namespace": "taxonomy1", + "description": "The taxonomy 1", + "version": "1" + } +] \ No newline at end of file diff --git a/thehive/test/resources/data/TaxonomyTag.json b/thehive/test/resources/data/TaxonomyTag.json new file mode 100644 index 0000000000..80806c707c --- /dev/null +++ b/thehive/test/resources/data/TaxonomyTag.json @@ -0,0 +1,3 @@ +[ + {"from": "taxonomy1", "to": "taxonomy-tag1"} +] \ No newline at end of file diff --git a/thehive/test/resources/machinetag-badformat.zip b/thehive/test/resources/machinetag-badformat.zip new file mode 100644 index 0000000000000000000000000000000000000000..aae10498e3ed41fec6c29969e841a5ab9785e8d3 GIT binary patch literal 4274 zcmb7{Rag}4wt$BQ3F(&Zly3YbL^_56B&EA%1{jA>lys2pkWMLqA!q0wKuUfXb%GfK{x95#000UA2f)GJ$;B7! zZ4VXmfcW}=OvwR6WZzMCe~R&U1;9uFIQTnw0Koq)d8R!E8xJYbKZKtwkUk{03FKdO_?wmGEPo5+V;MLg7(%#>ptKH=Y_WvtWL6d()Gc+M>E95{ea@7IOv z*k_V3MLmpVOB+O)ja9#TiyO0vq8ITkqOt#OEVJ6br9nU>$VsArf3*g*dEh$a7exme zjBP*2+O%l5UI<=))A%fKUi#SKJe#odI-s=P&!ggCu4WkrcJ4b=S{mf=!e3!kve)sl zu7TPD^_7xi)465=fq8EFF6^bsig5)xLMHiXp*N8|i7WI~eFDCA!|tjsukgU65%M0X z0zI_;!mHx%#+sDVtccK6nMTM69%ON>9%9iuZ?Lk_?D$2~?o+ZeisVbB0${r#6$xHz zvAd0x-GSf4E?#lP&-fq#sM0_%7_OMLUnrY*7ss&dfz$e#C97$6rStH0bb(C5+6bF` z!lfaq^(yZo&Z&>lrymr!lJ_aR4_JyumL9Z6S* zU7mpSp_7GqSk>Z7 zQUCzuZ&<#)S?j4q}=~bFPZ? 
z^Ehq0)L}jt@XYeu;P&T6fNM^yL#EqW_^5-LPToNyrAd#CRV^G5#T6J@L+yc|0*5|F z<__QXS4~X_kUvJR3QqDbe@s`JrkJtW{i0hXKIXpDZIG;6;7bz?=smrL1G?L>%IiEjN}e-LS| z`;Tn=jH4j}BEr^jNE#;@?AW;hL@+z1>42D?<2&EqkCo{ZgH4#^=)F)yS_tIkfF0^e zEw-okO?f@u4A0ND8LuGBJ(5HL>wA(daDd;YNBjlL^DOOI=ke8e60)XFSuKuS<22}yvx!o<9@x4n z+DX`d2b}k#*OAjlKlyJ99L8M}m?n?eiA8UaY1+-b&l~*@Fr&6BJ>&^@bJvufySw;t z#Gicok=Uc}q&B=}c->ud@){x89YYblU2jKBfGkdhIZ}mN7MGF@^d6XB1 zZlQb<8ZR}LfN<`IFNJG%zg|2H7vtXOTS9*pa}G^#C^yw!Btqk(z%|X1tA0XC7WPcJ z+Vm@AtMec0D{`f6`Qi#8O{=33X06q?8IHJpC94B@Km|Wm=AQ0USU^flw)HFD@CeCh znEHe)hf5kyv>o-6Kw{D*Xz)92h-@(sKW;_3ji>CgL|gzWc6f9#mHTbHRDq$znr51^ zo16Pk#(s-BiyT(+D_!K+qB5NhNi>mo##ch9=sn062eY%(3d>R?FOO{D8->voaJFRe zx$T+eIr9KH|9ZY{xg`M=qgxU)@Ol9qb_IpJ&cVp;i9YU6&#+i&(v9gj<|X92iBi5Q zKzq#GkaUJRe{2(goj>7}RLG;JZcYlWghXAZ^HL^-b22xS+lyQ%%ncw!i3y6r;$xq~ z9i{j&2NLP-d4?R*Se|=WXsDR-{Mp|q5N?(zuwkXRUl?sfdfC0Q6@0>4q$F3G^v_;FPC_VvoiZBBwp%JGw=?sW8nAFUJF^Jt!*tj2!d zD4`y8f91l)iLGS`GGB$!zH)gaM+A>-aAI*(@uqdttYQ%QZzl#_4`>+o53Twv2QnraNN7O`=4J1C8X*#zbb}Uu6~GeX|8Ir?ACM7GmU9u zX1mY6;85ZHgln=H92`wIU&_rlyphYGeaaLiWmkKa14^{!cQjIT&R@OdIHO9Y zYQm+kWZ9HxcQ(TPa7a@un){3!J1Zt?qHmKH5Q;5U(M9&ptw9$DSW5FXaeQHFi3|*z zM3!A?N-|D;r6R82Mm_qykOqerazYYDTMq(qf~^sXq(e*|oA84YCEL06J&ABmCNfEA zJXsJR35o1bfJR8!k=4Vq!ym4?ShLWfDPJk$yft3dd^3mU#o4yh(E|C3)Y=e!M^01f=TfMVNJ0MMd1^Wu&JW6la) z$3|Oj2-e&2PzVqvvQu_Bd5eG;AW)IBH`xW8LEC+ZgN%G<2MUpJV6%xD!}A*QTPUFjbCm=)2B{p2qDIoFDeQ zF$;9=lwnzx?n|sBMg%X*%7aUjn^&KX;r~e#A^}+x`?Z zB`DM)QU2_bJb`j|9ys!&b)06XJgs!&o72t687F6V_M`<{1p~Z{sWLv(=0YgUkIwqD z-oK-O^57(P#oY)awa0nwf*O9&;1_DfOcc80s`4L>O+;%kQ@m-%qgIsCFK($g)hRY9 zpzgaWgNl3f)jkHs;u)@FaDG9 z%x=vH4)L{HT^KO3UgB)pU}uYA-EmLRxNfa1KKd_gHOri$4Yx=HEExMyqukQI=1UY` z{t+rCj)HG-CpNDrn5K1b_F#a-Cfm<4j4@xf{)>5YVAN0T0ZCVTzbFXF7L*Y8h5V7V!!XPq)R!h}%%ks8!=Pa8*TPQy z(5pu2GhH`Fe|RL#j4+Fm;_ADUfVYlK_!068V+-v({KR(S>tKas#T#M5%_=9Q0j%Z7 zHPr~`hF4^9+0+rDqC*m^SloD6PDNHqA(ycx0vt~w>9Gf0TBLi~`0KY82F4mkUok$` zALRGG;w4e(i(_}{P51oiM%D80F}^0On-$$YvFBW)8iybY@F*SDkvHC={H}NMxh&o7 zKwhg$tv#=*Tb7y*kINwe8&YJcqAgCcc=)O0(~2%JiF4MoFR0g=hA{ajZzrVf=CeB3JW{gnL(okAEi0gZ&ad@=Czy0yok7JX;(8=&dKd1cqNR z72*5_3_SFYk2OroBcGi3$aT)}HZwtXiOn!G#W~}HZbM)CW1DA^o*WLGSwB!R>v}(9 zymTP@*<~R>)4AG`oP@;k4>dgTB2wy`g2()UcXeNMLK=LwUtV4P{)HUHip#J@(CAgk zQtPFw+Ga>DR6V-QUk*LDTYu062o6Kao=opR5He0@PdWq*rbeN9GS-v<_R=;KNJuZj zj%n2$^H%WjGLvA7kM1Xl%Z_i1)y*unz5 z4Z1RH-D9<~D-xJHdHRPJtVf-0M$c$)T1wY;4_R6_$;2m9$1uIy^?WR$c~(ZbGc3A# z*tOtPo1L`0?vNbE_ap2=MoHy?FKGH*yidK2R;laCUKrKCU~a(NmvIoHwq;T~EGt`A z!x}f@E5B)utvvfV)zc3386qM}Vjk=t3c|{t4|?-69tMDZ>|LRH449V9j?C7@6XO&? zzpQOy1Gql1z_Z|7_2wc^{+@Sr=zBUWN_7h?WQH4ll_+=Vve0$hLK0mz5b}znUiEsM zs716Mx2`3sqpi^N%h^d53Ids|j%JLJY(Wtf?7Qq&N`1^h7F$^?Qv9@cFj>VA_2jBi zgUIv&pbu1tR^a_o?kZ8CB&5-JMC7Kh+KpfNq9f@zJpM5cfU}D9*_dbHjksjh(cMr? 
zISrQd=h+P6?zf~75x-NnYBnba0RE_%kuDA{E#7|||DQtl$MOFaP2qn||DEywv-ck* k{O8<1O8Bp63Kzu%{MUkLq)R~frvvWqZ2!Hl-~V&_A2=xanE(I) literal 0 HcmV?d00001 diff --git a/thehive/test/resources/machinetag-otherfiles.zip b/thehive/test/resources/machinetag-otherfiles.zip new file mode 100644 index 0000000000000000000000000000000000000000..cac42ffef199dfb9148db6dd29e41cdd970023df GIT binary patch literal 3841 zcmb7{XE+?%x5h^&N_1gF?=@dz%?dMo9!gM)VRbdW{w_dM{DNh#(H4 z1yM!|Mi*{!o_qfHJonpwt!MAG_Luj4KklEg9uYAkfc$qeI3i2{|64r2Cl!DT;OPT{ zJG*&0Nx%_suqicw%zUrS_MhXApac*QZx8_he_w}>swPc-ntlckVkAw3B*cXVB@GQs zgoex?>4FCziR=DYCIlGkk&#oUkdyNL*8k&S{2zBWB=kS-xiSC%?QeH4I|o-cZzs5& zi-LqZ%*Xrh1TNxe+uOZuw%mPa+y9pUYlV$lk*DwY45Eci>k}ol-|ympZs9MBK5oWH zsSL3*D~!>L!^9JB2+!s4oKo3te7oF>CQFM_jo(I zjv06`c6BaBtrTd-vl|3@@(vK4_zigLKA&BgFRC65_AJt?z3vO7%yv7;Bu?KOAR!}d z!opIysSx{4bzqXo5iNVv_>{oW_I8Y1hXi8uahCowRkVdrb{5pWrr2VAeA|@YJ$!I_ zve{$-W$vCRuDL0u0;M)uDTPn~*Xrxk1$1mLi|jlNLLtkj;mrsLn%ucBfn{N>JJ8EI zMq6|fJ?ih@X4$qKa0S9g_=Dr#!lv{skFosuVc;MrVRFBji?460r_mKt%$=wa!XQ&K z#LFAfjYxp+s(aiq@M(t5q!V^Aj)JPbLtdLJ+awh};PhCjR38edh;kJ5+kobr^f+*P z8zlW@L2zsHF+z*a?7*TY&@}b(#@Cg;+n8aSg>LHj>!}M`kIhZuSn|(4eQ4a?PfBZk zGor4}se78inXLm6J)N(I9)p06`B^ggA1Ae-p2thp#mbLq4oa=DCI;tTmDmVN2STFM zj>Uor9Y3cVh&w`m_z8us4dHu zwGoKTht)3*N0>ELUZpz__7*Mn=YSM^+1a|gQV{;h(V37JK4B5kQ3#Duc`oNvz9?Jz zhXLf2bMT-yIxzV{5OM5+Y%^cUX_2H5Tw-_c_)GQ=Y_S4MBZOg`wu_f{SI%yYK7$%j z^eatl-=aK?2~9SdfEOqtRrLBK5R0(2)DF#1q%MtY6c|P@<#9J=2)J&U<~Z?zxF0-S zw_KBgOEAqz7h+>q_xVW$E?+61<+Zy{TvBv|IDKdq zLL5EhmR87Nrf*0LDu=zgNaLqX4C7|2E433lR+#EXiIbBQgvP}@MLNg`Vs@m`+;WV# z#&LW%aPSZb%X!DQYZZO;7yYc9QB65E}*~$5Ry8P_t@yW2(6G?0uPE=jyj|e?_St1GwzG0YmnR_<>)>BCi|JSVl0ok`-csP{7M%^I1hih>TNv- zn1_iYfzC;w#W-wR*ZRf6!BtkgO7i~0#ICgV+b2z7wGrkx|#5t?XU@Tz}8n zT5V^&{o}x}^ESh+?Om&03sRXcHR57`nAb^iIi7l zfsxe0)nfX4RIJiboIgfB&JKW64#kot{kRvb*g0xPQIpG+u~nIcrq+HL7JTz9gHQxK&l3Vru8J$sqK4w7x2YGZ{%T z%fzfc^f0p;jFJ*gMfW{D4UA9GZ+KA%vs`@t%DJJU-m**IEypyvnT_Kn=bZhm7%P^; zXsiur&=CA`@r|xBsuYc)%+T?A>R$viDn=~HX3wn)1sT`%;V~_tYz$3h6_Vf{yzoH& z&G-wd2?7EwS0CEdaQ?`7@HTC-lCp~(aEG>C>t{P!e9BLwaX-C6u42?Vzi8f+Y;!Wg zZFfLRB8nH!i<^`Xe{5i#>K}qDRMA8CO)bG^`+>!|S_D1_^#m4{RWi%YR3$mb-eNHq z=tq6#t>8NQXLm)UjMr}aX9Yn}ij)Ja?yJb1A|;!tMP|E|d%(g7}CRSO1z4lbE`sNln2&!@Z$9RYQMJ%B=+6adJg@a^$67si|&zKn~qToJCd<)aZI zP2i;Mbo3Gfv%uhD`0&g;?!fh4mv%XixX7y@^5NmxHa9xhiI^(-Gsh9@nkxeDo9ocl zz|i-zn(o|QNEVLe666}#0ekMAWez&O2a&dRa2?ivU0cPE_fa!t8vkhhdV!D=kuxr# zJ0O&4S)*`~7(KG^BJ~k7w!g2@d^eEqu8|7U#PW63tEHPrk@)I|pUBv{Ve$~_a5l54 zFuW~A@%l0Oe zlcR#>B&e%&) zv<{ZvHn8p&1984q^k}|wwyNOn$Y_){8%=l%5xt^}L1AOrp>E;hJo?_V61b#$Z?ziO zru^rxN=zN3|BaH7_3`(_6Nkqd-tVby;2>jlUMZRdifLG9n|$S`1&Leg=5w6Qgaiv7 zXkvrm76n)}U1d#44c#YVQsiA}ukfz!yXp;HQEAOuJhv#L4Khc!LB-`qepyI?>RH=w z7pI}7+m{AJOVLoeLGlNr_uafNWZ*fOku6?w4kIkii}Ft2Jp~W2Z@aW2xFnaZ^bnxP zS}FXv;l>)vvcs05NzGD8T+{r7wE&|A6kwcpGe0$!c#^h+xX!Z$UNX#!djOFTF#F$$if3eD$(KD; zS*tWovGC(=DIHZY&E51RF%UM9qT3F9YLVt??Pt)G9}r^_b#_b5;FF-&89#+eZ!D){ zPnyTMD_!FqHDWD97khil=$2EJYAlM%-@SNH_nyfb?N9xKuO(@&JNL9Z)mw5Zy5#AZ zhBzQ#fTj`PTMQ%P2YzDqualY zx$_}Mw#&CW0YRZ?`GfHd7)s6&|FBiq@XIhY*`oGFt3Hx=2?Xs zrFX1EAl9oe_d?Aya9t|M$KnG(6zT&*+PW)-keuZ;-(=X3;|TTlGVDyZaf{K;EQ@FB z;OiTlfebrd4&xaJ8jF`UcY#f-RFY#UBbc7`S^;24j+Jrt1W->Ow-l6Oy^)&J6`bYp zZirLFII%R~8AG45*P)m3B7IHCGvnH4Y;~BMa`qzhHmvHqB_+%1IFpaW%Ha($<@ob2 z-7RqM0WzvYwoiQnfw+4|pS%Qb?fQdHw$9qR4O!>S_RN+g6Jiy?zaZ8z{yd+7$P6S; zt-094zouR6dmjyo(_Mk`*^oxxrAnPUE%Y4L&}64|r2LX?FM8Y$)FYbqo0bwZ+O67| z=Q9&66olTgJDA;yHIgcZ4I|U~LEdl?Mxl3e z*^6ZP(y)&vLt>Ztm9B!y$E}I`VR34F0PYIPCnFyDmy*&Id)EWer3^UAbNmGA`fq4^ zqhZxxP7MJ3K{I1L0zyWj|AWSV1Kl5J{HN-}{%rpZLjGI%4-o#_^$!sKQ}tn?zt{g3 
W4~_LmNdG7h{_dvVQ-wf^0JP`2! literal 0 HcmV?d00001 diff --git a/thehive/test/resources/machinetag-present.zip b/thehive/test/resources/machinetag-present.zip new file mode 100644 index 0000000000000000000000000000000000000000..07a6812f6540fd60c1a27c077326f18742418161 GIT binary patch literal 3941 zcmb7{Wmpv4w#NsgOBxC3PDv$ILSA6VVE{?#9AajWaR@=$K~fr|Q(A`ZhG9@ac?Br} ziJ=6BM!0gHd(L^E`|bSKvvzz~d++uCw0}l=w}==4WPidP2{i`%pYmh@09pVy0Alax z=I-qbw|7;P^nm$zgH5Rcq^=TZySx1zc02=UJAVWvfPiS@769Lru6U{XA?w9h0(cuBBs!X=qEO zW~Lkmwqtq0;%31|VB^CTN*-Ibr^0q)nw^){9l(2GFf4oV#aCrn`x13pYK|)L8f*_Fjd08@9^xR zclzOEdE&x%&BO59ZLFmwBFbA^ug^1m+2}8Ao%0(6tGmKUx3Udkk~*r z?^B<7jX~vX1xk0WnZD24#qE>6Cu2|dV)RO(b^?cffEVBJtrNdtZ{3%(EAz!Q6G2|Z zdUe+W0hBrJCs{=4d&9(}BrRB2Di0N6-?;%y{AFCr0W~=-c(lD8E8iuFm@vr(zEDG3 z2zI|ymUO<_9BuQv)imO7Y4Ohyn$=TN$8`K4L>@JJ#z4Sw@ zmr-Mz5!Prjmw`lpE!6;tfJ=yAe8Q!~UX3Ylgkz<1Rn{+Rt)rp^~<>|#7QRb!WeHg}G3Dty@4 zM7azIwXTeI67$=D=AQI9@_6ef|7}5VXVV0s#cy_C(HCHvdP(thW#BGm%yyxdI^lZy zg4T0$lPHetqt5^uxA%+EM!@V=Pxtf#&7iFI;fTKO*P|vNcBg`D*@6#Ww4h$cOEx7c zCNu|SHdtf*b4V37!qSoOR%%yy!Y_dEaz1e)rkJt2$^Vv#)Ja5ODcb~9sQH%WR!F8z&AQDg(479+=5&yf9YwllrUU!cklRf&QENK zB1^M1!z67FAK$LL{Th8HHKO=;n)tp&MH&;DbRrQiSWKb>84`>`*jZ|aWGYdYMK%kL zA(--cnllC6woG%K`9VC7o~>K1Ny8U{9gD43y z;=+*l*k?#bSs~1hbeemvA@?MXp8^LDmQ?wM|GNWBm?;5jSST70>olS~?O9k0Jm4tQ z*;#uq=I;Lxb&U^5_VRx>WTe*Ih8$nc`!-FDWvuN>xZg=!Qwn0kVB~p=1Da}A4Jf8d zy3gL1`-RMIxU##8ZYJcBN9YTGHXzzso8M8lF2Z{h4a!dX;^~^Cc1Sq;kH5=(WUc&^ zPu=^|7DRTXiz1wdzgqRSnFGwjB#`Vb$)Lq}Y)8-f#lpc=c7kfk{*$Dhw9dOHEfZPa zI{D8zjD3I7!aZsqOrZA82+mHdX@ zodL}9FUmP|AKC%yQv8xAuSf$TsYPnUfqPV}GSOT=$3M&tgWn#CCx7wdS+L^ZteZf6 zS+0t!&MGps@yoQ}e~{@{-}TsEH1@d9+;D3{aIZu`jK45CjP6@PKBt0;K}69j(z&TS z)s1gW?R_>GgkMiIRtIyXAc<$0nAJz0WYvIC(juwofoG=y32%W-p+zvu#rIJzO_hz7 zJwW$d)0kE^&f8pb4tHX$SPo;bwxAIM@TS4)2lC9n7-m?4Sdz<`TNe&A zY8b#{+CtbETFNV>zUXrIp z;Db<4WMNq)wd_t+mUrqe5qE`t05Wd{H8{MuFDh-ccGo{U&>E#gIn3&@irgtyww+$y zk`CixrILZiQw0E$(CBtWc!aDSRUI-b?B1e_H9J!$ZKw*tYvY+m63UXfT+O2ut|O{y z@uI*VnjEpsGRBQu#%}A|9gb12p1G`AFbH;W%QixVp8S3}4KZ*6Gzj$pifrEkKo&(A z$Jg8#a}M}2F4}TMq|T0?Mwld#i?-VdA`WJO!Nu|6S@}Ex>;0~s@}BXLSAk?>W3wIZ zbg&a~HS|Z$Bi1!H1l~8#v7?D$;8zXZxr4AA9Lp`lJ-h?<+C9q}ad{6SY474bZ2Y>m zik}>yX38@9+5YtcAulR#R7!V1DBHG1?kYKPWC0=Z5jS#psL^^qfd9UsD$~^Rb#>Iz zZKP;I&66QgwqBTmHFY?f*>o6S&Mc27t$+L}xw~%4pNI8#Wa!ZbYqGmZi1N}g=h#zj zdMPEvnyDDvJ{{YA#~srH!<|k=#0(OwaOM7t^5ADiJyl8EcwRHzstQ#6T!wMzzetas zB~agwFtu)o%$q2Oy1b7+KrGI72 zfzg=n;RiC`y`h2g6C`%WU5cS~umW}g4Zj(P3bbM-3S4s3gm%X#qP5v*!rN}qE6M5? 
zHJ2ai7MbMJ_n(!*r9Ap;9)oQwe*Lb(G*}P5Q8u(W{*iRzXrkf$o{9oz{iy-+R-j{)d_w?RZZ|aFoYt`bt zLm8u=HL(pUsW|e>Mhe!<+I_z`4Kdxm)W5Y91Em`wdqnck-TOioo|_ff29b9hXK`6n zaQ@*Xbb!t1(Td=fTDsChfFkRp@skD{Yb?u-TS~^YOQrGAk8oA&(@Hjc;t_~IT)k$Q zrG54HD8anFj_f!Z!P$-2+`>SHmZ2{@gXA_@zLp_(@)YX6n>Rg=I@cMLQ4xY~YNXIS zZ%Zom$jo%+Wh#!!PheR=MQgG$d3r%Bb6-e(0JJ1ZE5F!;m@d(SNFG zkiYbMW}|L6^n)B;&&|;f8Obmu#;&Zi_$I~wwPPbu#DkgfnKphQGCS-tR54lUQjBD^ z(n)y`XE}O7H_Ef(6|KYd9YgiRvvwFjSCq9-U- zk2Q`yyYpCoNCDrR_P~p|pK2Tuj#Onx;l#6dlyf)~wCfiiYnYl#{b{0}_lV_nMuNf|r(s5lbNYL|hW@n2Hcw?dxgB^i zPden6f#)$^y0Go6(jbuObWL$~LSotbYW{d}S&dbZeWB;qwcmAv8obwEo}K;qh3v&j z$#X<704o*ffoW>C=`u5w53lm(gOBW%?{)(ML(mEblN&IUyc7OOyNJQ(F*s1(n%3W5 z&V~jJ>qFVGF1ll0i#(oZ6={~)u@bemS%rBNX{Lc2Qb9fz^#C!b4-9GNp%hGfmfw1t zVMCrX#2;eNoo?$MtDRMzz}Ce-FfwC3=5#rRXCP=US=!uXZ&{_1`t)`j)3;tH$R3<) zWt20;t_Q>|1-`Y}NX_jD%65D=$|Y))RQCJ@!+;Cq5Ms1QUt9XZsO|+@1Ln58gDAZ% ztNLzf>9RV`_ydtjcvEZz{`_-q8{B)Clq!jBXka)1_uyyLy`;i@)iR7&_7k~2iH&=)LzeXhJu0REBNW&k}WiH(odX8&o($fYK z0jZAAK92+Sh?f19r9_QRt4`+mtRxFX;S3H(vpbQT0TJa~n_Oqg{cHgiYnkk_LX6i4 z1*IU3$rn~=(Jd(wVb{9}FqPbKBkanFKF zDVfT>>*1I(1{~!%ehPIBFw!F>d!u31WKInL+@P7!zoBj23Gr`eqa^?UsQy5kFVq?4 z><#}9u&w;+u)9gzpZtHo28pky>F1`>4Cv}R$caQRey$!1jVa-k4-GYaQ+OFz&yZ|J z5l(!v8zfEv=wubqb|m)+9SnMPd!?%0QNgHbba45VEAl119O-u2{gMuQiM9)^@W#P|9R@NOng4Nmub%%;nG(<}3l01v>| z(b>b(&lTb5t|;XNf8+--rvZ>bCD9Hy``R6N`_T@8NGbpU@g@-f@SjVxX`5f)E$aDS zEKwUSn{zb}GC*4sMmnU{ML=|u$Y$~&`Q`qnVt=2Z`)rXbHxq*<(Et$^?@wNX@4{ZB z1SQyvG`k2?!k7Xg(}9(CKE#76ZAnEG;?1fLMgr`!NEvUIqW!&&T2A#j zb_cw(yjS@H_))N$LulA!OS2GTP~+h%MD%T{15mp3}9? z9gb67;u^;~DqPZgUM?_(w;f()s2+k5_nxOhq^kmgWG_J^MsoQRy_U5`Rdban{rP6b zUOzXF2}5t@?yjYn)gs+QPQxG{{vo2D0YiTJPv%w^N@~Z$d`b-Juls|jay@@$6KCuX zk&uyoz{1mbsgVb+4G@y)F&!t=x~VXLV6CDC5}FZ5zrOX=vEXIP43p8#J0HJ z6Xa_brz^gN9uEv`w`t#bdLDcPnO$ zFx1=v`HWJwIRxOp=9O>)e3+#-?TTGWprCH-RM6$gHBCbdxtghzgJ96A7#Hz?O<3N~ zUT0oE!<4_Q2ySedA$0^S4y}5F%+oF@Kd<)R#EjZ6_Ru6=&s@-ZZ*39BlYe~FkH+o) zpt2LRAnNX#QPd90ZX1f~?Rq(41_rtm=ExPkpVoo-oGjaws+!RrmfK-X4bOe6u~9b8 zghXkb6^TCpA}a+XNz5ZU5dQ#xZfbY`;H>7_d5leY`L}hSqS+IHJo*!3k8mLw?I+sv z;1K>>PsD4szMb5Pl;U6QoA3B6EIgqzlp~NAhwslsihCD9sF@B*{sNA`;>rhB(U!V|HcI zJ@brtCUF9kI7GOV>KFW9?GVB&NpQnrald$n3Ds%$;(F*IXOZ6Sy5gv3pa$w1AC%$~ z_-4>Vy}31HY$gB83=NjKt~c>c2T5%im;-~6=PL*nxuD0x%y7N-u}p5HBms*^W7dyex;8hTtGZq z^Rt@=EWjm0fNm+^r37qy_r}HI;Z;teTI#|5hK0?xyC)kn78Uojuz=D^iw9WeiKwu}CX1--jvzW513dR{7uxPJImrPk5(~H#RE?sFpOMW^-Kc4NZSMGJi%I0=cwksOf1j+-B+R3%91KMw@PsXP$X%3kMe+_q@}M zI9s-(SgbvG*a-4$>6N}JsvM1?%F^?F7+3-~E=4Zi&Yj;72{mcx$75O}IG8?ER7pd6 z@uEY8bcq+#Qv?J$9*^kPo(o1VKz8U;RaD&_fxGk_IzKwlk~0BX%?BA(@>Sz*g(VB- zWLwiwo_j+&QZf8^e%!Q_q?w^zT3|S?Sj_<4KeLRO8vvH(>kvFbY9_Im#dD*FD5ed{mfMhhf zO$iYt=RjQ_k{x+#$qfo*>7akAO7PNj_O7IgR33NpNTvI*ntFm5=$kfYT(hicBe$u? 
z#!kEQi)Rnr)~uL>I(g(8VZ!%+J(=+}asf05_X3LTUjx8a#hEA9JQzz(#0oCPW>vJ_ zL4a0-G>MzO%f(j$!Uji3;Gbs~@CI%4xp&BWCq!R`l8=tgwRTte~^r$2d)66 zdCp-m6A75N$mdP(8?&bH)KC3ASYLal0e!eO&_hy;kAXGUk!s6FIXTWk)rjtNbngvM zY&RTtIu#W=K(NZ4_x**J04vH`HEHv4?M#arNa<5K#;NZjBW9AYQ}oNQ_oc;S*G_rh z!tK7KN^(@_f`THfG^J_j0hTb#moP`yPd0qWTRr@`sCnSvqVpYQV#EGi)SRSHmqPWk zTgrI5r|ZDJKeO{BTjf!uhtP~cMh=#S+tr&HW*Z9dF=fd3%$y6SwcN+|XT5nvix414 z>WaS zg^*y;8%=CD+NuPvWvH$zt78}>CJnhG>-)UB=ZQVmbP3pqqyc66(+yz>~F+meFoHy`0cY-YDk6p!@s zl>rhQT`z;5G}>HeTXEi2HmzGOONhCPs|L;}+wn_8AwzNRw99QAYreh^%HMC#iKi8s z+l4W*Ba+x}r~INNrJhwm(TN41Ylo4TZNwv{tTC^?Q*EPy zsP)BjyY!}epL;Mg-?~SvgY4$)XdT~ntx=ChQ3raJ4(ltLuG9Yj9eyrL_t;g`?b2+` ztLj!@WFg{nO2mZ~*{JDBQ_SssEcv)-Ku+PBW&O4NnT~O60Zhnlbl24jFYbA&bwoH^ zohglz$k|I+NJQo8TvPCGsR;>Bz3}2b4Hin^*=Tto$0~`Wyz{ z3rL7FPRpa27=Ooi%=R)fQDL6TI5X8XIo0g{K(2Rh!iqea7eQ4Z`&o|u=S z_ZHYin`L)x#h`X;aIa$RbVx%Q_>t8+fH>?CJjB6EIh^FIpoNZUQ=Tg#(ATId!`?Ga zH@hN{qf?-NcosV9ayg1;B4{pM-r56xSfiGncs+*c-KZA=hUeLu5c^yr5+CVBO-Tt)K>rgZ5!@-B; zB&`nH4%UV2WGf|+OipKu8_`@rQ5D=<+-E9%96?s=SwJ~q=4+&aa+p?1)u>T)`T*Du zA;v87W-`+l~Jzbbs9VzpF9w&*}eb%KuFNBZdEb`G2Ro d{+7tU)BkUZXktJ@`e!P_-`)Ou=b-y@`w!^JiR1tP literal 0 HcmV?d00001 From a6d7dfc4f4638fcaac54c6f8649c758b21fbd2fe Mon Sep 17 00:00:00 2001 From: Robin Riclet Date: Tue, 24 Nov 2020 17:19:23 +0100 Subject: [PATCH 19/93] Fixed schema --- .../app/org/thp/thehive/models/TheHiveSchemaDefinition.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala b/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala index a509a89059..5a6ffb2a02 100644 --- a/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala +++ b/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala @@ -103,7 +103,7 @@ class TheHiveSchemaDefinition @Inject() extends Schema with UpdatableSchema { } .dbOperation[Database]("Add each tag to its Organisation's Custom taxonomy") { db => db.tryTransaction { implicit g => - db.labelFilter("Organisation")(Traversal.V()).toIterator.toTry { o => + db.labelFilter("Organisation")(Traversal.V()).unsafeHas("name", P.neq("admin")).toIterator.toTry { o => val customTaxo = Traversal.V(EntityId(o.id())).out("OrganisationTaxonomy").unsafeHas("namespace", "_freetags").head Traversal.V(EntityId(o.id())).unionFlat( _.out("OrganisationShare").out("ShareCase").out("CaseTag"), From 09f0dc7adb490721b18e30cb92a623d2712bb155 Mon Sep 17 00:00:00 2001 From: Robin Riclet Date: Wed, 25 Nov 2020 16:10:54 +0100 Subject: [PATCH 20/93] Fixed unit test for taxonomy --- .../controllers/v1/TaxonomyCtrlTest.scala | 93 +++++++++++++------ thehive/test/resources/data/Taxonomy.json | 8 +- 2 files changed, 74 insertions(+), 27 deletions(-) diff --git a/thehive/test/org/thp/thehive/controllers/v1/TaxonomyCtrlTest.scala b/thehive/test/org/thp/thehive/controllers/v1/TaxonomyCtrlTest.scala index d08034f2c9..6c320635ac 100644 --- a/thehive/test/org/thp/thehive/controllers/v1/TaxonomyCtrlTest.scala +++ b/thehive/test/org/thp/thehive/controllers/v1/TaxonomyCtrlTest.scala @@ -2,11 +2,11 @@ package org.thp.thehive.controllers.v1 import org.thp.scalligraph.controllers.FakeTemporaryFile import org.thp.thehive.TestAppBuilder -import org.thp.thehive.dto.v1.{InputEntry, InputPredicate, InputTaxonomy, InputValue, OutputTag, OutputTaxonomy} +import org.thp.thehive.dto.v1._ import play.api.libs.Files import play.api.libs.json.Json -import play.api.mvc.{AnyContentAsMultipartFormData, MultipartFormData} import play.api.mvc.MultipartFormData.FilePart +import 
play.api.mvc.{AnyContentAsMultipartFormData, MultipartFormData} import play.api.test.{FakeRequest, PlaySpecification} case class TestTaxonomy( @@ -22,7 +22,7 @@ object TestTaxonomy { outputTaxonomy.namespace, outputTaxonomy.description, outputTaxonomy.version, - outputTaxonomy.tags.toList, + outputTaxonomy.tags.toList ) } @@ -39,15 +39,18 @@ class TaxonomyCtrlTest extends PlaySpecification with TestAppBuilder { InputPredicate("pred1", None, None, None), InputPredicate("pred2", None, None, None) ), - Some(List( - InputValue("pred1", List( - InputEntry("entry1", None, None, None, None)) - ), - InputValue("pred2", List( - InputEntry("entry2", None, None, None, None), - InputEntry("entry21", None, None, None, None) - )) - )) + Some( + List( + InputValue("pred1", List(InputEntry("entry1", None, None, None, None))), + InputValue( + "pred2", + List( + InputEntry("entry2", None, None, None, None), + InputEntry("entry21", None, None, None, None) + ) + ) + ) + ) ) "create a valid taxonomy" in testApp { app => @@ -113,7 +116,7 @@ class TaxonomyCtrlTest extends PlaySpecification with TestAppBuilder { .withHeaders("user" -> "certuser@thehive.local") val result = app[TaxonomyCtrl].get("taxonomy1")(request) - status(result) must beEqualTo(201).updateMessage(s => s"$s\n${contentAsString(result)}") + status(result) must beEqualTo(200).updateMessage(s => s"$s\n${contentAsString(result)}") val resultCase = contentAsJson(result).as[OutputTaxonomy] TestTaxonomy(resultCase) must_=== TestTaxonomy( @@ -178,27 +181,65 @@ class TaxonomyCtrlTest extends PlaySpecification with TestAppBuilder { (contentAsJson(result) \ "message").as[String] must contain("formatting") } - /* - "activate a taxonomy" in testApp { app => + "activate a taxonomy" in testApp { app => + val request1 = FakeRequest("GET", "/api/v1/taxonomy/taxonomy2") + .withHeaders("user" -> "certuser@thehive.local") + val result1 = app[TaxonomyCtrl].get("taxonomy2")(request1) + status(result1) must beEqualTo(404).updateMessage(s => s"$s\n${contentAsString(result1)}") + + val request2 = FakeRequest("PUT", "/api/v1/taxonomy/taxonomy2") + .withHeaders("user" -> "admin@thehive.local") + val result2 = app[TaxonomyCtrl].toggleActivation("taxonomy2", isActive = true)(request2) + status(result2) must beEqualTo(204).updateMessage(s => s"$s\n${contentAsString(result2)}") + + val request3 = FakeRequest("GET", "/api/v1/taxonomy/taxonomy2") + .withHeaders("user" -> "certuser@thehive.local") + val result3 = app[TaxonomyCtrl].get("taxonomy2")(request3) + status(result3) must beEqualTo(200).updateMessage(s => s"$s\n${contentAsString(result3)}") + } + + "deactivate a taxonomy" in testApp { app => + val request1 = FakeRequest("GET", "/api/v1/taxonomy/taxonomy1") + .withHeaders("user" -> "certuser@thehive.local") + val result1 = app[TaxonomyCtrl].get("taxonomy1")(request1) + status(result1) must beEqualTo(200).updateMessage(s => s"$s\n${contentAsString(result1)}") - } + val request2 = FakeRequest("PUT", "/api/v1/taxonomy/taxonomy1/deactivate") + .withHeaders("user" -> "admin@thehive.local") + val result2 = app[TaxonomyCtrl].toggleActivation("taxonomy1", isActive = false)(request2) + status(result2) must beEqualTo(204).updateMessage(s => s"$s\n${contentAsString(result2)}") - "deactivate a taxonomy" in testApp { app => + val request3 = FakeRequest("GET", "/api/v1/taxonomy/taxonomy1") + .withHeaders("user" -> "certuser@thehive.local") + val result3 = app[TaxonomyCtrl].get("taxonomy1")(request3) + status(result3) must beEqualTo(404).updateMessage(s => 
s"$s\n${contentAsString(result3)}") + } - } + "delete a taxonomy" in testApp { app => + val request1 = FakeRequest("GET", "/api/v1/taxonomy/taxonomy1") + .withHeaders("user" -> "certuser@thehive.local") + val result1 = app[TaxonomyCtrl].get("taxonomy1")(request1) + status(result1) must beEqualTo(200).updateMessage(s => s"$s\n${contentAsString(result1)}") - "delete a taxonomy" in testApp { app => + val request2 = FakeRequest("DELETE", "/api/v1/taxonomy/taxonomy1") + .withHeaders("user" -> "admin@thehive.local") + val result2 = app[TaxonomyCtrl].delete("taxonomy1")(request2) + status(result2) must beEqualTo(204).updateMessage(s => s"$s\n${contentAsString(result2)}") - } + val request3 = FakeRequest("GET", "/api/v1/taxonomy/taxonomy1") + .withHeaders("user" -> "certuser@thehive.local") + val result3 = app[TaxonomyCtrl].get("taxonomy1")(request3) + status(result3) must beEqualTo(404).updateMessage(s => s"$s\n${contentAsString(result3)}") + } - */ } - def multipartZipFile(name: String): MultipartFormData[Files.TemporaryFile] = MultipartFormData( + def multipartZipFile(name: String): MultipartFormData[Files.TemporaryFile] = // file must be place in test/resources/ - dataParts = Map.empty, - files = Seq(FilePart("file", name, Option("application/zip"), FakeTemporaryFile.fromResource(s"/$name"))), - badParts = Seq() - ) + MultipartFormData( + dataParts = Map.empty, + files = Seq(FilePart("file", name, Option("application/zip"), FakeTemporaryFile.fromResource(s"/$name"))), + badParts = Seq() + ) } diff --git a/thehive/test/resources/data/Taxonomy.json b/thehive/test/resources/data/Taxonomy.json index 500c39c010..5c661448dc 100644 --- a/thehive/test/resources/data/Taxonomy.json +++ b/thehive/test/resources/data/Taxonomy.json @@ -3,6 +3,12 @@ "id": "taxonomy1", "namespace": "taxonomy1", "description": "The taxonomy 1", - "version": "1" + "version": 1 + }, + { + "id": "taxonomy2", + "namespace": "taxonomy2", + "description": "The taxonomy 2", + "version": 1 } ] \ No newline at end of file From 6f8c4fe641d647c7a942e7f77e1c118a5d4877f7 Mon Sep 17 00:00:00 2001 From: Robin Riclet Date: Wed, 25 Nov 2020 16:18:57 +0100 Subject: [PATCH 21/93] Fixed user permission test --- thehive/test/org/thp/thehive/controllers/v1/UserCtrlTest.scala | 1 - 1 file changed, 1 deletion(-) diff --git a/thehive/test/org/thp/thehive/controllers/v1/UserCtrlTest.scala b/thehive/test/org/thp/thehive/controllers/v1/UserCtrlTest.scala index dd68b7d3a9..e7ac8f762c 100644 --- a/thehive/test/org/thp/thehive/controllers/v1/UserCtrlTest.scala +++ b/thehive/test/org/thp/thehive/controllers/v1/UserCtrlTest.scala @@ -109,7 +109,6 @@ class UserCtrlTest extends PlaySpecification with TestAppBuilder { Permissions.managePage, Permissions.manageObservable, Permissions.manageAlert, - Permissions.manageTaxonomy, Permissions.manageAction, Permissions.manageConfig ), From 471b8947abbc8b1d0486f250a1ba16e86b9adc62 Mon Sep 17 00:00:00 2001 From: Robin Riclet Date: Tue, 8 Dec 2020 11:37:36 +0100 Subject: [PATCH 22/93] Review changes --- .../thp/thehive/controllers/v1/DescribeCtrl.scala | 4 +++- .../thehive/models/TheHiveSchemaDefinition.scala | 1 - .../org/thp/thehive/services/TaxonomySrv.scala | 15 +++++++-------- 3 files changed, 10 insertions(+), 10 deletions(-) diff --git a/thehive/app/org/thp/thehive/controllers/v1/DescribeCtrl.scala b/thehive/app/org/thp/thehive/controllers/v1/DescribeCtrl.scala index 8db4d8e430..8afb150c0d 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/DescribeCtrl.scala +++ 
b/thehive/app/org/thp/thehive/controllers/v1/DescribeCtrl.scala @@ -39,6 +39,7 @@ class DescribeCtrl @Inject() ( // pageCtrl: PageCtrl, profileCtrl: ProfileCtrl, taskCtrl: TaskCtrl, + taxonomyCtrl: TaxonomyCtrl, userCtrl: UserCtrl, customFieldSrv: CustomFieldSrv, impactStatusSrv: ImpactStatusSrv, @@ -100,7 +101,8 @@ class DescribeCtrl @Inject() ( EntityDescription("customField", customFieldCtrl.publicProperties.list.flatMap(propertyToJson("customField", _))), EntityDescription("observableType", observableTypeCtrl.publicProperties.list.flatMap(propertyToJson("observableType", _))), EntityDescription("organisation", organisationCtrl.publicProperties.list.flatMap(propertyToJson("organisation", _))), - EntityDescription("profile", profileCtrl.publicProperties.list.flatMap(propertyToJson("profile", _))) + EntityDescription("profile", profileCtrl.publicProperties.list.flatMap(propertyToJson("profile", _))), + EntityDescription("taxonomy", taxonomyCtrl.publicProperties.list.flatMap(propertyToJson("taxonomy", _))) // EntityDescription("dashboard", dashboardCtrl.publicProperties.list.flatMap(propertyToJson("dashboard", _))), // EntityDescription("page", pageCtrl.publicProperties.list.flatMap(propertyToJson("page", _))) ) ++ describeCortexEntity("case_artifact_job", "/connector/cortex/job", "JobCtrl") ++ diff --git a/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala b/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala index 5a6ffb2a02..8f03bb4186 100644 --- a/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala +++ b/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala @@ -93,7 +93,6 @@ class TheHiveSchemaDefinition @Inject() extends Schema with UpdatableSchema { taxoVertex.property("namespace", "_freetags") taxoVertex.property("description", "Custom taxonomy") taxoVertex.property("version", 1) - taxoVertex.property("enabled", true) o.addEdge("OrganisationTaxonomy", taxoVertex) Success(()) case _ => Success(()) diff --git a/thehive/app/org/thp/thehive/services/TaxonomySrv.scala b/thehive/app/org/thp/thehive/services/TaxonomySrv.scala index 2051c64930..aab26143cf 100644 --- a/thehive/app/org/thp/thehive/services/TaxonomySrv.scala +++ b/thehive/app/org/thp/thehive/services/TaxonomySrv.scala @@ -27,9 +27,8 @@ class TaxonomySrv @Inject() ( def existsInOrganisation(namespace: String)(implicit graph: Graph, authContext: AuthContext): Boolean = { startTraversal - .has(_.namespace, namespace) - .in[OrganisationTaxonomy] - .v[Organisation] + .getByNamespace(namespace) + .organisations .current .exists } @@ -39,7 +38,6 @@ class TaxonomySrv @Inject() ( taxonomy <- createEntity(taxo) _ <- tags.toTry(t => taxonomyTagSrv.create(TaxonomyTag(), taxonomy, t)) richTaxonomy <- Try(RichTaxonomy(taxonomy, tags)) - _ <- activate(richTaxonomy._id) } yield richTaxonomy def createFreetag(organisation: Organisation with Entity)(implicit graph: Graph, authContext: AuthContext): Try[RichTaxonomy] = { @@ -59,7 +57,8 @@ class TaxonomySrv @Inject() ( taxo <- get(taxonomyId).getOrFail("Taxonomy") organisations <- Try(organisationSrv.startTraversal.filterNot(_ .out[OrganisationTaxonomy] - .filter(_.unsafeHas("namespace", taxo.namespace)) + .v[Taxonomy] + .has(_.namespace, taxo.namespace) ).toSeq) _ <- organisations.toTry(o => organisationTaxonomySrv.create(OrganisationTaxonomy(), o, taxo)) } yield Success(()) @@ -68,9 +67,9 @@ class TaxonomySrv @Inject() ( for { taxo <- get(taxonomyId).getOrFail("Taxonomy") _ <- Try(organisationSrv.startTraversal - .filterNot(_.unsafeHas("name", 
"admin")) + .hasNot(_.name, "admin") .outE[OrganisationTaxonomy] - .filter(_.otherV().unsafeHas("namespace", taxo.namespace)) + .filter(_.otherV.v[Taxonomy].has(_.namespace, taxo.namespace)) .remove()) } yield Success(()) @@ -80,7 +79,7 @@ object TaxonomyOps { implicit class TaxonomyOpsDefs(traversal: Traversal.V[Taxonomy]) { def get(idOrName: EntityId): Traversal.V[Taxonomy] = - traversal.getByIds(idOrName) + idOrName.fold(traversal.getByIds(_), getByNamespace) def getByNamespace(namespace: String): Traversal.V[Taxonomy] = traversal.has(_.namespace, namespace) From 673ded1d7039cb58861af421a5674a3f2017107f Mon Sep 17 00:00:00 2001 From: Robin Riclet Date: Wed, 11 Nov 2020 10:50:39 +0100 Subject: [PATCH 23/93] Added taxonomy to database's schema --- .../org/thp/thehive/models/Organisation.scala | 3 ++ .../models/TheHiveSchemaDefinition.scala | 30 +++++++++++++++++++ 2 files changed, 33 insertions(+) diff --git a/thehive/app/org/thp/thehive/models/Organisation.scala b/thehive/app/org/thp/thehive/models/Organisation.scala index 41ca8dd5c2..b7ad03b1fc 100644 --- a/thehive/app/org/thp/thehive/models/Organisation.scala +++ b/thehive/app/org/thp/thehive/models/Organisation.scala @@ -20,6 +20,9 @@ case class OrganisationShare() @BuildEdgeEntity[Organisation, Organisation] case class OrganisationOrganisation() +@BuildEdgeEntity[Organisation, Taxonomy] +case class OrganisationTaxonomy() + case class RichOrganisation(organisation: Organisation with Entity, links: Seq[Organisation with Entity]) { def name: String = organisation.name def description: String = organisation.description diff --git a/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala b/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala index eeab7f15fd..c9ece8d6fc 100644 --- a/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala +++ b/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala @@ -11,9 +11,11 @@ import org.janusgraph.graphdb.types.TypeDefinitionCategory import org.reflections.Reflections import org.reflections.scanners.SubTypesScanner import org.reflections.util.ConfigurationBuilder +import org.thp.scalligraph.{EntityId, RichSeq} import org.thp.scalligraph.auth.AuthContext import org.thp.scalligraph.janus.JanusDatabase import org.thp.scalligraph.models._ +import org.thp.scalligraph.traversal.Traversal import org.thp.scalligraph.traversal.TraversalOps._ import play.api.Logger @@ -84,6 +86,34 @@ class TheHiveSchemaDefinition @Inject() extends Schema with UpdatableSchema { .iterate() Success(()) } + // Taxonomies + .addVertexModel[String]("Taxonomy", Seq("namespace")) + .dbOperation[Database]("Add Custom taxonomy vertex for each Organisation") { db => + db.tryTransaction { g => + db.labelFilter("Organisation")(Traversal.V()(g)).toIterator.toTry { o => + val taxoVertex = g.addVertex("Taxonomy") + taxoVertex.property("namespace", "Custom") + o.addEdge("OrganisationTaxonomy", taxoVertex) + Success(()) + } + }.map(_ => ()) + } + .dbOperation[Database]("Add each tag to its Organisation's Custom taxonomy") { db => + db.tryTransaction { implicit g => + db.labelFilter("Organisation")(Traversal.V()).toIterator.toTry { o => + val customTaxo = Traversal.V(EntityId(o.id())).out("OrganisationTaxonomy").unsafeHas("namespace", "Custom").head + Traversal.V(EntityId(o.id())).unionFlat( + _.out("OrganisationShare").out("ShareCase").out("CaseTag"), + _.out("OrganisationShare").out("ShareObservable").out("ObservableTag"), + _.in("AlertOrganisation").out("AlertTag"), + 
_.in("CaseTemplateOrganisation").out("CaseTemplateTag") + ).toSeq.foreach(tag => + customTaxo.addEdge("TaxonomyTag", tag) + ) + Success(()) + } + }.map(_ => ()) + } val reflectionClasses = new Reflections( new ConfigurationBuilder() From fb2c911020c0fec86dc9ce1b2212b8e453e60a24 Mon Sep 17 00:00:00 2001 From: Robin Riclet Date: Thu, 12 Nov 2020 11:13:26 +0100 Subject: [PATCH 24/93] WIP adding taxonomy routes & mapping --- .../org/thp/thehive/dto/v0/Taxonomy.scala | 43 +++++++++++ .../org/thp/thehive/dto/v1/Taxonomy.scala | 43 +++++++++++ .../thp/thehive/controllers/v0/CaseCtrl.scala | 23 ++++-- .../thehive/controllers/v0/Conversion.scala | 21 ++++++ .../thehive/controllers/v0/TaxonomyCtrl.scala | 74 +++++++++++++++++++ .../thehive/controllers/v1/Conversion.scala | 22 ++++++ .../org/thp/thehive/models/Permissions.scala | 2 + .../app/org/thp/thehive/models/Taxonomy.scala | 36 +++++++++ .../thehive/services/OrganisationSrv.scala | 2 + .../thp/thehive/services/TaxonomySrv.scala | 66 +++++++++++++++++ 10 files changed, 324 insertions(+), 8 deletions(-) create mode 100644 dto/src/main/scala/org/thp/thehive/dto/v0/Taxonomy.scala create mode 100644 dto/src/main/scala/org/thp/thehive/dto/v1/Taxonomy.scala create mode 100644 thehive/app/org/thp/thehive/controllers/v0/TaxonomyCtrl.scala create mode 100644 thehive/app/org/thp/thehive/models/Taxonomy.scala create mode 100644 thehive/app/org/thp/thehive/services/TaxonomySrv.scala diff --git a/dto/src/main/scala/org/thp/thehive/dto/v0/Taxonomy.scala b/dto/src/main/scala/org/thp/thehive/dto/v0/Taxonomy.scala new file mode 100644 index 0000000000..a5af5ff61e --- /dev/null +++ b/dto/src/main/scala/org/thp/thehive/dto/v0/Taxonomy.scala @@ -0,0 +1,43 @@ +package org.thp.thehive.dto.v0 + +import java.util.Date + +import play.api.libs.json.{Json, OFormat, OWrites} + +case class InputTaxonomy ( + namespace: String, + description: String, + version: Int, + predicates: Seq[InputPredicate], + values: Option[Seq[InputValue]] +) + +case class InputPredicate(value: String, expanded: String) + +case class InputValue(predicate: String, entry: Seq[InputPredicate]) + +object InputTaxonomy { + implicit val writes: OWrites[InputTaxonomy] = Json.writes[InputTaxonomy] +} + +case class OutputTaxonomy( + _id: String, + _type: String, + _createdBy: String, + _updatedBy: Option[String] = None, + _createdAt: Date, + _updatedAt: Option[Date] = None, + namespace: String, + description: String, + version: Int, + predicates: Seq[OutputPredicate], + values: Option[Seq[OutputValue]] +) + +case class OutputPredicate(value: String, expanded: String) + +case class OutputValue(predicate: String, entry: Seq[OutputPredicate]) + +object OutputTaxonomy { + implicit val format: OFormat[OutputTaxonomy] = Json.format[OutputTaxonomy] +} \ No newline at end of file diff --git a/dto/src/main/scala/org/thp/thehive/dto/v1/Taxonomy.scala b/dto/src/main/scala/org/thp/thehive/dto/v1/Taxonomy.scala new file mode 100644 index 0000000000..1c6a1b2bc9 --- /dev/null +++ b/dto/src/main/scala/org/thp/thehive/dto/v1/Taxonomy.scala @@ -0,0 +1,43 @@ +package org.thp.thehive.dto.v1 + +import java.util.Date + +import play.api.libs.json.{Json, OFormat, OWrites} + +case class InputTaxonomy ( + namespace: String, + description: String, + version: Int, + predicates: Seq[InputPredicate], + values: Option[Seq[InputValue]] +) + +case class InputPredicate(value: String, expanded: String) + +case class InputValue(predicate: String, entry: Seq[InputPredicate]) + +object InputTaxonomy { + implicit val writes: 
OWrites[InputTaxonomy] = Json.writes[InputTaxonomy] +} + +case class OutputTaxonomy( + _id: String, + _type: String, + _createdBy: String, + _updatedBy: Option[String] = None, + _createdAt: Date, + _updatedAt: Option[Date] = None, + namespace: String, + description: String, + version: Int, + predicates: Seq[OutputPredicate], + values: Option[Seq[OutputValue]] +) + +case class OutputPredicate(value: String, expanded: String) + +case class OutputValue(predicate: String, entry: Seq[OutputPredicate]) + +object OutputTaxonomy { + implicit val format: OFormat[OutputTaxonomy] = Json.format[OutputTaxonomy] +} \ No newline at end of file diff --git a/thehive/app/org/thp/thehive/controllers/v0/CaseCtrl.scala b/thehive/app/org/thp/thehive/controllers/v0/CaseCtrl.scala index f8fcd8aeed..71dff80ff5 100644 --- a/thehive/app/org/thp/thehive/controllers/v0/CaseCtrl.scala +++ b/thehive/app/org/thp/thehive/controllers/v0/CaseCtrl.scala @@ -195,13 +195,17 @@ class PublicCase @Inject() ( with CaseRenderer { override val entityName: String = "case" override val initialQuery: Query = - Query.init[Traversal.V[Case]]("listCase", (graph, authContext) => organisationSrv.get(authContext.organisation)(graph).cases) - override val getQuery: ParamQuery[EntityIdOrName] = Query.initWithParam[EntityIdOrName, Traversal.V[Case]]( - "getCase", - FieldsParser[EntityIdOrName], - (idOrName, graph, authContext) => caseSrv.get(idOrName)(graph).visible(authContext) - ) - override val pageQuery: ParamQuery[OutputParam] = Query.withParam[OutputParam, Traversal.V[Case], IteratorOutput]( + Query.init[Traversal.V[Case]]("listCase", (graph, authContext) => + organisationSrv.get(authContext.organisation)(graph).cases + ) + override val getQuery: ParamQuery[EntityIdOrName] = + Query.initWithParam[EntityIdOrName, Traversal.V[Case]]( + "getCase", + FieldsParser[EntityIdOrName], + (idOrName, graph, authContext) => caseSrv.get(idOrName)(graph).visible(authContext) + ) + override val pageQuery: ParamQuery[OutputParam] = + Query.withParam[OutputParam, Traversal.V[Case], IteratorOutput]( "page", FieldsParser[OutputParam], { @@ -215,7 +219,10 @@ class PublicCase @Inject() ( } } ) - override val outputQuery: Query = Query.outputWithContext[RichCase, Traversal.V[Case]]((caseSteps, authContext) => caseSteps.richCase(authContext)) + override val outputQuery: + Query = Query.outputWithContext[RichCase, Traversal.V[Case]]((caseSteps, authContext) => + caseSteps.richCase(authContext) + ) override val extraQueries: Seq[ParamQuery[_]] = Seq( Query[Traversal.V[Case], Traversal.V[Observable]]("observables", (caseSteps, authContext) => caseSteps.observables(authContext)), Query[Traversal.V[Case], Traversal.V[Task]]("tasks", (caseSteps, authContext) => caseSteps.tasks(authContext)) diff --git a/thehive/app/org/thp/thehive/controllers/v0/Conversion.scala b/thehive/app/org/thp/thehive/controllers/v0/Conversion.scala index f972afd972..345a2a643e 100644 --- a/thehive/app/org/thp/thehive/controllers/v0/Conversion.scala +++ b/thehive/app/org/thp/thehive/controllers/v0/Conversion.scala @@ -571,6 +571,27 @@ object Conversion { .transform } + implicit class InputTaxonomyOps(inputTaxonomy: InputTaxonomy) { + + def toTaxonomy: Taxonomy = + inputTaxonomy + .into[Taxonomy] + .withFieldComputed(_.namespace, _.namespace) + .withFieldComputed(_.description, _.description) + .withFieldComputed(_.version, _.version) + .transform + } + + implicit val taxonomyOutput: Renderer.Aux[RichTaxonomy, OutputTaxonomy] = Renderer.toJson[RichTaxonomy, OutputTaxonomy]( + 
_.into[OutputTaxonomy] + .withFieldComputed(_.namespace, _.namespace) + .withFieldComputed(_.description, _.description) + .withFieldComputed(_.version, _.version) + .withFieldComputed(_.predicates, _.predicates) + .withFieldComputed(_.values, _.values) + .transform + ) + implicit class InputUserOps(inputUser: InputUser) { def toUser: User = diff --git a/thehive/app/org/thp/thehive/controllers/v0/TaxonomyCtrl.scala b/thehive/app/org/thp/thehive/controllers/v0/TaxonomyCtrl.scala new file mode 100644 index 0000000000..ac6622e6d2 --- /dev/null +++ b/thehive/app/org/thp/thehive/controllers/v0/TaxonomyCtrl.scala @@ -0,0 +1,74 @@ +package org.thp.thehive.controllers.v0 + +import javax.inject.{Inject, Named, Singleton} +import org.thp.scalligraph.EntityIdOrName +import org.thp.scalligraph.controllers.{Entrypoint, FFile, FieldsParser} +import org.thp.scalligraph.models.{Database, UMapping} +import org.thp.scalligraph.query.{ParamQuery, PublicProperties, PublicPropertyListBuilder, Query, QueryExecutor} +import org.thp.scalligraph.traversal.TraversalOps.TraversalOpsDefs +import org.thp.scalligraph.traversal.{IteratorOutput, Traversal} +import org.thp.thehive.controllers.v0.Conversion.taxonomyOutput +import org.thp.thehive.dto.v1.InputTaxonomy +import org.thp.thehive.models.{Permissions, RichTaxonomy, Taxonomy} +import org.thp.thehive.services.{OrganisationSrv, TaxonomySrv} +import org.thp.thehive.services.OrganisationOps._ +import org.thp.thehive.services.TaxonomyOps._ +import play.api.mvc.{Action, AnyContent} + +class TaxonomyCtrl @Inject() ( + override val entrypoint: Entrypoint, + @Named("with-thehive-schema") override val db: Database, + @Named("v0") override val queryExecutor: QueryExecutor, + override val publicData: PublicTaxonomy +) extends QueryCtrl { + def importTaxonomy: Action[AnyContent] = + entrypoint("import taxonomy") + .extract("file", FieldsParser.file.optional.on("file")) + .extract("taxonomy", FieldsParser[InputTaxonomy]) + .authPermittedTransaction(db, Permissions.manageTaxonomy) { implicit request => implicit graph => + val file: Option[FFile] = request.body("file") + val taxonomy: InputTaxonomy = request.body("taxonomy") + + // Create Taxonomy vertex + // Create Tags associated + // Add edge orgaTaxo + + ??? + } + +} + +@Singleton +class PublicTaxonomy @Inject() ( + taxonomySrv: TaxonomySrv, + organisationSrv: OrganisationSrv +) extends PublicData { + override val entityName: String = "taxonomy" + override val initialQuery: Query = + Query.init[Traversal.V[Taxonomy]]("listTaxonomy", (graph, authContext) => + organisationSrv.get(authContext.organisation)(graph).taxonomies + ) + override val pageQuery: ParamQuery[OutputParam] = Query.withParam[OutputParam, Traversal.V[Taxonomy], IteratorOutput]( + "page", + FieldsParser[OutputParam], + (range, taxoSteps, _) => taxoSteps.page(range.from, range.to, withTotal = true)(???) 
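      // A sketch of how the `???` renderer placeholder above is resolved: the v1 TaxonomyCtrl later
      // in this series pages over the rich projection instead, roughly
      //   (range, traversal, authContext) =>
      //     traversal.richPage(range.from, range.to, range.extraData.contains("total"))(_.richTaxonomy(authContext))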
+ ) + override val outputQuery: Query = Query.outputWithContext[RichTaxonomy, Traversal.V[Taxonomy]]((taxonomySteps, authContext) => + taxonomySteps.richTaxonomy(authContext) + ) + override val getQuery: ParamQuery[EntityIdOrName] = + Query.initWithParam[EntityIdOrName, Traversal.V[Taxonomy]]( + "getTaxonomy", + FieldsParser[EntityIdOrName], + (idOrName, graph, authContext) => taxonomySrv.get(idOrName)(graph).visible(authContext) + ) + override val publicProperties: PublicProperties = + PublicPropertyListBuilder[Taxonomy] + .property("namespace", UMapping.string)(_.field.readonly) + .property("description", UMapping.string)(_.field.readonly) + .property("version", UMapping.int)(_.field.readonly) + // Predicates ? + // Values ? + .build + +} \ No newline at end of file diff --git a/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala b/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala index ac556fca70..eb3e8f4d2e 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala @@ -5,6 +5,7 @@ import java.util.Date import io.scalaland.chimney.dsl._ import org.thp.scalligraph.controllers.Renderer import org.thp.scalligraph.models.Entity +import org.thp.thehive.dto.v0.{InputTaxonomy, OutputTaxonomy} import org.thp.thehive.dto.v1._ import org.thp.thehive.models._ import play.api.libs.json.{JsObject, JsValue, Json} @@ -251,6 +252,27 @@ object Conversion { .transform } + implicit class InputTaxonomyOps(inputTaxonomy: InputTaxonomy) { + + def toTaxonomy: Taxonomy = + inputTaxonomy + .into[Taxonomy] + .withFieldComputed(_.namespace, _.namespace) + .withFieldComputed(_.description, _.description) + .withFieldComputed(_.version, _.version) + .transform + } + + implicit val taxonomyOutput: Renderer.Aux[RichTaxonomy, OutputTaxonomy] = Renderer.toJson[RichTaxonomy, OutputTaxonomy]( + _.into[OutputTaxonomy] + .withFieldComputed(_.namespace, _.namespace) + .withFieldComputed(_.description, _.description) + .withFieldComputed(_.version, _.version) + .withFieldComputed(_.predicates, _.predicates) + .withFieldComputed(_.values, _.values) + .transform + ) + implicit class InputUserOps(inputUser: InputUser) { def toUser: User = diff --git a/thehive/app/org/thp/thehive/models/Permissions.scala b/thehive/app/org/thp/thehive/models/Permissions.scala index 14b45cf5fc..de57993ad3 100644 --- a/thehive/app/org/thp/thehive/models/Permissions.scala +++ b/thehive/app/org/thp/thehive/models/Permissions.scala @@ -14,6 +14,7 @@ object Permissions extends Perms { lazy val manageAction: PermissionDesc = PermissionDesc("manageAction", "Run Cortex responders ", "organisation") lazy val manageConfig: PermissionDesc = PermissionDesc("manageConfig", "Manage configurations", "organisation", "admin") lazy val manageProfile: PermissionDesc = PermissionDesc("manageProfile", "Manage user profiles", "admin") + lazy val manageTaxonomy: PermissionDesc = PermissionDesc("manageTaxonomy", "Manage taxonomies", "admin") lazy val manageTag: PermissionDesc = PermissionDesc("manageTag", "Manage tags", "admin") lazy val manageCustomField: PermissionDesc = PermissionDesc("manageCustomField", "Manage custom fields", "admin") lazy val manageShare: PermissionDesc = PermissionDesc("manageShare", "Manage shares", "organisation") @@ -35,6 +36,7 @@ object Permissions extends Perms { manageAction, manageConfig, manageProfile, + manageTaxonomy, manageTag, manageCustomField, manageShare, diff --git a/thehive/app/org/thp/thehive/models/Taxonomy.scala 
b/thehive/app/org/thp/thehive/models/Taxonomy.scala new file mode 100644 index 0000000000..3ed3c6f0cb --- /dev/null +++ b/thehive/app/org/thp/thehive/models/Taxonomy.scala @@ -0,0 +1,36 @@ +package org.thp.thehive.models + +import java.util.Date + +import org.thp.scalligraph.models.Entity +import org.thp.scalligraph.{BuildEdgeEntity, BuildVertexEntity, EntityId} + +@BuildVertexEntity +case class Taxonomy( + namespace: String, + description: String, + version: Int +) + +case class Predicate(value: String) + +case class Value(predicate: String, entry: Seq[String]) + +@BuildEdgeEntity[Taxonomy, Tag] +case class TaxonomyTag() + +case class RichTaxonomy( + taxonomy: Taxonomy with Entity, + predicates: Seq[Predicate], + values: Seq[Value] +) { + def _id: EntityId = taxonomy._id + def _createdBy: String = taxonomy._createdBy + def _updatedBy: Option[String] = taxonomy._updatedBy + def _createdAt: Date = taxonomy._createdAt + def _updatedAt: Option[Date] = taxonomy._updatedAt + def namespace: String = taxonomy.namespace + def description: String = taxonomy.description + def version: Int = taxonomy.version + +} diff --git a/thehive/app/org/thp/thehive/services/OrganisationSrv.scala b/thehive/app/org/thp/thehive/services/OrganisationSrv.scala index 69af5f84de..e74b249a54 100644 --- a/thehive/app/org/thp/thehive/services/OrganisationSrv.scala +++ b/thehive/app/org/thp/thehive/services/OrganisationSrv.scala @@ -138,6 +138,8 @@ object OrganisationOps { def shares: Traversal.V[Share] = traversal.out[OrganisationShare].v[Share] + def taxonomies: Traversal.V[Taxonomy] = traversal.out[OrganisationTaxonomy].v[Taxonomy] + def caseTemplates: Traversal.V[CaseTemplate] = traversal.in[CaseTemplateOrganisation].v[CaseTemplate] def users(requiredPermission: Permission): Traversal.V[User] = diff --git a/thehive/app/org/thp/thehive/services/TaxonomySrv.scala b/thehive/app/org/thp/thehive/services/TaxonomySrv.scala new file mode 100644 index 0000000000..7bd16db5c6 --- /dev/null +++ b/thehive/app/org/thp/thehive/services/TaxonomySrv.scala @@ -0,0 +1,66 @@ +package org.thp.thehive.services + +import java.util.{Map => JMap} + +import javax.inject.{Inject, Named} +import org.apache.tinkerpop.gremlin.structure.Graph +import org.thp.scalligraph.{EntityIdOrName, RichSeq} +import org.thp.scalligraph.auth.AuthContext +import org.thp.scalligraph.models.{Database, Entity} +import org.thp.scalligraph.services.{EdgeSrv, VertexSrv} +import org.thp.scalligraph.traversal.{Converter, Traversal} +import org.thp.scalligraph.traversal.TraversalOps.TraversalOpsDefs +import org.thp.thehive.models.{Organisation, OrganisationTaxonomy, Predicate, RichTaxonomy, Tag, Taxonomy, TaxonomyTag, Value} +import org.thp.thehive.services.OrganisationOps._ + +import scala.util.Try + +@Singleton +class TaxonomySrv @Inject() ( +)(implicit + @Named("with-thehive-schema") db: Database +) extends VertexSrv[Taxonomy] { + + val taxonomyTagSrv = new EdgeSrv[TaxonomyTag, Taxonomy, Tag] + + def create(taxo: Taxonomy, tags: Seq[Tag with Entity])(implicit graph: Graph, authContext: AuthContext): Try[RichTaxonomy] = + for { + taxonomy <- createEntity(taxo) + _ <- tags.toTry(t => taxonomyTagSrv.create(TaxonomyTag(), taxonomy, t)) + richTaxonomy <- RichTaxonomy(taxonomy, ???, ???) 
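      // `RichTaxonomy(...)` is not wrapped in Try, so this arrow cannot compile as written; the
      // revision later in this series fills the placeholders from the tag list and wraps the call:
      //   richTaxonomy <- Try(RichTaxonomy(taxonomy, tags))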
+ } yield richTaxonomy +} + +object TaxonomyOps { + implicit class TaxonomyOpsDefs(traversal: Traversal.V[Taxonomy]) { + + def visible(implicit authContext: AuthContext): Traversal.V[Taxonomy] = visible(authContext.organisation) + + def visible(organisationIdOrName: EntityIdOrName): Traversal.V[Taxonomy] = + traversal.filter(_.organisations.get(organisationIdOrName)) + + def organisations: Traversal.V[Organisation] = traversal.in[OrganisationTaxonomy].v[Organisation] + + def tags: Traversal.V[Tag] = traversal.out[TaxonomyTag].v[Tag] + + def richTaxonomy(implicit authContext: AuthContext): Traversal[RichTaxonomy, JMap[String, Any], Converter[RichTaxonomy, JMap[String, Any]]] = + traversal + .project( + _.by + .by(_.tags.fold) + ) + .domainMap { + case (taxonomy, tags) => + val predicates = tags.map(t => Predicate(t.predicate)).distinct + val values = predicates.map { p => + val tagValues = tags + .filter(_.predicate == p.value) + .filter(_.value.isDefined) + .map(_.value.get) + Value(p.value, tagValues) + } + RichTaxonomy(taxonomy, predicates, values) + } + + } +} From 2327ffa2b0cdef7cf01af926d4bd86908ebb39a0 Mon Sep 17 00:00:00 2001 From: Robin Riclet Date: Thu, 12 Nov 2020 18:52:40 +0100 Subject: [PATCH 25/93] WIP Continued mapping for taxonomies --- .../org/thp/thehive/dto/v0/Taxonomy.scala | 43 -------- .../org/thp/thehive/dto/v1/Taxonomy.scala | 17 +-- .../thehive/controllers/v0/Conversion.scala | 21 ---- .../thehive/controllers/v0/TaxonomyCtrl.scala | 74 ------------- .../thehive/controllers/v1/Conversion.scala | 26 +++-- .../thehive/controllers/v1/Properties.scala | 10 ++ .../thehive/controllers/v1/TaxonomyCtrl.scala | 103 ++++++++++++++++++ .../app/org/thp/thehive/models/Taxonomy.scala | 8 +- .../app/org/thp/thehive/services/TagSrv.scala | 12 +- .../thp/thehive/services/TaxonomySrv.scala | 42 +++---- 10 files changed, 168 insertions(+), 188 deletions(-) delete mode 100644 dto/src/main/scala/org/thp/thehive/dto/v0/Taxonomy.scala delete mode 100644 thehive/app/org/thp/thehive/controllers/v0/TaxonomyCtrl.scala create mode 100644 thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala diff --git a/dto/src/main/scala/org/thp/thehive/dto/v0/Taxonomy.scala b/dto/src/main/scala/org/thp/thehive/dto/v0/Taxonomy.scala deleted file mode 100644 index a5af5ff61e..0000000000 --- a/dto/src/main/scala/org/thp/thehive/dto/v0/Taxonomy.scala +++ /dev/null @@ -1,43 +0,0 @@ -package org.thp.thehive.dto.v0 - -import java.util.Date - -import play.api.libs.json.{Json, OFormat, OWrites} - -case class InputTaxonomy ( - namespace: String, - description: String, - version: Int, - predicates: Seq[InputPredicate], - values: Option[Seq[InputValue]] -) - -case class InputPredicate(value: String, expanded: String) - -case class InputValue(predicate: String, entry: Seq[InputPredicate]) - -object InputTaxonomy { - implicit val writes: OWrites[InputTaxonomy] = Json.writes[InputTaxonomy] -} - -case class OutputTaxonomy( - _id: String, - _type: String, - _createdBy: String, - _updatedBy: Option[String] = None, - _createdAt: Date, - _updatedAt: Option[Date] = None, - namespace: String, - description: String, - version: Int, - predicates: Seq[OutputPredicate], - values: Option[Seq[OutputValue]] -) - -case class OutputPredicate(value: String, expanded: String) - -case class OutputValue(predicate: String, entry: Seq[OutputPredicate]) - -object OutputTaxonomy { - implicit val format: OFormat[OutputTaxonomy] = Json.format[OutputTaxonomy] -} \ No newline at end of file diff --git 
a/dto/src/main/scala/org/thp/thehive/dto/v1/Taxonomy.scala b/dto/src/main/scala/org/thp/thehive/dto/v1/Taxonomy.scala index 1c6a1b2bc9..a2d05e879c 100644 --- a/dto/src/main/scala/org/thp/thehive/dto/v1/Taxonomy.scala +++ b/dto/src/main/scala/org/thp/thehive/dto/v1/Taxonomy.scala @@ -4,17 +4,18 @@ import java.util.Date import play.api.libs.json.{Json, OFormat, OWrites} +// TODO make sure of input format case class InputTaxonomy ( namespace: String, description: String, version: Int, - predicates: Seq[InputPredicate], - values: Option[Seq[InputValue]] + predicates: Seq[String], + values: Option[Seq[InputEntry]] ) -case class InputPredicate(value: String, expanded: String) +case class InputEntry(predicate: String, entry: Seq[InputValue]) -case class InputValue(predicate: String, entry: Seq[InputPredicate]) +case class InputValue(value: String, expanded: String, colour: Option[String]) object InputTaxonomy { implicit val writes: OWrites[InputTaxonomy] = Json.writes[InputTaxonomy] @@ -30,13 +31,13 @@ case class OutputTaxonomy( namespace: String, description: String, version: Int, - predicates: Seq[OutputPredicate], - values: Option[Seq[OutputValue]] + predicates: Seq[String], + values: Option[Seq[OutputEntry]] ) -case class OutputPredicate(value: String, expanded: String) +case class OutputEntry(predicate: String, entry: Seq[OutputValue]) -case class OutputValue(predicate: String, entry: Seq[OutputPredicate]) +case class OutputValue(value: String, expanded: String) object OutputTaxonomy { implicit val format: OFormat[OutputTaxonomy] = Json.format[OutputTaxonomy] diff --git a/thehive/app/org/thp/thehive/controllers/v0/Conversion.scala b/thehive/app/org/thp/thehive/controllers/v0/Conversion.scala index 345a2a643e..f972afd972 100644 --- a/thehive/app/org/thp/thehive/controllers/v0/Conversion.scala +++ b/thehive/app/org/thp/thehive/controllers/v0/Conversion.scala @@ -571,27 +571,6 @@ object Conversion { .transform } - implicit class InputTaxonomyOps(inputTaxonomy: InputTaxonomy) { - - def toTaxonomy: Taxonomy = - inputTaxonomy - .into[Taxonomy] - .withFieldComputed(_.namespace, _.namespace) - .withFieldComputed(_.description, _.description) - .withFieldComputed(_.version, _.version) - .transform - } - - implicit val taxonomyOutput: Renderer.Aux[RichTaxonomy, OutputTaxonomy] = Renderer.toJson[RichTaxonomy, OutputTaxonomy]( - _.into[OutputTaxonomy] - .withFieldComputed(_.namespace, _.namespace) - .withFieldComputed(_.description, _.description) - .withFieldComputed(_.version, _.version) - .withFieldComputed(_.predicates, _.predicates) - .withFieldComputed(_.values, _.values) - .transform - ) - implicit class InputUserOps(inputUser: InputUser) { def toUser: User = diff --git a/thehive/app/org/thp/thehive/controllers/v0/TaxonomyCtrl.scala b/thehive/app/org/thp/thehive/controllers/v0/TaxonomyCtrl.scala deleted file mode 100644 index ac6622e6d2..0000000000 --- a/thehive/app/org/thp/thehive/controllers/v0/TaxonomyCtrl.scala +++ /dev/null @@ -1,74 +0,0 @@ -package org.thp.thehive.controllers.v0 - -import javax.inject.{Inject, Named, Singleton} -import org.thp.scalligraph.EntityIdOrName -import org.thp.scalligraph.controllers.{Entrypoint, FFile, FieldsParser} -import org.thp.scalligraph.models.{Database, UMapping} -import org.thp.scalligraph.query.{ParamQuery, PublicProperties, PublicPropertyListBuilder, Query, QueryExecutor} -import org.thp.scalligraph.traversal.TraversalOps.TraversalOpsDefs -import org.thp.scalligraph.traversal.{IteratorOutput, Traversal} -import 
org.thp.thehive.controllers.v0.Conversion.taxonomyOutput -import org.thp.thehive.dto.v1.InputTaxonomy -import org.thp.thehive.models.{Permissions, RichTaxonomy, Taxonomy} -import org.thp.thehive.services.{OrganisationSrv, TaxonomySrv} -import org.thp.thehive.services.OrganisationOps._ -import org.thp.thehive.services.TaxonomyOps._ -import play.api.mvc.{Action, AnyContent} - -class TaxonomyCtrl @Inject() ( - override val entrypoint: Entrypoint, - @Named("with-thehive-schema") override val db: Database, - @Named("v0") override val queryExecutor: QueryExecutor, - override val publicData: PublicTaxonomy -) extends QueryCtrl { - def importTaxonomy: Action[AnyContent] = - entrypoint("import taxonomy") - .extract("file", FieldsParser.file.optional.on("file")) - .extract("taxonomy", FieldsParser[InputTaxonomy]) - .authPermittedTransaction(db, Permissions.manageTaxonomy) { implicit request => implicit graph => - val file: Option[FFile] = request.body("file") - val taxonomy: InputTaxonomy = request.body("taxonomy") - - // Create Taxonomy vertex - // Create Tags associated - // Add edge orgaTaxo - - ??? - } - -} - -@Singleton -class PublicTaxonomy @Inject() ( - taxonomySrv: TaxonomySrv, - organisationSrv: OrganisationSrv -) extends PublicData { - override val entityName: String = "taxonomy" - override val initialQuery: Query = - Query.init[Traversal.V[Taxonomy]]("listTaxonomy", (graph, authContext) => - organisationSrv.get(authContext.organisation)(graph).taxonomies - ) - override val pageQuery: ParamQuery[OutputParam] = Query.withParam[OutputParam, Traversal.V[Taxonomy], IteratorOutput]( - "page", - FieldsParser[OutputParam], - (range, taxoSteps, _) => taxoSteps.page(range.from, range.to, withTotal = true)(???) - ) - override val outputQuery: Query = Query.outputWithContext[RichTaxonomy, Traversal.V[Taxonomy]]((taxonomySteps, authContext) => - taxonomySteps.richTaxonomy(authContext) - ) - override val getQuery: ParamQuery[EntityIdOrName] = - Query.initWithParam[EntityIdOrName, Traversal.V[Taxonomy]]( - "getTaxonomy", - FieldsParser[EntityIdOrName], - (idOrName, graph, authContext) => taxonomySrv.get(idOrName)(graph).visible(authContext) - ) - override val publicProperties: PublicProperties = - PublicPropertyListBuilder[Taxonomy] - .property("namespace", UMapping.string)(_.field.readonly) - .property("description", UMapping.string)(_.field.readonly) - .property("version", UMapping.int)(_.field.readonly) - // Predicates ? - // Values ? 
- .build - -} \ No newline at end of file diff --git a/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala b/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala index eb3e8f4d2e..ae205ddd65 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala @@ -5,7 +5,7 @@ import java.util.Date import io.scalaland.chimney.dsl._ import org.thp.scalligraph.controllers.Renderer import org.thp.scalligraph.models.Entity -import org.thp.thehive.dto.v0.{InputTaxonomy, OutputTaxonomy} +import org.thp.thehive.dto.v1.{InputTaxonomy, OutputTaxonomy} import org.thp.thehive.dto.v1._ import org.thp.thehive.models._ import play.api.libs.json.{JsObject, JsValue, Json} @@ -263,15 +263,20 @@ object Conversion { .transform } - implicit val taxonomyOutput: Renderer.Aux[RichTaxonomy, OutputTaxonomy] = Renderer.toJson[RichTaxonomy, OutputTaxonomy]( - _.into[OutputTaxonomy] - .withFieldComputed(_.namespace, _.namespace) - .withFieldComputed(_.description, _.description) - .withFieldComputed(_.version, _.version) - .withFieldComputed(_.predicates, _.predicates) - .withFieldComputed(_.values, _.values) - .transform - ) + implicit val taxonomyOutput: Renderer.Aux[RichTaxonomy, OutputTaxonomy] = + Renderer.toJson[RichTaxonomy, OutputTaxonomy]( + _.into[OutputTaxonomy] + .withFieldComputed(_.namespace, _.namespace) + .withFieldComputed(_.description, _.description) + .withFieldComputed(_.version, _.version) + .withFieldComputed(_.predicates, _.tags.map(_.predicate).distinct) + .withFieldComputed(_.values, _.tags.foldLeft(Map[String, Seq[OutputValue]]())((entryMap, tag) => { + val outputValues = entryMap.getOrElse(tag.predicate, Seq()) + val value = OutputValue(tag.value.getOrElse(""), tag.description.getOrElse("")) + entryMap + (tag.predicate -> (outputValues :+ value)) + }).map(e => OutputEntry(e._1, e._2))) + .transform + ) implicit class InputUserOps(inputUser: InputUser) { @@ -357,6 +362,7 @@ object Conversion { .withFieldComputed(_.tlp, _.tlp.getOrElse(2)) .transform } + implicit val observableOutput: Renderer.Aux[RichObservable, OutputObservable] = Renderer.toJson[RichObservable, OutputObservable](richObservable => richObservable .into[OutputObservable] diff --git a/thehive/app/org/thp/thehive/controllers/v1/Properties.scala b/thehive/app/org/thp/thehive/controllers/v1/Properties.scala index a41f6a537d..9832b6b6f3 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/Properties.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/Properties.scala @@ -491,4 +491,14 @@ class Properties @Inject() ( .property("attachment.contentType", UMapping.string.optional)(_.select(_.attachments.value(_.contentType)).readonly) .property("attachment.id", UMapping.string.optional)(_.select(_.attachments.value(_.attachmentId)).readonly) .build + + lazy val taxonomy: PublicProperties = + PublicPropertyListBuilder[Taxonomy] + .property("namespace", UMapping.string)(_.field.readonly) + .property("description", UMapping.string)(_.field.readonly) + .property("version", UMapping.int)(_.field.readonly) + // Predicates ? + // Values ? 
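      // Predicates and values are not fields on the Taxonomy vertex; they are derived from the Tag
      // vertices reached through TaxonomyTag edges (see richTaxonomy and the taxonomyOutput
      // renderer), so exposing them here would presumably need a select over _.tags rather than a
      // plain field property.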
+ .build + } diff --git a/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala b/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala new file mode 100644 index 0000000000..d60db4d141 --- /dev/null +++ b/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala @@ -0,0 +1,103 @@ +package org.thp.thehive.controllers.v1 + +import javax.inject.{Inject, Named} +import org.thp.scalligraph.{EntityIdOrName, RichSeq} +import org.thp.scalligraph.controllers.{Entrypoint, FFile, FieldsParser} +import org.thp.scalligraph.models.Database +import org.thp.scalligraph.query._ +import org.thp.scalligraph.traversal.TraversalOps.TraversalOpsDefs +import org.thp.scalligraph.traversal.{IteratorOutput, Traversal} +import org.thp.thehive.controllers.v1.Conversion._ +import org.thp.thehive.dto.v1.InputTaxonomy +import org.thp.thehive.models.{Permissions, RichTaxonomy, Tag, Taxonomy} +import org.thp.thehive.services.OrganisationOps._ +import org.thp.thehive.services.TaxonomyOps._ +import org.thp.thehive.services.{OrganisationSrv, TagSrv, TaxonomySrv} +import play.api.mvc.{Action, AnyContent, Results} + +import scala.+: + +class TaxonomyCtrl @Inject() ( + entrypoint: Entrypoint, + properties: Properties, + taxonomySrv: TaxonomySrv, + organisationSrv: OrganisationSrv, + tagSrv: TagSrv, + @Named("with-thehive-schema") implicit val db: Database +) extends QueryableCtrl { + + override val entityName: String = "taxonomy" + override val publicProperties: PublicProperties = properties.taxonomy + override val initialQuery: Query = + Query.init[Traversal.V[Taxonomy]]("listTaxonomy", (graph, authContext) => + organisationSrv.get(authContext.organisation)(graph).taxonomies + ) + override val getQuery: ParamQuery[EntityIdOrName] = + Query.initWithParam[EntityIdOrName, Traversal.V[Taxonomy]]( + "getTaxonomy", + FieldsParser[EntityIdOrName], + (idOrName, graph, authContext) => taxonomySrv.get(idOrName)(graph).visible(authContext) + ) + override val pageQuery: ParamQuery[OutputParam] = + Query.withParam[OutputParam, Traversal.V[Taxonomy], IteratorOutput]( + "page", + FieldsParser[OutputParam], + (range, traversal, authContext) => + traversal.richPage(range.from, range.to, range.extraData.contains("total"))(_.richTaxonomy(authContext)) + ) + override val outputQuery: Query = + Query.outputWithContext[RichTaxonomy, Traversal.V[Taxonomy]]((traversal, authContext) => + traversal.richTaxonomy(authContext) + ) + override val extraQueries: Seq[ParamQuery[_]] = Seq( + Query[Traversal.V[Taxonomy], Traversal.V[Tag]]("tags", (traversal, _) => traversal.tags) + ) + + def importTaxonomy: Action[AnyContent] = + entrypoint("import taxonomy") + .extract("file", FieldsParser.file.optional.on("file")) + .extract("taxonomy", FieldsParser[InputTaxonomy]) + .authPermittedTransaction(db, Permissions.manageTaxonomy) { implicit request => implicit graph => + val file: Option[FFile] = request.body("file") + val inputTaxo: InputTaxonomy = request.body("taxonomy") + + // TODO Parse file & combine with body + + val taxonomy = Taxonomy(inputTaxo.namespace, inputTaxo.description, inputTaxo.version) + + // Create tags + val tagValues = inputTaxo.values.getOrElse(Seq()) + val tags = tagValues.foldLeft(Seq[Tag]())((all, value) => { + all ++ value.entry.map(e => + Tag(inputTaxo.namespace, + value.predicate, + Some(e.value), + Some(e.expanded), + e.colour.map(tagSrv.parseTagColour).getOrElse(tagSrv.defaultColour)) + + ) + } + ) + + // Create a tag for predicates with no tags associated + val predicateWithNoTags = 
inputTaxo.predicates.diff(tagValues.map(_.predicate)) + tags ++ predicateWithNoTags.map(p => + Tag(inputTaxo.namespace, p, None, None, tagSrv.defaultColour) + ) + + for { + tagsEntities <- tags.toTry(t => tagSrv.create(t)) + richTaxonomy <- taxonomySrv.create(taxonomy, tagsEntities) + } yield Results.Created(richTaxonomy.toJson) + } + + def delete(namespace: String): Action[AnyContent] = + entrypoint("delete taxonomy") + .authTransaction(db) { implicit request => implicit graph => + for { + t <- taxonomySrv.getByNamespace(namespace) + + } yield Results.Nocontent + } + +} diff --git a/thehive/app/org/thp/thehive/models/Taxonomy.scala b/thehive/app/org/thp/thehive/models/Taxonomy.scala index 3ed3c6f0cb..925956d9cd 100644 --- a/thehive/app/org/thp/thehive/models/Taxonomy.scala +++ b/thehive/app/org/thp/thehive/models/Taxonomy.scala @@ -12,17 +12,12 @@ case class Taxonomy( version: Int ) -case class Predicate(value: String) - -case class Value(predicate: String, entry: Seq[String]) - @BuildEdgeEntity[Taxonomy, Tag] case class TaxonomyTag() case class RichTaxonomy( taxonomy: Taxonomy with Entity, - predicates: Seq[Predicate], - values: Seq[Value] + tags: Seq[Tag with Entity] ) { def _id: EntityId = taxonomy._id def _createdBy: String = taxonomy._createdBy @@ -32,5 +27,4 @@ case class RichTaxonomy( def namespace: String = taxonomy.namespace def description: String = taxonomy.description def version: Int = taxonomy.version - } diff --git a/thehive/app/org/thp/thehive/services/TagSrv.scala b/thehive/app/org/thp/thehive/services/TagSrv.scala index a035558eaa..d937be0d92 100644 --- a/thehive/app/org/thp/thehive/services/TagSrv.scala +++ b/thehive/app/org/thp/thehive/services/TagSrv.scala @@ -19,27 +19,31 @@ class TagSrv @Inject() (appConfig: ApplicationConfig, @Named("integrity-check-ac @Named("with-thehive-schema") db: Database ) extends VertexSrv[Tag] { - val autoCreateConfig: ConfigItem[Boolean, Boolean] = + private val autoCreateConfig: ConfigItem[Boolean, Boolean] = appConfig.item[Boolean]("tags.autocreate", "If true, create automatically tag if it doesn't exist") def autoCreate: Boolean = autoCreateConfig.get - val defaultNamespaceConfig: ConfigItem[String, String] = + private val defaultNamespaceConfig: ConfigItem[String, String] = appConfig.item[String]("tags.defaultNamespace", "Default namespace of the automatically created tags") def defaultNamespace: String = defaultNamespaceConfig.get - val defaultColourConfig: ConfigItem[String, Int] = + private val defaultColourConfig: ConfigItem[String, Int] = appConfig.mapItem[String, Int]( "tags.defaultColour", "Default colour of the automatically created tags", { - case s if s(0) == '#' => Try(Integer.parseUnsignedInt(s.tail, 16)).getOrElse(defaultColour) + case s if s(0) == '#' => parseTagColour(s.tail) case _ => defaultColour } ) + def defaultColour: Int = defaultColourConfig.get + // TODO Duplication in Tag.scala + def parseTagColour(c: String) = Try(Integer.parseUnsignedInt(c, 16)).getOrElse(defaultColour) + def parseString(tagName: String): Tag = Tag.fromString(tagName, defaultNamespace, defaultColour) diff --git a/thehive/app/org/thp/thehive/services/TaxonomySrv.scala b/thehive/app/org/thp/thehive/services/TaxonomySrv.scala index 7bd16db5c6..8a7e906979 100644 --- a/thehive/app/org/thp/thehive/services/TaxonomySrv.scala +++ b/thehive/app/org/thp/thehive/services/TaxonomySrv.scala @@ -4,36 +4,48 @@ import java.util.{Map => JMap} import javax.inject.{Inject, Named} import org.apache.tinkerpop.gremlin.structure.Graph -import 
org.thp.scalligraph.{EntityIdOrName, RichSeq} import org.thp.scalligraph.auth.AuthContext import org.thp.scalligraph.models.{Database, Entity} import org.thp.scalligraph.services.{EdgeSrv, VertexSrv} -import org.thp.scalligraph.traversal.{Converter, Traversal} import org.thp.scalligraph.traversal.TraversalOps.TraversalOpsDefs -import org.thp.thehive.models.{Organisation, OrganisationTaxonomy, Predicate, RichTaxonomy, Tag, Taxonomy, TaxonomyTag, Value} +import org.thp.scalligraph.traversal.{Converter, Traversal} +import org.thp.scalligraph.{EntityIdOrName, RichSeq} +import org.thp.thehive.models._ import org.thp.thehive.services.OrganisationOps._ import scala.util.Try @Singleton class TaxonomySrv @Inject() ( -)(implicit - @Named("with-thehive-schema") db: Database + organisationSrv: OrganisationSrv, + tagSrv: TagSrv +)(implicit @Named("with-thehive-schema") db: Database ) extends VertexSrv[Taxonomy] { val taxonomyTagSrv = new EdgeSrv[TaxonomyTag, Taxonomy, Tag] + val organisationTaxonomySrv = new EdgeSrv[OrganisationTaxonomy, Organisation, Taxonomy] def create(taxo: Taxonomy, tags: Seq[Tag with Entity])(implicit graph: Graph, authContext: AuthContext): Try[RichTaxonomy] = for { - taxonomy <- createEntity(taxo) - _ <- tags.toTry(t => taxonomyTagSrv.create(TaxonomyTag(), taxonomy, t)) - richTaxonomy <- RichTaxonomy(taxonomy, ???, ???) + taxonomy <- createEntity(taxo) + organisation <- organisationSrv.getOrFail(authContext.organisation) + _ <- organisationTaxonomySrv.create(OrganisationTaxonomy(), organisation, taxonomy) + _ <- tags.toTry(t => taxonomyTagSrv.create(TaxonomyTag(), taxonomy, t)) + richTaxonomy <- Try(RichTaxonomy(taxonomy, tags)) } yield richTaxonomy +/* + + def getByNamespace(namespace: String)(implicit graph: Graph): Traversal.V[Taxonomy] = + Try(startTraversal.getByNamespace(namespace)).getOrElse(startTraversal.limit(0)) +*/ + } object TaxonomyOps { implicit class TaxonomyOpsDefs(traversal: Traversal.V[Taxonomy]) { + def getByNamespace(namespace: String): Traversal.V[Taxonomy] = traversal.has(_.namespace, namespace) + def visible(implicit authContext: AuthContext): Traversal.V[Taxonomy] = visible(authContext.organisation) def visible(organisationIdOrName: EntityIdOrName): Traversal.V[Taxonomy] = @@ -49,18 +61,6 @@ object TaxonomyOps { _.by .by(_.tags.fold) ) - .domainMap { - case (taxonomy, tags) => - val predicates = tags.map(t => Predicate(t.predicate)).distinct - val values = predicates.map { p => - val tagValues = tags - .filter(_.predicate == p.value) - .filter(_.value.isDefined) - .map(_.value.get) - Value(p.value, tagValues) - } - RichTaxonomy(taxonomy, predicates, values) - } - + .domainMap { case (taxonomy, tags) => RichTaxonomy(taxonomy, tags) } } } From 9960e4dbd6db12da59846fdabce3dcc6b44b4c2f Mon Sep 17 00:00:00 2001 From: Robin Riclet Date: Mon, 16 Nov 2020 15:18:24 +0100 Subject: [PATCH 26/93] WIP create taxonomiy / get works --- .../thp/thehive/dto/v1/CustomFieldValue.scala | 1 + .../org/thp/thehive/dto/v1/Taxonomy.scala | 30 +++++- .../thehive/controllers/v1/Conversion.scala | 10 +- .../thp/thehive/controllers/v1/Router.scala | 8 ++ .../thehive/controllers/v1/TaxonomyCtrl.scala | 93 +++++++++++-------- .../org/thp/thehive/models/Permissions.scala | 2 +- .../models/TheHiveSchemaDefinition.scala | 17 +++- .../thehive/services/OrganisationSrv.scala | 10 +- .../thp/thehive/services/TaxonomySrv.scala | 8 +- 9 files changed, 125 insertions(+), 54 deletions(-) diff --git a/dto/src/main/scala/org/thp/thehive/dto/v1/CustomFieldValue.scala 
b/dto/src/main/scala/org/thp/thehive/dto/v1/CustomFieldValue.scala index 6e72438d06..06d6fb16e4 100644 --- a/dto/src/main/scala/org/thp/thehive/dto/v1/CustomFieldValue.scala +++ b/dto/src/main/scala/org/thp/thehive/dto/v1/CustomFieldValue.scala @@ -70,6 +70,7 @@ object InputCustomFieldValue { } case _ => Good(Nil) } + implicit val writes: Writes[Seq[InputCustomFieldValue]] = Writes[Seq[InputCustomFieldValue]] { icfv => val fields = icfv.map { case InputCustomFieldValue(name, Some(s: String), _) => name -> JsString(s) diff --git a/dto/src/main/scala/org/thp/thehive/dto/v1/Taxonomy.scala b/dto/src/main/scala/org/thp/thehive/dto/v1/Taxonomy.scala index a2d05e879c..f0ebfb9659 100644 --- a/dto/src/main/scala/org/thp/thehive/dto/v1/Taxonomy.scala +++ b/dto/src/main/scala/org/thp/thehive/dto/v1/Taxonomy.scala @@ -2,7 +2,11 @@ package org.thp.thehive.dto.v1 import java.util.Date -import play.api.libs.json.{Json, OFormat, OWrites} +import org.scalactic.Accumulation.convertGenTraversableOnceToValidatable +import org.scalactic.{Bad, Good, One} +import org.thp.scalligraph.InvalidFormatAttributeError +import org.thp.scalligraph.controllers.{FObject, FSeq, FieldsParser, WithParser} +import play.api.libs.json.{JsArray, JsObject, JsString, Json, OFormat, OWrites, Writes} // TODO make sure of input format case class InputTaxonomy ( @@ -17,6 +21,20 @@ case class InputEntry(predicate: String, entry: Seq[InputValue]) case class InputValue(value: String, expanded: String, colour: Option[String]) +object InputEntry { + implicitly[FieldsParser[Option[Seq[InputEntry]]]] + + implicit val parser: FieldsParser[InputEntry] = FieldsParser[InputEntry] + + implicit val writes: Writes[InputEntry] = Json.writes[InputEntry] +} + +object InputValue { + implicit val parser: FieldsParser[InputValue] = FieldsParser[InputValue] + + implicit val writes: Writes[InputValue] = Json.writes[InputValue] +} + object InputTaxonomy { implicit val writes: OWrites[InputTaxonomy] = Json.writes[InputTaxonomy] } @@ -32,7 +50,7 @@ case class OutputTaxonomy( description: String, version: Int, predicates: Seq[String], - values: Option[Seq[OutputEntry]] + values: Seq[OutputEntry] ) case class OutputEntry(predicate: String, entry: Seq[OutputValue]) @@ -41,4 +59,12 @@ case class OutputValue(value: String, expanded: String) object OutputTaxonomy { implicit val format: OFormat[OutputTaxonomy] = Json.format[OutputTaxonomy] +} + +object OutputEntry { + implicit val format: OFormat[OutputEntry] = Json.format[OutputEntry] +} + +object OutputValue { + implicit val format: OFormat[OutputValue] = Json.format[OutputValue] } \ No newline at end of file diff --git a/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala b/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala index ae205ddd65..c6905ff5cd 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala @@ -266,15 +266,19 @@ object Conversion { implicit val taxonomyOutput: Renderer.Aux[RichTaxonomy, OutputTaxonomy] = Renderer.toJson[RichTaxonomy, OutputTaxonomy]( _.into[OutputTaxonomy] + .withFieldComputed(_._id, _._id.toString) + .withFieldConst(_._type, "Taxonomy") .withFieldComputed(_.namespace, _.namespace) .withFieldComputed(_.description, _.description) .withFieldComputed(_.version, _.version) .withFieldComputed(_.predicates, _.tags.map(_.predicate).distinct) .withFieldComputed(_.values, _.tags.foldLeft(Map[String, Seq[OutputValue]]())((entryMap, tag) => { val outputValues = 
entryMap.getOrElse(tag.predicate, Seq()) - val value = OutputValue(tag.value.getOrElse(""), tag.description.getOrElse("")) - entryMap + (tag.predicate -> (outputValues :+ value)) - }).map(e => OutputEntry(e._1, e._2))) + if (tag.value.isDefined) + entryMap + (tag.predicate -> (outputValues :+ OutputValue(tag.value.get, tag.description.getOrElse("")))) + else + entryMap + (tag.predicate -> outputValues) + }).map(e => OutputEntry(e._1, e._2)).toSeq) .transform ) diff --git a/thehive/app/org/thp/thehive/controllers/v1/Router.scala b/thehive/app/org/thp/thehive/controllers/v1/Router.scala index feffe865bb..1683c010ad 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/Router.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/Router.scala @@ -14,6 +14,7 @@ class Router @Inject() ( taskCtrl: TaskCtrl, customFieldCtrl: CustomFieldCtrl, alertCtrl: AlertCtrl, + taxonomyCtrl: TaxonomyCtrl, auditCtrl: AuditCtrl, statusCtrl: StatusCtrl, authenticationCtrl: AuthenticationCtrl, @@ -90,6 +91,13 @@ class Router @Inject() ( // DELETE /alert/:alertId controllers.AlertCtrl.delete(alertId) // POST /alert/:alertId/merge/:caseId controllers.AlertCtrl.mergeWithCase(alertId, caseId) + case GET(p"/taxonomy") => taxonomyCtrl.list + case GET(p"/taxonomy/$taxoId") => taxonomyCtrl.get(taxoId) + case POST(p"/taxonomy") => taxonomyCtrl.create + // case POST(p"/taxonomy/import-zip") => taxonomyCtrl.importZip + // case PUT(p"/taxonomy/$taxoId/activate") => taxonomyCtrl.activate + // case PUT(p"/taxonomy/$taxoId/deactivate") => taxonomyCtrl.deactivate + case GET(p"/audit") => auditCtrl.flow // GET /flow controllers.AuditCtrl.flow(rootId: Option[String], count: Option[Int]) // GET /audit controllers.AuditCtrl.find() diff --git a/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala b/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala index d60db4d141..6a8220514c 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala @@ -2,7 +2,7 @@ package org.thp.thehive.controllers.v1 import javax.inject.{Inject, Named} import org.thp.scalligraph.{EntityIdOrName, RichSeq} -import org.thp.scalligraph.controllers.{Entrypoint, FFile, FieldsParser} +import org.thp.scalligraph.controllers.{Entrypoint, FieldsParser} import org.thp.scalligraph.models.Database import org.thp.scalligraph.query._ import org.thp.scalligraph.traversal.TraversalOps.TraversalOpsDefs @@ -15,7 +15,7 @@ import org.thp.thehive.services.TaxonomyOps._ import org.thp.thehive.services.{OrganisationSrv, TagSrv, TaxonomySrv} import play.api.mvc.{Action, AnyContent, Results} -import scala.+: +import scala.util.Success class TaxonomyCtrl @Inject() ( entrypoint: Entrypoint, @@ -42,55 +42,73 @@ class TaxonomyCtrl @Inject() ( Query.withParam[OutputParam, Traversal.V[Taxonomy], IteratorOutput]( "page", FieldsParser[OutputParam], - (range, traversal, authContext) => - traversal.richPage(range.from, range.to, range.extraData.contains("total"))(_.richTaxonomy(authContext)) + (range, traversal, _) => + traversal.richPage(range.from, range.to, range.extraData.contains("total"))(_.richTaxonomy) ) override val outputQuery: Query = - Query.outputWithContext[RichTaxonomy, Traversal.V[Taxonomy]]((traversal, authContext) => - traversal.richTaxonomy(authContext) + Query.outputWithContext[RichTaxonomy, Traversal.V[Taxonomy]]((traversal, _) => + traversal.richTaxonomy ) override val extraQueries: Seq[ParamQuery[_]] = Seq( Query[Traversal.V[Taxonomy], Traversal.V[Tag]]("tags", 
(traversal, _) => traversal.tags) ) - def importTaxonomy: Action[AnyContent] = - entrypoint("import taxonomy") - .extract("file", FieldsParser.file.optional.on("file")) - .extract("taxonomy", FieldsParser[InputTaxonomy]) - .authPermittedTransaction(db, Permissions.manageTaxonomy) { implicit request => implicit graph => - val file: Option[FFile] = request.body("file") - val inputTaxo: InputTaxonomy = request.body("taxonomy") - - // TODO Parse file & combine with body + def list: Action[AnyContent] = + entrypoint("list taxonomies") + .authRoTransaction(db) { implicit request => implicit graph => + val taxos = taxonomySrv + .startTraversal + .visible + .richTaxonomy + .toSeq + Success(Results.Ok(taxos.toJson)) + } - val taxonomy = Taxonomy(inputTaxo.namespace, inputTaxo.description, inputTaxo.version) + def create: Action[AnyContent] = + entrypoint("import taxonomy") + .extract("taxonomy", FieldsParser[InputTaxonomy]) + .authPermittedTransaction(db, Permissions.manageTaxonomy) { implicit request => implicit graph => + val inputTaxo: InputTaxonomy = request.body("taxonomy") - // Create tags - val tagValues = inputTaxo.values.getOrElse(Seq()) - val tags = tagValues.foldLeft(Seq[Tag]())((all, value) => { - all ++ value.entry.map(e => - Tag(inputTaxo.namespace, - value.predicate, - Some(e.value), - Some(e.expanded), - e.colour.map(tagSrv.parseTagColour).getOrElse(tagSrv.defaultColour)) + val taxonomy = Taxonomy(inputTaxo.namespace, inputTaxo.description, inputTaxo.version) - ) - } - ) + // Create tags + val tagValues = inputTaxo.values.getOrElse(Seq()) + val tags = tagValues.foldLeft(Seq[Tag]())((all, value) => { + all ++ value.entry.map(e => + Tag(inputTaxo.namespace, + value.predicate, + Some(e.value), + Some(e.expanded), + e.colour.map(tagSrv.parseTagColour).getOrElse(tagSrv.defaultColour) + ) + ) + }) - // Create a tag for predicates with no tags associated - val predicateWithNoTags = inputTaxo.predicates.diff(tagValues.map(_.predicate)) - tags ++ predicateWithNoTags.map(p => - Tag(inputTaxo.namespace, p, None, None, tagSrv.defaultColour) - ) + // Create a tag for predicates with no tags associated + val predicateWithNoTags = inputTaxo.predicates.diff(tagValues.map(_.predicate)) + val allTags = tags ++ predicateWithNoTags.map(p => + Tag(inputTaxo.namespace, p, None, None, tagSrv.defaultColour) + ) - for { - tagsEntities <- tags.toTry(t => tagSrv.create(t)) - richTaxonomy <- taxonomySrv.create(taxonomy, tagsEntities) - } yield Results.Created(richTaxonomy.toJson) + for { + tagsEntities <- allTags.toTry(t => tagSrv.create(t)) + richTaxonomy <- taxonomySrv.create(taxonomy, tagsEntities) + } yield Results.Created(richTaxonomy.toJson) } + def get(taxonomyId: String): Action[AnyContent] = + entrypoint("get taxonomy") + .authRoTransaction(db) { implicit request => implicit graph => + taxonomySrv + .get(EntityIdOrName(taxonomyId)) + .visible + .richTaxonomy + .getOrFail("Taxonomy") + .map(taxonomy => Results.Ok(taxonomy.toJson)) + } + +/* def delete(namespace: String): Action[AnyContent] = entrypoint("delete taxonomy") .authTransaction(db) { implicit request => implicit graph => @@ -99,5 +117,6 @@ class TaxonomyCtrl @Inject() ( } yield Results.Nocontent } +*/ } diff --git a/thehive/app/org/thp/thehive/models/Permissions.scala b/thehive/app/org/thp/thehive/models/Permissions.scala index de57993ad3..af57429268 100644 --- a/thehive/app/org/thp/thehive/models/Permissions.scala +++ b/thehive/app/org/thp/thehive/models/Permissions.scala @@ -14,7 +14,7 @@ object Permissions extends Perms { lazy val 
manageAction: PermissionDesc = PermissionDesc("manageAction", "Run Cortex responders ", "organisation") lazy val manageConfig: PermissionDesc = PermissionDesc("manageConfig", "Manage configurations", "organisation", "admin") lazy val manageProfile: PermissionDesc = PermissionDesc("manageProfile", "Manage user profiles", "admin") - lazy val manageTaxonomy: PermissionDesc = PermissionDesc("manageTaxonomy", "Manage taxonomies", "admin") + lazy val manageTaxonomy: PermissionDesc = PermissionDesc("manageTaxonomy", "Manage taxonomies", "organisation", "admin") lazy val manageTag: PermissionDesc = PermissionDesc("manageTag", "Manage tags", "admin") lazy val manageCustomField: PermissionDesc = PermissionDesc("manageCustomField", "Manage custom fields", "admin") lazy val manageShare: PermissionDesc = PermissionDesc("manageShare", "Manage shares", "organisation") diff --git a/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala b/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala index c9ece8d6fc..5353be8aaf 100644 --- a/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala +++ b/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala @@ -1,6 +1,7 @@ package org.thp.thehive.models import java.lang.reflect.Modifier +import java.util.Date import javax.inject.{Inject, Singleton} import org.apache.tinkerpop.gremlin.process.traversal.P @@ -92,7 +93,12 @@ class TheHiveSchemaDefinition @Inject() extends Schema with UpdatableSchema { db.tryTransaction { g => db.labelFilter("Organisation")(Traversal.V()(g)).toIterator.toTry { o => val taxoVertex = g.addVertex("Taxonomy") - taxoVertex.property("namespace", "Custom") + taxoVertex.property("_label", "Taxonomy") + taxoVertex.property("_createdBy", "???") // TODO What user should be used ? 
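      // The "???" placeholder is replaced later in this series by the built-in system user:
      //   taxoVertex.property("_createdBy", "system@thehive.local")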
+ taxoVertex.property("_createdAt", new Date()) + taxoVertex.property("namespace", "custom") + taxoVertex.property("description", "Custom taxonomy") + taxoVertex.property("version", 1) o.addEdge("OrganisationTaxonomy", taxoVertex) Success(()) } @@ -107,13 +113,18 @@ class TheHiveSchemaDefinition @Inject() extends Schema with UpdatableSchema { _.out("OrganisationShare").out("ShareObservable").out("ObservableTag"), _.in("AlertOrganisation").out("AlertTag"), _.in("CaseTemplateOrganisation").out("CaseTemplateTag") - ).toSeq.foreach(tag => + ).toSeq.foreach { tag => + tag.property("namespace", "custom") customTaxo.addEdge("TaxonomyTag", tag) - ) + } Success(()) } }.map(_ => ()) } + .updateGraph("Add manageTaxonomy to org-admin profile", "Profile") { traversal => + Try(traversal.unsafeHas("name", "org-admin").raw.property("permissions", "manageTaxonomy").iterate()) + Success(()) + } val reflectionClasses = new Reflections( new ConfigurationBuilder() diff --git a/thehive/app/org/thp/thehive/services/OrganisationSrv.scala b/thehive/app/org/thp/thehive/services/OrganisationSrv.scala index e74b249a54..026f8a9554 100644 --- a/thehive/app/org/thp/thehive/services/OrganisationSrv.scala +++ b/thehive/app/org/thp/thehive/services/OrganisationSrv.scala @@ -3,7 +3,7 @@ package org.thp.thehive.services import java.util.{Map => JMap} import akka.actor.ActorRef -import javax.inject.{Inject, Named, Singleton} +import javax.inject.{Inject, Named, Provider, Singleton} import org.apache.tinkerpop.gremlin.structure.Graph import org.thp.scalligraph.auth.{AuthContext, Permission} import org.thp.scalligraph.models._ @@ -23,6 +23,7 @@ import scala.util.{Failure, Success, Try} @Singleton class OrganisationSrv @Inject() ( + taxonomySrvProvider: Provider[TaxonomySrv], roleSrv: RoleSrv, profileSrv: ProfileSrv, auditSrv: AuditSrv, @@ -31,9 +32,9 @@ class OrganisationSrv @Inject() ( )(implicit @Named("with-thehive-schema") db: Database ) extends VertexSrv[Organisation] { - - val organisationOrganisationSrv = new EdgeSrv[OrganisationOrganisation, Organisation, Organisation] - val organisationShareSrv = new EdgeSrv[OrganisationShare, Organisation, Share] + lazy val taxonomySrv: TaxonomySrv = taxonomySrvProvider.get + val organisationOrganisationSrv = new EdgeSrv[OrganisationOrganisation, Organisation, Organisation] + val organisationShareSrv = new EdgeSrv[OrganisationShare, Organisation, Share] override def createEntity(e: Organisation)(implicit graph: Graph, authContext: AuthContext): Try[Organisation with Entity] = { integrityCheckActor ! 
IntegrityCheckActor.EntityAdded("Organisation") @@ -51,6 +52,7 @@ class OrganisationSrv @Inject() ( def create(e: Organisation)(implicit graph: Graph, authContext: AuthContext): Try[Organisation with Entity] = for { createdOrganisation <- createEntity(e) + _ <- taxonomySrv.create(Taxonomy("custom", "Custom taxonomy", 1), Seq()) _ <- auditSrv.organisation.create(createdOrganisation, createdOrganisation.toJson) } yield createdOrganisation diff --git a/thehive/app/org/thp/thehive/services/TaxonomySrv.scala b/thehive/app/org/thp/thehive/services/TaxonomySrv.scala index 8a7e906979..55117defea 100644 --- a/thehive/app/org/thp/thehive/services/TaxonomySrv.scala +++ b/thehive/app/org/thp/thehive/services/TaxonomySrv.scala @@ -2,7 +2,7 @@ package org.thp.thehive.services import java.util.{Map => JMap} -import javax.inject.{Inject, Named} +import javax.inject.{Inject, Named, Singleton} import org.apache.tinkerpop.gremlin.structure.Graph import org.thp.scalligraph.auth.AuthContext import org.thp.scalligraph.models.{Database, Entity} @@ -17,8 +17,7 @@ import scala.util.Try @Singleton class TaxonomySrv @Inject() ( - organisationSrv: OrganisationSrv, - tagSrv: TagSrv + organisationSrv: OrganisationSrv )(implicit @Named("with-thehive-schema") db: Database ) extends VertexSrv[Taxonomy] { @@ -33,6 +32,7 @@ class TaxonomySrv @Inject() ( _ <- tags.toTry(t => taxonomyTagSrv.create(TaxonomyTag(), taxonomy, t)) richTaxonomy <- Try(RichTaxonomy(taxonomy, tags)) } yield richTaxonomy + /* def getByNamespace(namespace: String)(implicit graph: Graph): Traversal.V[Taxonomy] = @@ -55,7 +55,7 @@ object TaxonomyOps { def tags: Traversal.V[Tag] = traversal.out[TaxonomyTag].v[Tag] - def richTaxonomy(implicit authContext: AuthContext): Traversal[RichTaxonomy, JMap[String, Any], Converter[RichTaxonomy, JMap[String, Any]]] = + def richTaxonomy: Traversal[RichTaxonomy, JMap[String, Any], Converter[RichTaxonomy, JMap[String, Any]]] = traversal .project( _.by From f285f2be966affd898b1a9ed925cc07b625a2354 Mon Sep 17 00:00:00 2001 From: Robin Riclet Date: Mon, 16 Nov 2020 16:47:17 +0100 Subject: [PATCH 27/93] WIP Custom taxonomy when new organisation is created --- .../main/scala/org/thp/thehive/dto/v1/Taxonomy.scala | 2 -- .../thp/thehive/models/TheHiveSchemaDefinition.scala | 2 +- .../app/org/thp/thehive/services/OrganisationSrv.scala | 2 +- thehive/app/org/thp/thehive/services/TaxonomySrv.scala | 10 +++++++++- 4 files changed, 11 insertions(+), 5 deletions(-) diff --git a/dto/src/main/scala/org/thp/thehive/dto/v1/Taxonomy.scala b/dto/src/main/scala/org/thp/thehive/dto/v1/Taxonomy.scala index f0ebfb9659..a73243a59e 100644 --- a/dto/src/main/scala/org/thp/thehive/dto/v1/Taxonomy.scala +++ b/dto/src/main/scala/org/thp/thehive/dto/v1/Taxonomy.scala @@ -22,8 +22,6 @@ case class InputEntry(predicate: String, entry: Seq[InputValue]) case class InputValue(value: String, expanded: String, colour: Option[String]) object InputEntry { - implicitly[FieldsParser[Option[Seq[InputEntry]]]] - implicit val parser: FieldsParser[InputEntry] = FieldsParser[InputEntry] implicit val writes: Writes[InputEntry] = Json.writes[InputEntry] diff --git a/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala b/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala index 5353be8aaf..1a436fba6f 100644 --- a/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala +++ b/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala @@ -94,7 +94,7 @@ class TheHiveSchemaDefinition @Inject() extends Schema with UpdatableSchema { 
db.labelFilter("Organisation")(Traversal.V()(g)).toIterator.toTry { o => val taxoVertex = g.addVertex("Taxonomy") taxoVertex.property("_label", "Taxonomy") - taxoVertex.property("_createdBy", "???") // TODO What user should be used ? + taxoVertex.property("_createdBy", "system@thehive.local") taxoVertex.property("_createdAt", new Date()) taxoVertex.property("namespace", "custom") taxoVertex.property("description", "Custom taxonomy") diff --git a/thehive/app/org/thp/thehive/services/OrganisationSrv.scala b/thehive/app/org/thp/thehive/services/OrganisationSrv.scala index 026f8a9554..e2a8c61068 100644 --- a/thehive/app/org/thp/thehive/services/OrganisationSrv.scala +++ b/thehive/app/org/thp/thehive/services/OrganisationSrv.scala @@ -52,7 +52,7 @@ class OrganisationSrv @Inject() ( def create(e: Organisation)(implicit graph: Graph, authContext: AuthContext): Try[Organisation with Entity] = for { createdOrganisation <- createEntity(e) - _ <- taxonomySrv.create(Taxonomy("custom", "Custom taxonomy", 1), Seq()) + _ <- taxonomySrv.createWithOrg(Taxonomy("custom", "Custom taxonomy", 1), Seq(), createdOrganisation) _ <- auditSrv.organisation.create(createdOrganisation, createdOrganisation.toJson) } yield createdOrganisation diff --git a/thehive/app/org/thp/thehive/services/TaxonomySrv.scala b/thehive/app/org/thp/thehive/services/TaxonomySrv.scala index 55117defea..28734aefb9 100644 --- a/thehive/app/org/thp/thehive/services/TaxonomySrv.scala +++ b/thehive/app/org/thp/thehive/services/TaxonomySrv.scala @@ -26,8 +26,16 @@ class TaxonomySrv @Inject() ( def create(taxo: Taxonomy, tags: Seq[Tag with Entity])(implicit graph: Graph, authContext: AuthContext): Try[RichTaxonomy] = for { - taxonomy <- createEntity(taxo) organisation <- organisationSrv.getOrFail(authContext.organisation) + richTaxonomy <- createWithOrg(taxo, tags, organisation) + } yield richTaxonomy + + def createWithOrg(taxo: Taxonomy, + tags: Seq[Tag with Entity], + organisation: Organisation with Entity) + (implicit graph: Graph, authContext: AuthContext): Try[RichTaxonomy] = + for { + taxonomy <- createEntity(taxo) _ <- organisationTaxonomySrv.create(OrganisationTaxonomy(), organisation, taxonomy) _ <- tags.toTry(t => taxonomyTagSrv.create(TaxonomyTag(), taxonomy, t)) richTaxonomy <- Try(RichTaxonomy(taxonomy, tags)) From 8a4d73eb5680a792dfaa3cba21ecbe09bca98271 Mon Sep 17 00:00:00 2001 From: Robin Riclet Date: Mon, 16 Nov 2020 19:02:21 +0100 Subject: [PATCH 28/93] WIP Added taxonomy activate / deactivate --- .../main/scala/org/thp/thehive/dto/v1/Taxonomy.scala | 2 +- .../org/thp/thehive/controllers/v1/Conversion.scala | 2 ++ .../app/org/thp/thehive/controllers/v1/Router.scala | 6 +++--- .../org/thp/thehive/controllers/v1/TaxonomyCtrl.scala | 10 +++++++++- thehive/app/org/thp/thehive/models/Taxonomy.scala | 4 +++- .../thp/thehive/models/TheHiveSchemaDefinition.scala | 3 ++- .../app/org/thp/thehive/services/OrganisationSrv.scala | 6 ++++-- thehive/app/org/thp/thehive/services/TaxonomySrv.scala | 10 +++++++++- 8 files changed, 33 insertions(+), 10 deletions(-) diff --git a/dto/src/main/scala/org/thp/thehive/dto/v1/Taxonomy.scala b/dto/src/main/scala/org/thp/thehive/dto/v1/Taxonomy.scala index a73243a59e..fe8eaa467c 100644 --- a/dto/src/main/scala/org/thp/thehive/dto/v1/Taxonomy.scala +++ b/dto/src/main/scala/org/thp/thehive/dto/v1/Taxonomy.scala @@ -8,7 +8,6 @@ import org.thp.scalligraph.InvalidFormatAttributeError import org.thp.scalligraph.controllers.{FObject, FSeq, FieldsParser, WithParser} import play.api.libs.json.{JsArray, JsObject, 
JsString, Json, OFormat, OWrites, Writes} -// TODO make sure of input format case class InputTaxonomy ( namespace: String, description: String, @@ -47,6 +46,7 @@ case class OutputTaxonomy( namespace: String, description: String, version: Int, + enabled: Boolean, predicates: Seq[String], values: Seq[OutputEntry] ) diff --git a/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala b/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala index c6905ff5cd..a3291bc422 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala @@ -260,6 +260,7 @@ object Conversion { .withFieldComputed(_.namespace, _.namespace) .withFieldComputed(_.description, _.description) .withFieldComputed(_.version, _.version) + .withFieldConst(_.enabled, false) // TODO always false when importing a taxonomy ? .transform } @@ -271,6 +272,7 @@ object Conversion { .withFieldComputed(_.namespace, _.namespace) .withFieldComputed(_.description, _.description) .withFieldComputed(_.version, _.version) + .withFieldComputed(_.enabled, _.enabled) .withFieldComputed(_.predicates, _.tags.map(_.predicate).distinct) .withFieldComputed(_.values, _.tags.foldLeft(Map[String, Seq[OutputValue]]())((entryMap, tag) => { val outputValues = entryMap.getOrElse(tag.predicate, Seq()) diff --git a/thehive/app/org/thp/thehive/controllers/v1/Router.scala b/thehive/app/org/thp/thehive/controllers/v1/Router.scala index 1683c010ad..7fd69f6291 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/Router.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/Router.scala @@ -94,9 +94,9 @@ class Router @Inject() ( case GET(p"/taxonomy") => taxonomyCtrl.list case GET(p"/taxonomy/$taxoId") => taxonomyCtrl.get(taxoId) case POST(p"/taxonomy") => taxonomyCtrl.create - // case POST(p"/taxonomy/import-zip") => taxonomyCtrl.importZip - // case PUT(p"/taxonomy/$taxoId/activate") => taxonomyCtrl.activate - // case PUT(p"/taxonomy/$taxoId/deactivate") => taxonomyCtrl.deactivate + // case POST(p"/taxonomy/import-zip") => taxonomyCtrl.importZip< + case PUT(p"/taxonomy/$taxoId/activate") => taxonomyCtrl.setEnabled(taxoId, isEnabled = true) + case PUT(p"/taxonomy/$taxoId/deactivate") => taxonomyCtrl.setEnabled(taxoId, isEnabled = false) case GET(p"/audit") => auditCtrl.flow // GET /flow controllers.AuditCtrl.flow(rootId: Option[String], count: Option[Int]) diff --git a/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala b/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala index 6a8220514c..b2b3ff7136 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala @@ -70,7 +70,7 @@ class TaxonomyCtrl @Inject() ( .authPermittedTransaction(db, Permissions.manageTaxonomy) { implicit request => implicit graph => val inputTaxo: InputTaxonomy = request.body("taxonomy") - val taxonomy = Taxonomy(inputTaxo.namespace, inputTaxo.description, inputTaxo.version) + val taxonomy = Taxonomy(inputTaxo.namespace, inputTaxo.description, inputTaxo.version, enabled = false) // Create tags val tagValues = inputTaxo.values.getOrElse(Seq()) @@ -108,6 +108,14 @@ class TaxonomyCtrl @Inject() ( .map(taxonomy => Results.Ok(taxonomy.toJson)) } + def setEnabled(taxonomyId: String, isEnabled: Boolean): Action[AnyContent] = + entrypoint("toggle taxonomy") + .authPermittedTransaction(db, Permissions.manageTaxonomy) { implicit request => implicit graph => + taxonomySrv + .setEnabled(EntityIdOrName(taxonomyId), 
isEnabled) + .map(_ => Results.NoContent) + } + /* def delete(namespace: String): Action[AnyContent] = entrypoint("delete taxonomy") diff --git a/thehive/app/org/thp/thehive/models/Taxonomy.scala b/thehive/app/org/thp/thehive/models/Taxonomy.scala index 925956d9cd..7a8f9a46c2 100644 --- a/thehive/app/org/thp/thehive/models/Taxonomy.scala +++ b/thehive/app/org/thp/thehive/models/Taxonomy.scala @@ -9,7 +9,8 @@ import org.thp.scalligraph.{BuildEdgeEntity, BuildVertexEntity, EntityId} case class Taxonomy( namespace: String, description: String, - version: Int + version: Int, + enabled: Boolean ) @BuildEdgeEntity[Taxonomy, Tag] @@ -27,4 +28,5 @@ case class RichTaxonomy( def namespace: String = taxonomy.namespace def description: String = taxonomy.description def version: Int = taxonomy.version + def enabled: Boolean = taxonomy.enabled } diff --git a/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala b/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala index 1a436fba6f..78c439feb8 100644 --- a/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala +++ b/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala @@ -99,6 +99,7 @@ class TheHiveSchemaDefinition @Inject() extends Schema with UpdatableSchema { taxoVertex.property("namespace", "custom") taxoVertex.property("description", "Custom taxonomy") taxoVertex.property("version", 1) + taxoVertex.property("enabled", true) o.addEdge("OrganisationTaxonomy", taxoVertex) Success(()) } @@ -107,7 +108,7 @@ class TheHiveSchemaDefinition @Inject() extends Schema with UpdatableSchema { .dbOperation[Database]("Add each tag to its Organisation's Custom taxonomy") { db => db.tryTransaction { implicit g => db.labelFilter("Organisation")(Traversal.V()).toIterator.toTry { o => - val customTaxo = Traversal.V(EntityId(o.id())).out("OrganisationTaxonomy").unsafeHas("namespace", "Custom").head + val customTaxo = Traversal.V(EntityId(o.id())).out("OrganisationTaxonomy").unsafeHas("namespace", "custom").head Traversal.V(EntityId(o.id())).unionFlat( _.out("OrganisationShare").out("ShareCase").out("CaseTag"), _.out("OrganisationShare").out("ShareObservable").out("ObservableTag"), diff --git a/thehive/app/org/thp/thehive/services/OrganisationSrv.scala b/thehive/app/org/thp/thehive/services/OrganisationSrv.scala index e2a8c61068..05f3889499 100644 --- a/thehive/app/org/thp/thehive/services/OrganisationSrv.scala +++ b/thehive/app/org/thp/thehive/services/OrganisationSrv.scala @@ -49,12 +49,14 @@ class OrganisationSrv @Inject() ( _ <- roleSrv.create(user, createdOrganisation, profileSrv.orgAdmin) } yield createdOrganisation - def create(e: Organisation)(implicit graph: Graph, authContext: AuthContext): Try[Organisation with Entity] = + def create(e: Organisation)(implicit graph: Graph, authContext: AuthContext): Try[Organisation with Entity] = { + val customTaxo = Taxonomy("custom", "Custom taxonomy", 1, enabled = true) for { createdOrganisation <- createEntity(e) - _ <- taxonomySrv.createWithOrg(Taxonomy("custom", "Custom taxonomy", 1), Seq(), createdOrganisation) + _ <- taxonomySrv.createWithOrg(customTaxo, Seq(), createdOrganisation) _ <- auditSrv.organisation.create(createdOrganisation, createdOrganisation.toJson) } yield createdOrganisation + } def current(implicit graph: Graph, authContext: AuthContext): Traversal.V[Organisation] = get(authContext.organisation) diff --git a/thehive/app/org/thp/thehive/services/TaxonomySrv.scala b/thehive/app/org/thp/thehive/services/TaxonomySrv.scala index 28734aefb9..42003e4f54 100644 --- 
a/thehive/app/org/thp/thehive/services/TaxonomySrv.scala +++ b/thehive/app/org/thp/thehive/services/TaxonomySrv.scala @@ -9,7 +9,7 @@ import org.thp.scalligraph.models.{Database, Entity} import org.thp.scalligraph.services.{EdgeSrv, VertexSrv} import org.thp.scalligraph.traversal.TraversalOps.TraversalOpsDefs import org.thp.scalligraph.traversal.{Converter, Traversal} -import org.thp.scalligraph.{EntityIdOrName, RichSeq} +import org.thp.scalligraph.{EntityId, EntityIdOrName, RichSeq} import org.thp.thehive.models._ import org.thp.thehive.services.OrganisationOps._ @@ -41,6 +41,11 @@ class TaxonomySrv @Inject() ( richTaxonomy <- Try(RichTaxonomy(taxonomy, tags)) } yield richTaxonomy + def setEnabled(taxonomyId: EntityIdOrName, isEnabled: Boolean)(implicit graph: Graph): Try[Unit] = + for { + _ <- get(taxonomyId).update(_.enabled, isEnabled).getOrFail("Taxonomy") + } yield () + /* def getByNamespace(namespace: String)(implicit graph: Graph): Traversal.V[Taxonomy] = @@ -52,6 +57,9 @@ class TaxonomySrv @Inject() ( object TaxonomyOps { implicit class TaxonomyOpsDefs(traversal: Traversal.V[Taxonomy]) { + def get(idOrName: EntityId): Traversal.V[Taxonomy] = + traversal.getByIds(idOrName) + def getByNamespace(namespace: String): Traversal.V[Taxonomy] = traversal.has(_.namespace, namespace) def visible(implicit authContext: AuthContext): Traversal.V[Taxonomy] = visible(authContext.organisation) From a092f15b515bfe7f2e376e7f6d28feabfdf8fa55 Mon Sep 17 00:00:00 2001 From: Robin Riclet Date: Mon, 16 Nov 2020 19:08:08 +0100 Subject: [PATCH 29/93] Fixed taxonomy values parsing --- .../main/scala/org/thp/thehive/dto/v1/Taxonomy.scala | 10 +--------- 1 file changed, 1 insertion(+), 9 deletions(-) diff --git a/dto/src/main/scala/org/thp/thehive/dto/v1/Taxonomy.scala b/dto/src/main/scala/org/thp/thehive/dto/v1/Taxonomy.scala index fe8eaa467c..20890915a5 100644 --- a/dto/src/main/scala/org/thp/thehive/dto/v1/Taxonomy.scala +++ b/dto/src/main/scala/org/thp/thehive/dto/v1/Taxonomy.scala @@ -2,11 +2,7 @@ package org.thp.thehive.dto.v1 import java.util.Date -import org.scalactic.Accumulation.convertGenTraversableOnceToValidatable -import org.scalactic.{Bad, Good, One} -import org.thp.scalligraph.InvalidFormatAttributeError -import org.thp.scalligraph.controllers.{FObject, FSeq, FieldsParser, WithParser} -import play.api.libs.json.{JsArray, JsObject, JsString, Json, OFormat, OWrites, Writes} +import play.api.libs.json.{Json, OFormat, OWrites, Writes} case class InputTaxonomy ( namespace: String, @@ -21,14 +17,10 @@ case class InputEntry(predicate: String, entry: Seq[InputValue]) case class InputValue(value: String, expanded: String, colour: Option[String]) object InputEntry { - implicit val parser: FieldsParser[InputEntry] = FieldsParser[InputEntry] - implicit val writes: Writes[InputEntry] = Json.writes[InputEntry] } object InputValue { - implicit val parser: FieldsParser[InputValue] = FieldsParser[InputValue] - implicit val writes: Writes[InputValue] = Json.writes[InputValue] } From 8863acb1ef31c8602ebbe2bdde8fc0a54b85f13c Mon Sep 17 00:00:00 2001 From: Robin Riclet Date: Tue, 17 Nov 2020 11:42:50 +0100 Subject: [PATCH 30/93] Idempotent TheHive schema --- .../thp/thehive/controllers/v1/Router.scala | 2 +- .../models/TheHiveSchemaDefinition.scala | 27 +++++++++++-------- .../thehive/controllers/v1/UserCtrlTest.scala | 1 + 3 files changed, 18 insertions(+), 12 deletions(-) diff --git a/thehive/app/org/thp/thehive/controllers/v1/Router.scala b/thehive/app/org/thp/thehive/controllers/v1/Router.scala index 
7fd69f6291..f3184dffdb 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/Router.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/Router.scala @@ -94,7 +94,7 @@ class Router @Inject() ( case GET(p"/taxonomy") => taxonomyCtrl.list case GET(p"/taxonomy/$taxoId") => taxonomyCtrl.get(taxoId) case POST(p"/taxonomy") => taxonomyCtrl.create - // case POST(p"/taxonomy/import-zip") => taxonomyCtrl.importZip< + // case POST(p"/taxonomy/import-zip") => taxonomyCtrl.importZip case PUT(p"/taxonomy/$taxoId/activate") => taxonomyCtrl.setEnabled(taxoId, isEnabled = true) case PUT(p"/taxonomy/$taxoId/deactivate") => taxonomyCtrl.setEnabled(taxoId, isEnabled = false) diff --git a/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala b/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala index 78c439feb8..297d6f35bb 100644 --- a/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala +++ b/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala @@ -91,17 +91,22 @@ class TheHiveSchemaDefinition @Inject() extends Schema with UpdatableSchema { .addVertexModel[String]("Taxonomy", Seq("namespace")) .dbOperation[Database]("Add Custom taxonomy vertex for each Organisation") { db => db.tryTransaction { g => - db.labelFilter("Organisation")(Traversal.V()(g)).toIterator.toTry { o => - val taxoVertex = g.addVertex("Taxonomy") - taxoVertex.property("_label", "Taxonomy") - taxoVertex.property("_createdBy", "system@thehive.local") - taxoVertex.property("_createdAt", new Date()) - taxoVertex.property("namespace", "custom") - taxoVertex.property("description", "Custom taxonomy") - taxoVertex.property("version", 1) - taxoVertex.property("enabled", true) - o.addEdge("OrganisationTaxonomy", taxoVertex) - Success(()) + // If there are no taxonomies in database, add a custom one for each organisation + db.labelFilter("Taxonomy")(Traversal.V()(g)).headOption match { + case None => + db.labelFilter("Organisation")(Traversal.V()(g)).toIterator.toTry { o => + val taxoVertex = g.addVertex("Taxonomy") + taxoVertex.property("_label", "Taxonomy") + taxoVertex.property("_createdBy", "system@thehive.local") + taxoVertex.property("_createdAt", new Date()) + taxoVertex.property("namespace", "custom") + taxoVertex.property("description", "Custom taxonomy") + taxoVertex.property("version", 1) + taxoVertex.property("enabled", true) + o.addEdge("OrganisationTaxonomy", taxoVertex) + Success(()) + } + case _ => Success(()) } }.map(_ => ()) } diff --git a/thehive/test/org/thp/thehive/controllers/v1/UserCtrlTest.scala b/thehive/test/org/thp/thehive/controllers/v1/UserCtrlTest.scala index 8a5773b794..e1831040c5 100644 --- a/thehive/test/org/thp/thehive/controllers/v1/UserCtrlTest.scala +++ b/thehive/test/org/thp/thehive/controllers/v1/UserCtrlTest.scala @@ -109,6 +109,7 @@ class UserCtrlTest extends PlaySpecification with TestAppBuilder { Permissions.managePage, Permissions.manageObservable, Permissions.manageAlert, + Permissions.manageTaxonomy, Permissions.manageAction, Permissions.manageConfig, Permissions.accessTheHiveFS From 2be85d8cd8246563424c6282ab59d175a1396830 Mon Sep 17 00:00:00 2001 From: Robin Riclet Date: Tue, 17 Nov 2020 14:23:35 +0100 Subject: [PATCH 31/93] Checked if taxonomy namespace is present before creating --- .../thp/thehive/controllers/v1/TaxonomyCtrl.scala | 15 +++++++++------ .../org/thp/thehive/services/TaxonomySrv.scala | 9 +++++++++ 2 files changed, 18 insertions(+), 6 deletions(-) diff --git a/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala 
b/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala index b2b3ff7136..844dbc311b 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala @@ -1,7 +1,7 @@ package org.thp.thehive.controllers.v1 import javax.inject.{Inject, Named} -import org.thp.scalligraph.{EntityIdOrName, RichSeq} +import org.thp.scalligraph.{CreateError, EntityIdOrName, RichSeq} import org.thp.scalligraph.controllers.{Entrypoint, FieldsParser} import org.thp.scalligraph.models.Database import org.thp.scalligraph.query._ @@ -15,7 +15,7 @@ import org.thp.thehive.services.TaxonomyOps._ import org.thp.thehive.services.{OrganisationSrv, TagSrv, TaxonomySrv} import play.api.mvc.{Action, AnyContent, Results} -import scala.util.Success +import scala.util.{Failure, Success} class TaxonomyCtrl @Inject() ( entrypoint: Entrypoint, @@ -91,10 +91,13 @@ class TaxonomyCtrl @Inject() ( Tag(inputTaxo.namespace, p, None, None, tagSrv.defaultColour) ) - for { - tagsEntities <- allTags.toTry(t => tagSrv.create(t)) - richTaxonomy <- taxonomySrv.create(taxonomy, tagsEntities) - } yield Results.Created(richTaxonomy.toJson) + if (taxonomySrv.existsInOrganisation(inputTaxo.namespace)) + Failure(CreateError("A taxonomy with this namespace already exists in this organisation")) + else + for { + tagsEntities <- allTags.toTry(t => tagSrv.create(t)) + richTaxonomy <- taxonomySrv.create(taxonomy, tagsEntities) + } yield Results.Created(richTaxonomy.toJson) } def get(taxonomyId: String): Action[AnyContent] = diff --git a/thehive/app/org/thp/thehive/services/TaxonomySrv.scala b/thehive/app/org/thp/thehive/services/TaxonomySrv.scala index 42003e4f54..49ee30d3d3 100644 --- a/thehive/app/org/thp/thehive/services/TaxonomySrv.scala +++ b/thehive/app/org/thp/thehive/services/TaxonomySrv.scala @@ -24,6 +24,15 @@ class TaxonomySrv @Inject() ( val taxonomyTagSrv = new EdgeSrv[TaxonomyTag, Taxonomy, Tag] val organisationTaxonomySrv = new EdgeSrv[OrganisationTaxonomy, Organisation, Taxonomy] + def existsInOrganisation(namespace: String)(implicit graph: Graph, authContext: AuthContext): Boolean = { + startTraversal + .has(_.namespace, namespace) + .in[OrganisationTaxonomy] + .v[Organisation] + .has(_.name, authContext.organisation.toString) // TODO not great + .exists + } + def create(taxo: Taxonomy, tags: Seq[Tag with Entity])(implicit graph: Graph, authContext: AuthContext): Try[RichTaxonomy] = for { organisation <- organisationSrv.getOrFail(authContext.organisation) From 54218aae48cedf1eeb84b6bf0beca92f07d8845c Mon Sep 17 00:00:00 2001 From: Robin Riclet Date: Tue, 17 Nov 2020 15:09:28 +0100 Subject: [PATCH 32/93] Correct output format for taxonomies --- .../scala/org/thp/thehive/dto/v1/Tag.scala | 15 ++++++++++++++ .../org/thp/thehive/dto/v1/Taxonomy.scala | 15 +------------- .../thehive/controllers/v1/Conversion.scala | 20 +++++++++++-------- thehive/app/org/thp/thehive/models/Tag.scala | 19 ++++++++++++++++-- .../app/org/thp/thehive/models/Taxonomy.scala | 2 +- .../thp/thehive/services/TaxonomySrv.scala | 10 ++-------- 6 files changed, 48 insertions(+), 33 deletions(-) create mode 100644 dto/src/main/scala/org/thp/thehive/dto/v1/Tag.scala diff --git a/dto/src/main/scala/org/thp/thehive/dto/v1/Tag.scala b/dto/src/main/scala/org/thp/thehive/dto/v1/Tag.scala new file mode 100644 index 0000000000..3b536c867c --- /dev/null +++ b/dto/src/main/scala/org/thp/thehive/dto/v1/Tag.scala @@ -0,0 +1,15 @@ +package org.thp.thehive.dto.v1 + +import play.api.libs.json.{Json, 
OFormat} + +case class OutputTag( + namespace: String, + predicate: String, + value: Option[String], + description: Option[String], + colour: Int +) + +object OutputTag { + implicit val format: OFormat[OutputTag] = Json.format[OutputTag] +} diff --git a/dto/src/main/scala/org/thp/thehive/dto/v1/Taxonomy.scala b/dto/src/main/scala/org/thp/thehive/dto/v1/Taxonomy.scala index 20890915a5..70f0b23208 100644 --- a/dto/src/main/scala/org/thp/thehive/dto/v1/Taxonomy.scala +++ b/dto/src/main/scala/org/thp/thehive/dto/v1/Taxonomy.scala @@ -39,22 +39,9 @@ case class OutputTaxonomy( description: String, version: Int, enabled: Boolean, - predicates: Seq[String], - values: Seq[OutputEntry] + tags: Seq[OutputTag] ) -case class OutputEntry(predicate: String, entry: Seq[OutputValue]) - -case class OutputValue(value: String, expanded: String) - object OutputTaxonomy { implicit val format: OFormat[OutputTaxonomy] = Json.format[OutputTaxonomy] } - -object OutputEntry { - implicit val format: OFormat[OutputEntry] = Json.format[OutputEntry] -} - -object OutputValue { - implicit val format: OFormat[OutputValue] = Json.format[OutputValue] -} \ No newline at end of file diff --git a/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala b/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala index a3291bc422..db5d575096 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala @@ -273,14 +273,18 @@ object Conversion { .withFieldComputed(_.description, _.description) .withFieldComputed(_.version, _.version) .withFieldComputed(_.enabled, _.enabled) - .withFieldComputed(_.predicates, _.tags.map(_.predicate).distinct) - .withFieldComputed(_.values, _.tags.foldLeft(Map[String, Seq[OutputValue]]())((entryMap, tag) => { - val outputValues = entryMap.getOrElse(tag.predicate, Seq()) - if (tag.value.isDefined) - entryMap + (tag.predicate -> (outputValues :+ OutputValue(tag.value.get, tag.description.getOrElse("")))) - else - entryMap + (tag.predicate -> outputValues) - }).map(e => OutputEntry(e._1, e._2)).toSeq) + .withFieldComputed(_.tags, _.tags.map(_.toOutput)) + .transform + ) + + implicit val tagOutput: Renderer.Aux[RichTag, OutputTag] = + Renderer.toJson[RichTag, OutputTag]( + _.into[OutputTag] + .withFieldComputed(_.namespace, _.namespace) + .withFieldComputed(_.predicate, _.predicate) + .withFieldComputed(_.value, _.value) + .withFieldComputed(_.description, _.description) + .withFieldComputed(_.colour, _.colour) .transform ) diff --git a/thehive/app/org/thp/thehive/models/Tag.scala b/thehive/app/org/thp/thehive/models/Tag.scala index e188ee45c2..cc97dc317e 100644 --- a/thehive/app/org/thp/thehive/models/Tag.scala +++ b/thehive/app/org/thp/thehive/models/Tag.scala @@ -1,7 +1,9 @@ package org.thp.thehive.models -import org.thp.scalligraph.BuildVertexEntity -import org.thp.scalligraph.models.{DefineIndex, IndexType} +import java.util.Date + +import org.thp.scalligraph.{BuildVertexEntity, EntityId} +import org.thp.scalligraph.models.{DefineIndex, Entity, IndexType} import play.api.Logger import scala.util.Try @@ -54,3 +56,16 @@ object Tag { } } } + +case class RichTag(tag: Tag with Entity) { + def _id: EntityId = tag._id + def _createdBy: String = tag._createdBy + def _updatedBy: Option[String] = tag._updatedBy + def _createdAt: Date = tag._createdAt + def _updatedAt: Option[Date] = tag._updatedAt + def namespace: String = tag.namespace + def predicate: String = tag.predicate + def value: Option[String] = tag.value + def 
description: Option[String] = tag.description + def colour: Int = tag.colour +} diff --git a/thehive/app/org/thp/thehive/models/Taxonomy.scala b/thehive/app/org/thp/thehive/models/Taxonomy.scala index 7a8f9a46c2..a7815963e6 100644 --- a/thehive/app/org/thp/thehive/models/Taxonomy.scala +++ b/thehive/app/org/thp/thehive/models/Taxonomy.scala @@ -18,7 +18,7 @@ case class TaxonomyTag() case class RichTaxonomy( taxonomy: Taxonomy with Entity, - tags: Seq[Tag with Entity] + tags: Seq[RichTag] ) { def _id: EntityId = taxonomy._id def _createdBy: String = taxonomy._createdBy diff --git a/thehive/app/org/thp/thehive/services/TaxonomySrv.scala b/thehive/app/org/thp/thehive/services/TaxonomySrv.scala index 49ee30d3d3..1b29f20081 100644 --- a/thehive/app/org/thp/thehive/services/TaxonomySrv.scala +++ b/thehive/app/org/thp/thehive/services/TaxonomySrv.scala @@ -47,7 +47,7 @@ class TaxonomySrv @Inject() ( taxonomy <- createEntity(taxo) _ <- organisationTaxonomySrv.create(OrganisationTaxonomy(), organisation, taxonomy) _ <- tags.toTry(t => taxonomyTagSrv.create(TaxonomyTag(), taxonomy, t)) - richTaxonomy <- Try(RichTaxonomy(taxonomy, tags)) + richTaxonomy <- Try(RichTaxonomy(taxonomy, tags.map(RichTag))) } yield richTaxonomy def setEnabled(taxonomyId: EntityIdOrName, isEnabled: Boolean)(implicit graph: Graph): Try[Unit] = @@ -55,12 +55,6 @@ class TaxonomySrv @Inject() ( _ <- get(taxonomyId).update(_.enabled, isEnabled).getOrFail("Taxonomy") } yield () -/* - - def getByNamespace(namespace: String)(implicit graph: Graph): Traversal.V[Taxonomy] = - Try(startTraversal.getByNamespace(namespace)).getOrElse(startTraversal.limit(0)) -*/ - } object TaxonomyOps { @@ -86,6 +80,6 @@ object TaxonomyOps { _.by .by(_.tags.fold) ) - .domainMap { case (taxonomy, tags) => RichTaxonomy(taxonomy, tags) } + .domainMap { case (taxonomy, tags) => RichTaxonomy(taxonomy, tags.map(RichTag)) } } } From db3a94456150cc7f0a85cf1f03fb769d97f1eb91 Mon Sep 17 00:00:00 2001 From: Robin Riclet Date: Tue, 17 Nov 2020 15:31:29 +0100 Subject: [PATCH 33/93] Query for taxonomies --- thehive/app/org/thp/thehive/controllers/v1/Router.scala | 3 +-- .../org/thp/thehive/controllers/v1/TheHiveQueryExecutor.scala | 4 +++- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/thehive/app/org/thp/thehive/controllers/v1/Router.scala b/thehive/app/org/thp/thehive/controllers/v1/Router.scala index f3184dffdb..1d49363c75 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/Router.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/Router.scala @@ -91,10 +91,9 @@ class Router @Inject() ( // DELETE /alert/:alertId controllers.AlertCtrl.delete(alertId) // POST /alert/:alertId/merge/:caseId controllers.AlertCtrl.mergeWithCase(alertId, caseId) - case GET(p"/taxonomy") => taxonomyCtrl.list - case GET(p"/taxonomy/$taxoId") => taxonomyCtrl.get(taxoId) case POST(p"/taxonomy") => taxonomyCtrl.create // case POST(p"/taxonomy/import-zip") => taxonomyCtrl.importZip + case GET(p"/taxonomy/$taxoId") => taxonomyCtrl.get(taxoId) case PUT(p"/taxonomy/$taxoId/activate") => taxonomyCtrl.setEnabled(taxoId, isEnabled = true) case PUT(p"/taxonomy/$taxoId/deactivate") => taxonomyCtrl.setEnabled(taxoId, isEnabled = false) diff --git a/thehive/app/org/thp/thehive/controllers/v1/TheHiveQueryExecutor.scala b/thehive/app/org/thp/thehive/controllers/v1/TheHiveQueryExecutor.scala index bbc3b86b81..27fed93b12 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/TheHiveQueryExecutor.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/TheHiveQueryExecutor.scala @@ 
-32,6 +32,7 @@ class TheHiveQueryExecutor @Inject() ( profileCtrl: ProfileCtrl, taskCtrl: TaskCtrl, userCtrl: UserCtrl, + taxonomyCtrl: TaxonomyCtrl, // dashboardCtrl: DashboardCtrl, properties: Properties, @Named("with-thehive-schema") implicit val db: Database @@ -53,7 +54,8 @@ class TheHiveQueryExecutor @Inject() ( profileCtrl, // tagCtrl, taskCtrl, - userCtrl + userCtrl, + taxonomyCtrl ) override val version: (Int, Int) = 1 -> 1 From c5e138538e70fac085fbb1eb6e74be9ad0e6b9d8 Mon Sep 17 00:00:00 2001 From: Robin Riclet Date: Wed, 18 Nov 2020 12:13:01 +0100 Subject: [PATCH 34/93] Basic zip import --- .../org/thp/thehive/dto/v1/Taxonomy.scala | 49 +++++++--- .../thp/thehive/controllers/v1/Router.scala | 2 +- .../thehive/controllers/v1/TaxonomyCtrl.scala | 91 +++++++++++-------- 3 files changed, 90 insertions(+), 52 deletions(-) diff --git a/dto/src/main/scala/org/thp/thehive/dto/v1/Taxonomy.scala b/dto/src/main/scala/org/thp/thehive/dto/v1/Taxonomy.scala index 70f0b23208..576683127a 100644 --- a/dto/src/main/scala/org/thp/thehive/dto/v1/Taxonomy.scala +++ b/dto/src/main/scala/org/thp/thehive/dto/v1/Taxonomy.scala @@ -2,30 +2,57 @@ package org.thp.thehive.dto.v1 import java.util.Date -import play.api.libs.json.{Json, OFormat, OWrites, Writes} +import play.api.libs.json.{Json, OFormat} -case class InputTaxonomy ( +/* +Format based on : +https://tools.ietf.org/id/draft-dulaunoy-misp-taxonomy-format-04.html +*/ + +case class InputTaxonomy( namespace: String, description: String, version: Int, - predicates: Seq[String], - values: Option[Seq[InputEntry]] + `type`: Option[Seq[String]], + exclusive: Option[Boolean], + predicates: Seq[InputPredicate], + values: Option[Seq[InputValue]] +) + +case class InputPredicate( + value: String, + expanded: Option[String], + exclusive: Option[Boolean], + description: Option[String] ) -case class InputEntry(predicate: String, entry: Seq[InputValue]) +case class InputValue( + predicate: String, + entry: Seq[InputEntry] +) -case class InputValue(value: String, expanded: String, colour: Option[String]) +case class InputEntry( + value: String, + expanded: Option[String], + colour: Option[String], + description: Option[String], + numerical_value: Option[Int] +) -object InputEntry { - implicit val writes: Writes[InputEntry] = Json.writes[InputEntry] +object InputTaxonomy { + implicit val format: OFormat[InputTaxonomy] = Json.format[InputTaxonomy] +} + +object InputPredicate { + implicit val format: OFormat[InputPredicate] = Json.format[InputPredicate] } object InputValue { - implicit val writes: Writes[InputValue] = Json.writes[InputValue] + implicit val format: OFormat[InputValue] = Json.format[InputValue] } -object InputTaxonomy { - implicit val writes: OWrites[InputTaxonomy] = Json.writes[InputTaxonomy] +object InputEntry { + implicit val format: OFormat[InputEntry] = Json.format[InputEntry] } case class OutputTaxonomy( diff --git a/thehive/app/org/thp/thehive/controllers/v1/Router.scala b/thehive/app/org/thp/thehive/controllers/v1/Router.scala index 1d49363c75..324df8eb8d 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/Router.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/Router.scala @@ -92,7 +92,7 @@ class Router @Inject() ( // POST /alert/:alertId/merge/:caseId controllers.AlertCtrl.mergeWithCase(alertId, caseId) case POST(p"/taxonomy") => taxonomyCtrl.create - // case POST(p"/taxonomy/import-zip") => taxonomyCtrl.importZip + case POST(p"/taxonomy/import-zip") => taxonomyCtrl.importZip case GET(p"/taxonomy/$taxoId") => taxonomyCtrl.get(taxoId) 
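The reworked InputTaxonomy above follows the MISP machinetag layout referenced in the comment (draft-dulaunoy-misp-taxonomy-format). As an illustration only, with a made-up namespace and values, a minimal machinetag document decodes through the OFormat instances declared in this DTO:

import org.thp.thehive.dto.v1.InputTaxonomy
import play.api.libs.json.Json

// Hypothetical minimal machinetag.json; optional fields (`type`, exclusive, values, ...) are simply omitted
val machineTag = Json.parse(
  """{
    |  "namespace": "tlp",
    |  "description": "Traffic Light Protocol",
    |  "version": 1,
    |  "predicates": [
    |    { "value": "red",   "expanded": "TLP:RED" },
    |    { "value": "amber", "expanded": "TLP:AMBER" }
    |  ]
    |}""".stripMargin
)

// Relies on the implicit OFormat[InputTaxonomy] defined in the companion object above
val taxonomy: InputTaxonomy = machineTag.as[InputTaxonomy]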
case PUT(p"/taxonomy/$taxoId/activate") => taxonomyCtrl.setEnabled(taxoId, isEnabled = true) case PUT(p"/taxonomy/$taxoId/deactivate") => taxonomyCtrl.setEnabled(taxoId, isEnabled = false) diff --git a/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala b/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala index 844dbc311b..5946240540 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala @@ -1,21 +1,25 @@ package org.thp.thehive.controllers.v1 import javax.inject.{Inject, Named} -import org.thp.scalligraph.{CreateError, EntityIdOrName, RichSeq} -import org.thp.scalligraph.controllers.{Entrypoint, FieldsParser} +import net.lingala.zip4j.ZipFile +import org.apache.tinkerpop.gremlin.structure.Graph +import org.thp.scalligraph.auth.AuthContext +import org.thp.scalligraph.controllers.{Entrypoint, FFile, FieldsParser} import org.thp.scalligraph.models.Database import org.thp.scalligraph.query._ -import org.thp.scalligraph.traversal.TraversalOps.TraversalOpsDefs +import org.thp.scalligraph.traversal.TraversalOps.{TraversalOpsDefs, logger} import org.thp.scalligraph.traversal.{IteratorOutput, Traversal} +import org.thp.scalligraph.{CreateError, EntityIdOrName, RichSeq} import org.thp.thehive.controllers.v1.Conversion._ import org.thp.thehive.dto.v1.InputTaxonomy import org.thp.thehive.models.{Permissions, RichTaxonomy, Tag, Taxonomy} import org.thp.thehive.services.OrganisationOps._ import org.thp.thehive.services.TaxonomyOps._ import org.thp.thehive.services.{OrganisationSrv, TagSrv, TaxonomySrv} +import play.api.libs.json.Json import play.api.mvc.{Action, AnyContent, Results} -import scala.util.{Failure, Success} +import scala.util.{Failure, Success, Try} class TaxonomyCtrl @Inject() ( entrypoint: Entrypoint, @@ -68,36 +72,54 @@ class TaxonomyCtrl @Inject() ( entrypoint("import taxonomy") .extract("taxonomy", FieldsParser[InputTaxonomy]) .authPermittedTransaction(db, Permissions.manageTaxonomy) { implicit request => implicit graph => - val inputTaxo: InputTaxonomy = request.body("taxonomy") + for { + richTaxonomy <- createFromInput(request.body("taxonomy")) + } yield Results.Created(richTaxonomy.toJson) + } - val taxonomy = Taxonomy(inputTaxo.namespace, inputTaxo.description, inputTaxo.version, enabled = false) + def importZip: Action[AnyContent] = + entrypoint("import taxonomies zip") + .extract("file", FieldsParser.file.on("file")) + .authPermittedTransaction(db, Permissions.manageTaxonomy) { implicit request => implicit graph => + val file: FFile = request.body("file") + val zipFile = new ZipFile(file.filepath.toString) + zipFile.getFileHeaders.stream.forEach { fileHeader => + val json = Json.parse(zipFile.getInputStream(fileHeader)) + createFromInput(json.as[InputTaxonomy]) + } + + Success(Results.NoContent) + } - // Create tags - val tagValues = inputTaxo.values.getOrElse(Seq()) - val tags = tagValues.foldLeft(Seq[Tag]())((all, value) => { - all ++ value.entry.map(e => - Tag(inputTaxo.namespace, - value.predicate, - Some(e.value), - Some(e.expanded), - e.colour.map(tagSrv.parseTagColour).getOrElse(tagSrv.defaultColour) - ) - ) - }) + private def createFromInput(inputTaxo: InputTaxonomy)(implicit graph: Graph, authContext: AuthContext): Try[RichTaxonomy] = { + val taxonomy = Taxonomy(inputTaxo.namespace, inputTaxo.description, inputTaxo.version, enabled = false) - // Create a tag for predicates with no tags associated - val predicateWithNoTags = 
inputTaxo.predicates.diff(tagValues.map(_.predicate)) - val allTags = tags ++ predicateWithNoTags.map(p => - Tag(inputTaxo.namespace, p, None, None, tagSrv.defaultColour) + // Create tags + val tagValues = inputTaxo.values.getOrElse(Seq()) + val tags = tagValues.foldLeft(Seq[Tag]())((all, value) => { + all ++ value.entry.map(e => + Tag(inputTaxo.namespace, + value.predicate, + Some(e.value), + e.expanded, + e.colour.map(tagSrv.parseTagColour).getOrElse(tagSrv.defaultColour) ) + ) + }) - if (taxonomySrv.existsInOrganisation(inputTaxo.namespace)) - Failure(CreateError("A taxonomy with this namespace already exists in this organisation")) - else - for { - tagsEntities <- allTags.toTry(t => tagSrv.create(t)) - richTaxonomy <- taxonomySrv.create(taxonomy, tagsEntities) - } yield Results.Created(richTaxonomy.toJson) + // Create a tag for predicates with no tags associated + val predicateWithNoTags = inputTaxo.predicates.diff(tagValues.map(_.predicate)) + val allTags = tags ++ predicateWithNoTags.map(p => + Tag(inputTaxo.namespace, p.value, None, None, tagSrv.defaultColour) + ) + + if (taxonomySrv.existsInOrganisation(inputTaxo.namespace)) + Failure(CreateError("A taxonomy with this namespace already exists in this organisation")) + else + for { + tagsEntities <- allTags.toTry(t => tagSrv.create(t)) + richTaxonomy <- taxonomySrv.create(taxonomy, tagsEntities) + } yield richTaxonomy } def get(taxonomyId: String): Action[AnyContent] = @@ -119,15 +141,4 @@ class TaxonomyCtrl @Inject() ( .map(_ => Results.NoContent) } -/* - def delete(namespace: String): Action[AnyContent] = - entrypoint("delete taxonomy") - .authTransaction(db) { implicit request => implicit graph => - for { - t <- taxonomySrv.getByNamespace(namespace) - - } yield Results.Nocontent - } -*/ - } From 120823564392bf32c09f8aa30068f731eaab4552 Mon Sep 17 00:00:00 2001 From: Robin Riclet Date: Wed, 18 Nov 2020 17:32:51 +0100 Subject: [PATCH 35/93] Handled zip import errors --- .../thehive/controllers/v1/Conversion.scala | 14 +-------- .../thehive/controllers/v1/Properties.scala | 3 +- .../thehive/controllers/v1/TaxonomyCtrl.scala | 31 +++++++++++++------ .../thp/thehive/services/TaxonomySrv.scala | 2 +- 4 files changed, 25 insertions(+), 25 deletions(-) diff --git a/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala b/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala index db5d575096..aef3fc1eea 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala @@ -257,10 +257,7 @@ object Conversion { def toTaxonomy: Taxonomy = inputTaxonomy .into[Taxonomy] - .withFieldComputed(_.namespace, _.namespace) - .withFieldComputed(_.description, _.description) - .withFieldComputed(_.version, _.version) - .withFieldConst(_.enabled, false) // TODO always false when importing a taxonomy ? 
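The importZip action added in the previous patch walks every entry of the uploaded archive, and this patch makes malformed entries fail with an explicit error instead of throwing. A standalone sketch of the same zip4j reading pattern, assuming zip4j 2.x as used by the patch and a made-up archive path:

import net.lingala.zip4j.ZipFile
import net.lingala.zip4j.model.FileHeader
import play.api.libs.json.{JsValue, Json}
import scala.collection.JavaConverters._
import scala.util.{Failure, Try}

val zipFile = new ZipFile("taxonomies.zip") // hypothetical archive
val headers: Seq[FileHeader] = zipFile.getFileHeaders.asScala.toSeq

// Parse every entry as JSON, converting parse errors into readable failures
// rather than letting the exception escape the loop
val parsed: Seq[Try[JsValue]] = headers.map { header =>
  Try(Json.parse(zipFile.getInputStream(header))).recoverWith {
    case _ => Failure(new IllegalArgumentException(s"File '${header.getFileName}' is not valid JSON"))
  }
}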
+ .withFieldConst(_.enabled, false) .transform } @@ -269,10 +266,6 @@ object Conversion { _.into[OutputTaxonomy] .withFieldComputed(_._id, _._id.toString) .withFieldConst(_._type, "Taxonomy") - .withFieldComputed(_.namespace, _.namespace) - .withFieldComputed(_.description, _.description) - .withFieldComputed(_.version, _.version) - .withFieldComputed(_.enabled, _.enabled) .withFieldComputed(_.tags, _.tags.map(_.toOutput)) .transform ) @@ -280,11 +273,6 @@ object Conversion { implicit val tagOutput: Renderer.Aux[RichTag, OutputTag] = Renderer.toJson[RichTag, OutputTag]( _.into[OutputTag] - .withFieldComputed(_.namespace, _.namespace) - .withFieldComputed(_.predicate, _.predicate) - .withFieldComputed(_.value, _.value) - .withFieldComputed(_.description, _.description) - .withFieldComputed(_.colour, _.colour) .transform ) diff --git a/thehive/app/org/thp/thehive/controllers/v1/Properties.scala b/thehive/app/org/thp/thehive/controllers/v1/Properties.scala index 9832b6b6f3..e8095fa781 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/Properties.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/Properties.scala @@ -497,8 +497,7 @@ class Properties @Inject() ( .property("namespace", UMapping.string)(_.field.readonly) .property("description", UMapping.string)(_.field.readonly) .property("version", UMapping.int)(_.field.readonly) - // Predicates ? - // Values ? + .property("enabled", UMapping.boolean)(_.field.readonly) .build } diff --git a/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala b/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala index 5946240540..5a48e28d8a 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala @@ -2,14 +2,15 @@ package org.thp.thehive.controllers.v1 import javax.inject.{Inject, Named} import net.lingala.zip4j.ZipFile +import net.lingala.zip4j.model.FileHeader import org.apache.tinkerpop.gremlin.structure.Graph import org.thp.scalligraph.auth.AuthContext import org.thp.scalligraph.controllers.{Entrypoint, FFile, FieldsParser} import org.thp.scalligraph.models.Database import org.thp.scalligraph.query._ -import org.thp.scalligraph.traversal.TraversalOps.{TraversalOpsDefs, logger} +import org.thp.scalligraph.traversal.TraversalOps.TraversalOpsDefs import org.thp.scalligraph.traversal.{IteratorOutput, Traversal} -import org.thp.scalligraph.{CreateError, EntityIdOrName, RichSeq} +import org.thp.scalligraph.{BadRequestError, EntityIdOrName, RichSeq} import org.thp.thehive.controllers.v1.Conversion._ import org.thp.thehive.dto.v1.InputTaxonomy import org.thp.thehive.models.{Permissions, RichTaxonomy, Tag, Taxonomy} @@ -19,6 +20,7 @@ import org.thp.thehive.services.{OrganisationSrv, TagSrv, TaxonomySrv} import play.api.libs.json.Json import play.api.mvc.{Action, AnyContent, Results} +import scala.collection.JavaConverters._ import scala.util.{Failure, Success, Try} class TaxonomyCtrl @Inject() ( @@ -80,17 +82,28 @@ class TaxonomyCtrl @Inject() ( def importZip: Action[AnyContent] = entrypoint("import taxonomies zip") .extract("file", FieldsParser.file.on("file")) - .authPermittedTransaction(db, Permissions.manageTaxonomy) { implicit request => implicit graph => + .authPermitted(Permissions.manageTaxonomy) { implicit request => val file: FFile = request.body("file") val zipFile = new ZipFile(file.filepath.toString) - zipFile.getFileHeaders.stream.forEach { fileHeader => - val json = Json.parse(zipFile.getInputStream(fileHeader)) - 
createFromInput(json.as[InputTaxonomy]) - } + val headers = zipFile + .getFileHeaders + .iterator() + .asScala - Success(Results.NoContent) + for { + inputTaxos <- headers.toTry(h => parseJsonFile(zipFile, h)) + richTaxos <- db.tryTransaction { implicit graph => + inputTaxos.toTry(inputTaxo => createFromInput(inputTaxo)).map(_.toJson) + } + } yield Results.Created(richTaxos) } + private def parseJsonFile(zipFile: ZipFile, h: FileHeader): Try[InputTaxonomy] = { + Try(Json.parse(zipFile.getInputStream(h)).as[InputTaxonomy]).recoverWith { + case _ => Failure(BadRequestError(s"File '${h.getFileName}' does not comply with the MISP taxonomy formatting")) + } + } + private def createFromInput(inputTaxo: InputTaxonomy)(implicit graph: Graph, authContext: AuthContext): Try[RichTaxonomy] = { val taxonomy = Taxonomy(inputTaxo.namespace, inputTaxo.description, inputTaxo.version, enabled = false) @@ -114,7 +127,7 @@ class TaxonomyCtrl @Inject() ( ) if (taxonomySrv.existsInOrganisation(inputTaxo.namespace)) - Failure(CreateError("A taxonomy with this namespace already exists in this organisation")) + Failure(BadRequestError(s"A taxonomy with namespace '${inputTaxo.namespace}' already exists in this organisation")) else for { tagsEntities <- allTags.toTry(t => tagSrv.create(t)) diff --git a/thehive/app/org/thp/thehive/services/TaxonomySrv.scala b/thehive/app/org/thp/thehive/services/TaxonomySrv.scala index 1b29f20081..062d96b4e4 100644 --- a/thehive/app/org/thp/thehive/services/TaxonomySrv.scala +++ b/thehive/app/org/thp/thehive/services/TaxonomySrv.scala @@ -29,7 +29,7 @@ class TaxonomySrv @Inject() ( .has(_.namespace, namespace) .in[OrganisationTaxonomy] .v[Organisation] - .has(_.name, authContext.organisation.toString) // TODO not great + .current .exists } From de6f2df5f875ca5bbe8d04f5ce723d416627356f Mon Sep 17 00:00:00 2001 From: Robin Riclet Date: Thu, 19 Nov 2020 11:18:59 +0100 Subject: [PATCH 36/93] Review changes --- .../thp/thehive/controllers/v1/Conversion.scala | 4 ++-- .../thp/thehive/controllers/v1/TaxonomyCtrl.scala | 8 +++----- thehive/app/org/thp/thehive/models/Tag.scala | 13 ------------- thehive/app/org/thp/thehive/models/Taxonomy.scala | 2 +- .../thehive/models/TheHiveSchemaDefinition.scala | 14 +++++++------- .../app/org/thp/thehive/services/TaxonomySrv.scala | 4 ++-- 6 files changed, 15 insertions(+), 30 deletions(-) diff --git a/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala b/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala index aef3fc1eea..6ba56b3938 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala @@ -270,8 +270,8 @@ object Conversion { .transform ) - implicit val tagOutput: Renderer.Aux[RichTag, OutputTag] = - Renderer.toJson[RichTag, OutputTag]( + implicit val tagOutput: Renderer.Aux[Tag, OutputTag] = + Renderer.toJson[Tag, OutputTag]( _.into[OutputTag] .transform ) diff --git a/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala b/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala index 5a48e28d8a..10c91d6974 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala @@ -105,12 +105,10 @@ class TaxonomyCtrl @Inject() ( } private def createFromInput(inputTaxo: InputTaxonomy)(implicit graph: Graph, authContext: AuthContext): Try[RichTaxonomy] = { - val taxonomy = Taxonomy(inputTaxo.namespace, inputTaxo.description, inputTaxo.version, enabled = false) - // 
Create tags val tagValues = inputTaxo.values.getOrElse(Seq()) - val tags = tagValues.foldLeft(Seq[Tag]())((all, value) => { - all ++ value.entry.map(e => + val tags = tagValues.flatMap(value => { + value.entry.map(e => Tag(inputTaxo.namespace, value.predicate, Some(e.value), @@ -131,7 +129,7 @@ class TaxonomyCtrl @Inject() ( else for { tagsEntities <- allTags.toTry(t => tagSrv.create(t)) - richTaxonomy <- taxonomySrv.create(taxonomy, tagsEntities) + richTaxonomy <- taxonomySrv.create(inputTaxo.toTaxonomy, tagsEntities) } yield richTaxonomy } diff --git a/thehive/app/org/thp/thehive/models/Tag.scala b/thehive/app/org/thp/thehive/models/Tag.scala index cc97dc317e..3ad58979a5 100644 --- a/thehive/app/org/thp/thehive/models/Tag.scala +++ b/thehive/app/org/thp/thehive/models/Tag.scala @@ -56,16 +56,3 @@ object Tag { } } } - -case class RichTag(tag: Tag with Entity) { - def _id: EntityId = tag._id - def _createdBy: String = tag._createdBy - def _updatedBy: Option[String] = tag._updatedBy - def _createdAt: Date = tag._createdAt - def _updatedAt: Option[Date] = tag._updatedAt - def namespace: String = tag.namespace - def predicate: String = tag.predicate - def value: Option[String] = tag.value - def description: Option[String] = tag.description - def colour: Int = tag.colour -} diff --git a/thehive/app/org/thp/thehive/models/Taxonomy.scala b/thehive/app/org/thp/thehive/models/Taxonomy.scala index a7815963e6..bc4fb1a6d4 100644 --- a/thehive/app/org/thp/thehive/models/Taxonomy.scala +++ b/thehive/app/org/thp/thehive/models/Taxonomy.scala @@ -18,7 +18,7 @@ case class TaxonomyTag() case class RichTaxonomy( taxonomy: Taxonomy with Entity, - tags: Seq[RichTag] + tags: Seq[Tag] ) { def _id: EntityId = taxonomy._id def _createdBy: String = taxonomy._createdBy diff --git a/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala b/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala index 297d6f35bb..6a206703e9 100644 --- a/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala +++ b/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala @@ -90,11 +90,11 @@ class TheHiveSchemaDefinition @Inject() extends Schema with UpdatableSchema { // Taxonomies .addVertexModel[String]("Taxonomy", Seq("namespace")) .dbOperation[Database]("Add Custom taxonomy vertex for each Organisation") { db => - db.tryTransaction { g => - // If there are no taxonomies in database, add a custom one for each organisation - db.labelFilter("Taxonomy")(Traversal.V()(g)).headOption match { - case None => - db.labelFilter("Organisation")(Traversal.V()(g)).toIterator.toTry { o => + db.tryTransaction { implicit g => + // For each organisation, if there is no custom taxonomy, create it + db.labelFilter("Organisation")(Traversal.V()).toIterator.toTry { o => + Traversal.V(EntityId(o.id)).out[OrganisationTaxonomy].v[Taxonomy].unsafeHas("namespace", "custom").headOption match { + case None => val taxoVertex = g.addVertex("Taxonomy") taxoVertex.property("_label", "Taxonomy") taxoVertex.property("_createdBy", "system@thehive.local") @@ -105,8 +105,8 @@ class TheHiveSchemaDefinition @Inject() extends Schema with UpdatableSchema { taxoVertex.property("enabled", true) o.addEdge("OrganisationTaxonomy", taxoVertex) Success(()) - } - case _ => Success(()) + case _ => Success(()) + } } }.map(_ => ()) } diff --git a/thehive/app/org/thp/thehive/services/TaxonomySrv.scala b/thehive/app/org/thp/thehive/services/TaxonomySrv.scala index 062d96b4e4..4b4b7c28a2 100644 --- 
a/thehive/app/org/thp/thehive/services/TaxonomySrv.scala +++ b/thehive/app/org/thp/thehive/services/TaxonomySrv.scala @@ -47,7 +47,7 @@ class TaxonomySrv @Inject() ( taxonomy <- createEntity(taxo) _ <- organisationTaxonomySrv.create(OrganisationTaxonomy(), organisation, taxonomy) _ <- tags.toTry(t => taxonomyTagSrv.create(TaxonomyTag(), taxonomy, t)) - richTaxonomy <- Try(RichTaxonomy(taxonomy, tags.map(RichTag))) + richTaxonomy <- Try(RichTaxonomy(taxonomy, tags)) } yield richTaxonomy def setEnabled(taxonomyId: EntityIdOrName, isEnabled: Boolean)(implicit graph: Graph): Try[Unit] = @@ -80,6 +80,6 @@ object TaxonomyOps { _.by .by(_.tags.fold) ) - .domainMap { case (taxonomy, tags) => RichTaxonomy(taxonomy, tags.map(RichTag)) } + .domainMap { case (taxonomy, tags) => RichTaxonomy(taxonomy, tags) } } } From a0a4ca4291664b2d3be6080085a6ad0709d0f8cc Mon Sep 17 00:00:00 2001 From: Robin Riclet Date: Tue, 24 Nov 2020 09:46:17 +0100 Subject: [PATCH 37/93] Added (de)activation & deletion --- .../org/thp/thehive/dto/v1/Taxonomy.scala | 1 - .../thehive/controllers/v1/Conversion.scala | 1 - .../thehive/controllers/v1/Properties.scala | 1 - .../thp/thehive/controllers/v1/Router.scala | 5 +-- .../thehive/controllers/v1/TaxonomyCtrl.scala | 31 ++++++++++++++----- .../org/thp/thehive/models/Permissions.scala | 2 +- .../app/org/thp/thehive/models/Taxonomy.scala | 4 +-- .../models/TheHiveSchemaDefinition.scala | 19 ++++++++++-- .../thehive/services/OrganisationSrv.scala | 13 +++++--- .../thp/thehive/services/TaxonomySrv.scala | 27 +++++++++++++--- 10 files changed, 76 insertions(+), 28 deletions(-) diff --git a/dto/src/main/scala/org/thp/thehive/dto/v1/Taxonomy.scala b/dto/src/main/scala/org/thp/thehive/dto/v1/Taxonomy.scala index 576683127a..7081347184 100644 --- a/dto/src/main/scala/org/thp/thehive/dto/v1/Taxonomy.scala +++ b/dto/src/main/scala/org/thp/thehive/dto/v1/Taxonomy.scala @@ -65,7 +65,6 @@ case class OutputTaxonomy( namespace: String, description: String, version: Int, - enabled: Boolean, tags: Seq[OutputTag] ) diff --git a/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala b/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala index 6ba56b3938..d2518f459f 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala @@ -257,7 +257,6 @@ object Conversion { def toTaxonomy: Taxonomy = inputTaxonomy .into[Taxonomy] - .withFieldConst(_.enabled, false) .transform } diff --git a/thehive/app/org/thp/thehive/controllers/v1/Properties.scala b/thehive/app/org/thp/thehive/controllers/v1/Properties.scala index e8095fa781..2799630705 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/Properties.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/Properties.scala @@ -497,7 +497,6 @@ class Properties @Inject() ( .property("namespace", UMapping.string)(_.field.readonly) .property("description", UMapping.string)(_.field.readonly) .property("version", UMapping.int)(_.field.readonly) - .property("enabled", UMapping.boolean)(_.field.readonly) .build } diff --git a/thehive/app/org/thp/thehive/controllers/v1/Router.scala b/thehive/app/org/thp/thehive/controllers/v1/Router.scala index 324df8eb8d..f3bd9e882b 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/Router.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/Router.scala @@ -94,8 +94,9 @@ class Router @Inject() ( case POST(p"/taxonomy") => taxonomyCtrl.create case POST(p"/taxonomy/import-zip") => taxonomyCtrl.importZip case GET(p"/taxonomy/$taxoId") => 
taxonomyCtrl.get(taxoId) - case PUT(p"/taxonomy/$taxoId/activate") => taxonomyCtrl.setEnabled(taxoId, isEnabled = true) - case PUT(p"/taxonomy/$taxoId/deactivate") => taxonomyCtrl.setEnabled(taxoId, isEnabled = false) + case PUT(p"/taxonomy/$taxoId/activate") => taxonomyCtrl.toggleActivation(taxoId, isActive = true) + case PUT(p"/taxonomy/$taxoId/deactivate") => taxonomyCtrl.toggleActivation(taxoId, isActive = false) + case DELETE(p"/taxonomy/$taxoId") => taxonomyCtrl.delete(taxoId) case GET(p"/audit") => auditCtrl.flow // GET /flow controllers.AuditCtrl.flow(rootId: Option[String], count: Option[Int]) diff --git a/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala b/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala index 10c91d6974..e81c47a098 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala @@ -119,12 +119,16 @@ class TaxonomyCtrl @Inject() ( }) // Create a tag for predicates with no tags associated - val predicateWithNoTags = inputTaxo.predicates.diff(tagValues.map(_.predicate)) + val predicateWithNoTags = inputTaxo.predicates.map(_.value).diff(tagValues.map(_.predicate)) val allTags = tags ++ predicateWithNoTags.map(p => - Tag(inputTaxo.namespace, p.value, None, None, tagSrv.defaultColour) + Tag(inputTaxo.namespace, p, None, None, tagSrv.defaultColour) ) - if (taxonomySrv.existsInOrganisation(inputTaxo.namespace)) + if (inputTaxo.namespace.isEmpty) + Failure(BadRequestError(s"A taxonomy with no namespace cannot be imported")) + else if (inputTaxo.namespace == "_freetags") + Failure(BadRequestError(s"Namespace _freetags is restricted for TheHive")) + else if (taxonomySrv.existsInOrganisation(inputTaxo.namespace)) Failure(BadRequestError(s"A taxonomy with namespace '${inputTaxo.namespace}' already exists in this organisation")) else for { @@ -144,12 +148,25 @@ class TaxonomyCtrl @Inject() ( .map(taxonomy => Results.Ok(taxonomy.toJson)) } - def setEnabled(taxonomyId: String, isEnabled: Boolean): Action[AnyContent] = + def toggleActivation(taxonomyId: String, isActive: Boolean): Action[AnyContent] = entrypoint("toggle taxonomy") .authPermittedTransaction(db, Permissions.manageTaxonomy) { implicit request => implicit graph => - taxonomySrv - .setEnabled(EntityIdOrName(taxonomyId), isEnabled) - .map(_ => Results.NoContent) + val toggleF = if (isActive) taxonomySrv.activate _ else taxonomySrv.deactivate _ + toggleF(EntityIdOrName(taxonomyId)).map(_ => Results.NoContent) + } + + def delete(taxoId: String): Action[AnyContent] = + entrypoint("delete taxonomy") + .authPermittedTransaction(db, Permissions.manageTaxonomy) { implicit request => implicit graph => + for { + taxo <- taxonomySrv + .get(EntityIdOrName(taxoId)) + .visible + .getOrFail("Taxonomy") + tags <- Try(taxonomySrv.get(taxo).tags.toSeq) + _ <- tags.toTry(t => tagSrv.delete(t)) + _ <- taxonomySrv.delete(taxo) + } yield Results.NoContent } } diff --git a/thehive/app/org/thp/thehive/models/Permissions.scala b/thehive/app/org/thp/thehive/models/Permissions.scala index af57429268..de57993ad3 100644 --- a/thehive/app/org/thp/thehive/models/Permissions.scala +++ b/thehive/app/org/thp/thehive/models/Permissions.scala @@ -14,7 +14,7 @@ object Permissions extends Perms { lazy val manageAction: PermissionDesc = PermissionDesc("manageAction", "Run Cortex responders ", "organisation") lazy val manageConfig: PermissionDesc = PermissionDesc("manageConfig", "Manage configurations", "organisation", "admin") lazy val manageProfile: 
PermissionDesc = PermissionDesc("manageProfile", "Manage user profiles", "admin") - lazy val manageTaxonomy: PermissionDesc = PermissionDesc("manageTaxonomy", "Manage taxonomies", "organisation", "admin") + lazy val manageTaxonomy: PermissionDesc = PermissionDesc("manageTaxonomy", "Manage taxonomies", "admin") lazy val manageTag: PermissionDesc = PermissionDesc("manageTag", "Manage tags", "admin") lazy val manageCustomField: PermissionDesc = PermissionDesc("manageCustomField", "Manage custom fields", "admin") lazy val manageShare: PermissionDesc = PermissionDesc("manageShare", "Manage shares", "organisation") diff --git a/thehive/app/org/thp/thehive/models/Taxonomy.scala b/thehive/app/org/thp/thehive/models/Taxonomy.scala index bc4fb1a6d4..e5fcdb0c03 100644 --- a/thehive/app/org/thp/thehive/models/Taxonomy.scala +++ b/thehive/app/org/thp/thehive/models/Taxonomy.scala @@ -9,8 +9,7 @@ import org.thp.scalligraph.{BuildEdgeEntity, BuildVertexEntity, EntityId} case class Taxonomy( namespace: String, description: String, - version: Int, - enabled: Boolean + version: Int ) @BuildEdgeEntity[Taxonomy, Tag] @@ -28,5 +27,4 @@ case class RichTaxonomy( def namespace: String = taxonomy.namespace def description: String = taxonomy.description def version: Int = taxonomy.version - def enabled: Boolean = taxonomy.enabled } diff --git a/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala b/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala index 6a206703e9..9abe0a84de 100644 --- a/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala +++ b/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala @@ -99,7 +99,7 @@ class TheHiveSchemaDefinition @Inject() extends Schema with UpdatableSchema { taxoVertex.property("_label", "Taxonomy") taxoVertex.property("_createdBy", "system@thehive.local") taxoVertex.property("_createdAt", new Date()) - taxoVertex.property("namespace", "custom") + taxoVertex.property("namespace", "_freetags") taxoVertex.property("description", "Custom taxonomy") taxoVertex.property("version", 1) taxoVertex.property("enabled", true) @@ -113,14 +113,22 @@ class TheHiveSchemaDefinition @Inject() extends Schema with UpdatableSchema { .dbOperation[Database]("Add each tag to its Organisation's Custom taxonomy") { db => db.tryTransaction { implicit g => db.labelFilter("Organisation")(Traversal.V()).toIterator.toTry { o => - val customTaxo = Traversal.V(EntityId(o.id())).out("OrganisationTaxonomy").unsafeHas("namespace", "custom").head + val customTaxo = Traversal.V(EntityId(o.id())).out("OrganisationTaxonomy").unsafeHas("namespace", "_freetags").head Traversal.V(EntityId(o.id())).unionFlat( _.out("OrganisationShare").out("ShareCase").out("CaseTag"), _.out("OrganisationShare").out("ShareObservable").out("ObservableTag"), _.in("AlertOrganisation").out("AlertTag"), _.in("CaseTemplateOrganisation").out("CaseTemplateTag") ).toSeq.foreach { tag => - tag.property("namespace", "custom") + // Create a freetext tag and store it into predicate + val tagStr = tagString( + tag.property("namespace").value().toString, + tag.property("predicate").value().toString, + tag.property("value").value().toString + ) + tag.property("namespace", "_freetags") + tag.property("predicate", tagStr) + tag.property("value").remove() customTaxo.addEdge("TaxonomyTag", tag) } Success(()) @@ -158,5 +166,10 @@ class TheHiveSchemaDefinition @Inject() extends Schema with UpdatableSchema { case vertexModel: VertexModel => vertexModel.getInitialValues }.flatten + private def tagString(namespace: 
String, predicate: String, value: String): String = + (if (namespace.headOption.getOrElse('_') == '_') "" else namespace + ':') + + (if (predicate.headOption.getOrElse('_') == '_') "" else predicate) + + (if (value.isEmpty) "" else f"""="$value"""") + override def init(db: Database)(implicit graph: Graph, authContext: AuthContext): Try[Unit] = Success(()) } diff --git a/thehive/app/org/thp/thehive/services/OrganisationSrv.scala b/thehive/app/org/thp/thehive/services/OrganisationSrv.scala index 05f3889499..0a30d74c74 100644 --- a/thehive/app/org/thp/thehive/services/OrganisationSrv.scala +++ b/thehive/app/org/thp/thehive/services/OrganisationSrv.scala @@ -35,6 +35,7 @@ class OrganisationSrv @Inject() ( lazy val taxonomySrv: TaxonomySrv = taxonomySrvProvider.get val organisationOrganisationSrv = new EdgeSrv[OrganisationOrganisation, Organisation, Organisation] val organisationShareSrv = new EdgeSrv[OrganisationShare, Organisation, Share] + val organisationTaxonomySrv = new EdgeSrv[OrganisationTaxonomy, Organisation, Taxonomy] override def createEntity(e: Organisation)(implicit graph: Graph, authContext: AuthContext): Try[Organisation with Entity] = { integrityCheckActor ! IntegrityCheckActor.EntityAdded("Organisation") @@ -50,12 +51,14 @@ class OrganisationSrv @Inject() ( } yield createdOrganisation def create(e: Organisation)(implicit graph: Graph, authContext: AuthContext): Try[Organisation with Entity] = { - val customTaxo = Taxonomy("custom", "Custom taxonomy", 1, enabled = true) + val customTaxo = Taxonomy("_freetags", "Custom taxonomy", 1) + val activeTaxos = getByName("admin").taxonomies.toSeq for { - createdOrganisation <- createEntity(e) - _ <- taxonomySrv.createWithOrg(customTaxo, Seq(), createdOrganisation) - _ <- auditSrv.organisation.create(createdOrganisation, createdOrganisation.toJson) - } yield createdOrganisation + newOrga <- createEntity(e) + _ <- taxonomySrv.createWithOrg(customTaxo, Seq(), newOrga) + _ <- activeTaxos.toTry(t => organisationTaxonomySrv.create(OrganisationTaxonomy(), newOrga, t)) + _ <- auditSrv.organisation.create(newOrga, newOrga.toJson) + } yield newOrga } def current(implicit graph: Graph, authContext: AuthContext): Traversal.V[Organisation] = get(authContext.organisation) diff --git a/thehive/app/org/thp/thehive/services/TaxonomySrv.scala b/thehive/app/org/thp/thehive/services/TaxonomySrv.scala index 4b4b7c28a2..b172b44c6b 100644 --- a/thehive/app/org/thp/thehive/services/TaxonomySrv.scala +++ b/thehive/app/org/thp/thehive/services/TaxonomySrv.scala @@ -12,8 +12,9 @@ import org.thp.scalligraph.traversal.{Converter, Traversal} import org.thp.scalligraph.{EntityId, EntityIdOrName, RichSeq} import org.thp.thehive.models._ import org.thp.thehive.services.OrganisationOps._ +import org.thp.thehive.services.TaxonomyOps._ -import scala.util.Try +import scala.util.{Success, Try} @Singleton class TaxonomySrv @Inject() ( @@ -50,10 +51,28 @@ class TaxonomySrv @Inject() ( richTaxonomy <- Try(RichTaxonomy(taxonomy, tags)) } yield richTaxonomy - def setEnabled(taxonomyId: EntityIdOrName, isEnabled: Boolean)(implicit graph: Graph): Try[Unit] = + override def getByName(name: String)(implicit graph: Graph): Traversal.V[Taxonomy] = + Try(startTraversal.getByNamespace(name)).getOrElse(startTraversal.limit(0)) + + def activate(taxonomyId: EntityIdOrName)(implicit graph: Graph, authContext: AuthContext): Try[Unit] = + for { + taxo <- get(taxonomyId).getOrFail("Taxonomy") + organisations <- Try(organisationSrv.startTraversal.filter(_ + .out[OrganisationTaxonomy] + 
.filter(_.unsafeHas("namespace", taxo.namespace)) + ).toSeq) + _ <- organisations.toTry(o => organisationTaxonomySrv.create(OrganisationTaxonomy(), o, taxo)) + } yield Success(()) + + def deactivate(taxonomyId: EntityIdOrName)(implicit graph: Graph, authContext: AuthContext): Try[Unit] = for { - _ <- get(taxonomyId).update(_.enabled, isEnabled).getOrFail("Taxonomy") - } yield () + taxo <- get(taxonomyId).getOrFail("Taxonomy") + _ <- Try(organisationSrv + .get(authContext.organisation) + .outE[OrganisationTaxonomy] + .filter(_.otherV().unsafeHas("namespace", taxo.namespace)) + .remove()) + } yield Success(()) } From c53c619c59e09ca51ad119cffb4b7d1c9993d0ba Mon Sep 17 00:00:00 2001 From: Robin Riclet Date: Tue, 24 Nov 2020 10:26:59 +0100 Subject: [PATCH 38/93] Used correct Scalligraph commit --- ScalliGraph | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ScalliGraph b/ScalliGraph index f6a4d2165c..1a55a0db73 160000 --- a/ScalliGraph +++ b/ScalliGraph @@ -1 +1 @@ -Subproject commit f6a4d2165c26826c5b28db1a513ade15dfb060f2 +Subproject commit 1a55a0db730460c6f548695251248934196b6ecc From c87f090bcbb060e62e5527c45b7b25acdc3670dd Mon Sep 17 00:00:00 2001 From: Robin Riclet Date: Tue, 24 Nov 2020 10:39:38 +0100 Subject: [PATCH 39/93] Edit drone.yml --- .drone.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.drone.yml b/.drone.yml index 16354f6ca6..2b2ebfffeb 100644 --- a/.drone.yml +++ b/.drone.yml @@ -8,7 +8,7 @@ steps: - name: submodules image: alpine/git commands: - - git submodule update --recursive --init --remote + - git submodule update --recursive --init # Restore cache of downloaded dependencies - name: restore-cache From a934a82484725a30f6c3746c31baa9237b6a8101 Mon Sep 17 00:00:00 2001 From: Robin Riclet Date: Tue, 24 Nov 2020 17:07:18 +0100 Subject: [PATCH 40/93] Fixed schema erros & (de)activation --- .../models/TheHiveSchemaDefinition.scala | 10 +- .../thehive/services/OrganisationSrv.scala | 3 +- .../thp/thehive/services/TaxonomySrv.scala | 24 +-- .../org/thp/thehive/DatabaseBuilder.scala | 5 + .../controllers/v1/TaxonomyCtrlTest.scala | 204 ++++++++++++++++++ .../resources/data/OrganisationTaxonomy.json | 5 + thehive/test/resources/data/Tag.json | 7 + thehive/test/resources/data/Taxonomy.json | 8 + thehive/test/resources/data/TaxonomyTag.json | 3 + .../test/resources/machinetag-badformat.zip | Bin 0 -> 4274 bytes .../test/resources/machinetag-otherfiles.zip | Bin 0 -> 3841 bytes thehive/test/resources/machinetag-present.zip | Bin 0 -> 3941 bytes thehive/test/resources/machinetag.zip | Bin 0 -> 4076 bytes 13 files changed, 250 insertions(+), 19 deletions(-) create mode 100644 thehive/test/org/thp/thehive/controllers/v1/TaxonomyCtrlTest.scala create mode 100644 thehive/test/resources/data/OrganisationTaxonomy.json create mode 100644 thehive/test/resources/data/Taxonomy.json create mode 100644 thehive/test/resources/data/TaxonomyTag.json create mode 100644 thehive/test/resources/machinetag-badformat.zip create mode 100644 thehive/test/resources/machinetag-otherfiles.zip create mode 100644 thehive/test/resources/machinetag-present.zip create mode 100644 thehive/test/resources/machinetag.zip diff --git a/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala b/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala index 9abe0a84de..c0542d306f 100644 --- a/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala +++ b/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala @@ -92,8 +92,8 @@ class 
TheHiveSchemaDefinition @Inject() extends Schema with UpdatableSchema { .dbOperation[Database]("Add Custom taxonomy vertex for each Organisation") { db => db.tryTransaction { implicit g => // For each organisation, if there is no custom taxonomy, create it - db.labelFilter("Organisation")(Traversal.V()).toIterator.toTry { o => - Traversal.V(EntityId(o.id)).out[OrganisationTaxonomy].v[Taxonomy].unsafeHas("namespace", "custom").headOption match { + db.labelFilter("Organisation")(Traversal.V()).unsafeHas("name", P.neq("admin")).toIterator.toTry { o => + Traversal.V(EntityId(o.id)).out[OrganisationTaxonomy].v[Taxonomy].unsafeHas("namespace", "_freetags").headOption match { case None => val taxoVertex = g.addVertex("Taxonomy") taxoVertex.property("_label", "Taxonomy") @@ -124,7 +124,7 @@ class TheHiveSchemaDefinition @Inject() extends Schema with UpdatableSchema { val tagStr = tagString( tag.property("namespace").value().toString, tag.property("predicate").value().toString, - tag.property("value").value().toString + tag.property ("value").orElse("") ) tag.property("namespace", "_freetags") tag.property("predicate", tagStr) @@ -135,8 +135,8 @@ class TheHiveSchemaDefinition @Inject() extends Schema with UpdatableSchema { } }.map(_ => ()) } - .updateGraph("Add manageTaxonomy to org-admin profile", "Profile") { traversal => - Try(traversal.unsafeHas("name", "org-admin").raw.property("permissions", "manageTaxonomy").iterate()) + .updateGraph("Add manageTaxonomy to admin profile", "Profile") { traversal => + Try(traversal.unsafeHas("name", "admin").raw.property("permissions", "manageTaxonomy").iterate()) Success(()) } diff --git a/thehive/app/org/thp/thehive/services/OrganisationSrv.scala b/thehive/app/org/thp/thehive/services/OrganisationSrv.scala index 0a30d74c74..c567696848 100644 --- a/thehive/app/org/thp/thehive/services/OrganisationSrv.scala +++ b/thehive/app/org/thp/thehive/services/OrganisationSrv.scala @@ -51,11 +51,10 @@ class OrganisationSrv @Inject() ( } yield createdOrganisation def create(e: Organisation)(implicit graph: Graph, authContext: AuthContext): Try[Organisation with Entity] = { - val customTaxo = Taxonomy("_freetags", "Custom taxonomy", 1) val activeTaxos = getByName("admin").taxonomies.toSeq for { newOrga <- createEntity(e) - _ <- taxonomySrv.createWithOrg(customTaxo, Seq(), newOrga) + _ <- taxonomySrv.createFreetag(newOrga) _ <- activeTaxos.toTry(t => organisationTaxonomySrv.create(OrganisationTaxonomy(), newOrga, t)) _ <- auditSrv.organisation.create(newOrga, newOrga.toJson) } yield newOrga diff --git a/thehive/app/org/thp/thehive/services/TaxonomySrv.scala b/thehive/app/org/thp/thehive/services/TaxonomySrv.scala index b172b44c6b..2051c64930 100644 --- a/thehive/app/org/thp/thehive/services/TaxonomySrv.scala +++ b/thehive/app/org/thp/thehive/services/TaxonomySrv.scala @@ -36,20 +36,20 @@ class TaxonomySrv @Inject() ( def create(taxo: Taxonomy, tags: Seq[Tag with Entity])(implicit graph: Graph, authContext: AuthContext): Try[RichTaxonomy] = for { - organisation <- organisationSrv.getOrFail(authContext.organisation) - richTaxonomy <- createWithOrg(taxo, tags, organisation) + taxonomy <- createEntity(taxo) + _ <- tags.toTry(t => taxonomyTagSrv.create(TaxonomyTag(), taxonomy, t)) + richTaxonomy <- Try(RichTaxonomy(taxonomy, tags)) + _ <- activate(richTaxonomy._id) } yield richTaxonomy - def createWithOrg(taxo: Taxonomy, - tags: Seq[Tag with Entity], - organisation: Organisation with Entity) - (implicit graph: Graph, authContext: AuthContext): Try[RichTaxonomy] = + def 
createFreetag(organisation: Organisation with Entity)(implicit graph: Graph, authContext: AuthContext): Try[RichTaxonomy] = { + val customTaxo = Taxonomy("_freetags", "Custom taxonomy", 1) for { - taxonomy <- createEntity(taxo) + taxonomy <- createEntity(customTaxo) + richTaxonomy <- Try(RichTaxonomy(taxonomy, Seq())) _ <- organisationTaxonomySrv.create(OrganisationTaxonomy(), organisation, taxonomy) - _ <- tags.toTry(t => taxonomyTagSrv.create(TaxonomyTag(), taxonomy, t)) - richTaxonomy <- Try(RichTaxonomy(taxonomy, tags)) } yield richTaxonomy + } override def getByName(name: String)(implicit graph: Graph): Traversal.V[Taxonomy] = Try(startTraversal.getByNamespace(name)).getOrElse(startTraversal.limit(0)) @@ -57,7 +57,7 @@ class TaxonomySrv @Inject() ( def activate(taxonomyId: EntityIdOrName)(implicit graph: Graph, authContext: AuthContext): Try[Unit] = for { taxo <- get(taxonomyId).getOrFail("Taxonomy") - organisations <- Try(organisationSrv.startTraversal.filter(_ + organisations <- Try(organisationSrv.startTraversal.filterNot(_ .out[OrganisationTaxonomy] .filter(_.unsafeHas("namespace", taxo.namespace)) ).toSeq) @@ -67,8 +67,8 @@ class TaxonomySrv @Inject() ( def deactivate(taxonomyId: EntityIdOrName)(implicit graph: Graph, authContext: AuthContext): Try[Unit] = for { taxo <- get(taxonomyId).getOrFail("Taxonomy") - _ <- Try(organisationSrv - .get(authContext.organisation) + _ <- Try(organisationSrv.startTraversal + .filterNot(_.unsafeHas("name", "admin")) .outE[OrganisationTaxonomy] .filter(_.otherV().unsafeHas("namespace", taxo.namespace)) .remove()) diff --git a/thehive/test/org/thp/thehive/DatabaseBuilder.scala b/thehive/test/org/thp/thehive/DatabaseBuilder.scala index 51767a822f..52094c2147 100644 --- a/thehive/test/org/thp/thehive/DatabaseBuilder.scala +++ b/thehive/test/org/thp/thehive/DatabaseBuilder.scala @@ -35,6 +35,7 @@ class DatabaseBuilder @Inject() ( observableSrv: ObservableSrv, observableTypeSrv: ObservableTypeSrv, taskSrv: TaskSrv, + taxonomySrv: TaxonomySrv, tagSrv: TagSrv, keyValueSrv: KeyValueSrv, dataSrv: DataSrv, @@ -82,11 +83,15 @@ class DatabaseBuilder @Inject() ( createVertex(impactStatusSrv, FieldsParser[ImpactStatus]) ++ createVertex(attachmentSrv, FieldsParser[Attachment]) ++ createVertex(tagSrv, FieldsParser[Tag]) ++ + createVertex(taxonomySrv, FieldsParser[Taxonomy]) ++ createVertex(pageSrv, FieldsParser[Page]) ++ createVertex(dashboardSrv, FieldsParser[Dashboard]) createEdge(organisationSrv.organisationOrganisationSrv, organisationSrv, organisationSrv, FieldsParser[OrganisationOrganisation], idMap) createEdge(organisationSrv.organisationShareSrv, organisationSrv, shareSrv, FieldsParser[OrganisationShare], idMap) + createEdge(organisationSrv.organisationTaxonomySrv, organisationSrv, taxonomySrv, FieldsParser[OrganisationTaxonomy], idMap) + + createEdge(taxonomySrv.taxonomyTagSrv, taxonomySrv, tagSrv, FieldsParser[TaxonomyTag], idMap) createEdge(roleSrv.userRoleSrv, userSrv, roleSrv, FieldsParser[UserRole], idMap) diff --git a/thehive/test/org/thp/thehive/controllers/v1/TaxonomyCtrlTest.scala b/thehive/test/org/thp/thehive/controllers/v1/TaxonomyCtrlTest.scala new file mode 100644 index 0000000000..d08034f2c9 --- /dev/null +++ b/thehive/test/org/thp/thehive/controllers/v1/TaxonomyCtrlTest.scala @@ -0,0 +1,204 @@ +package org.thp.thehive.controllers.v1 + +import org.thp.scalligraph.controllers.FakeTemporaryFile +import org.thp.thehive.TestAppBuilder +import org.thp.thehive.dto.v1.{InputEntry, InputPredicate, InputTaxonomy, InputValue, OutputTag, 
OutputTaxonomy} +import play.api.libs.Files +import play.api.libs.json.Json +import play.api.mvc.{AnyContentAsMultipartFormData, MultipartFormData} +import play.api.mvc.MultipartFormData.FilePart +import play.api.test.{FakeRequest, PlaySpecification} + +case class TestTaxonomy( + namespace: String, + description: String, + version: Int, + tags: List[OutputTag] +) + +object TestTaxonomy { + def apply(outputTaxonomy: OutputTaxonomy): TestTaxonomy = + TestTaxonomy( + outputTaxonomy.namespace, + outputTaxonomy.description, + outputTaxonomy.version, + outputTaxonomy.tags.toList, + ) +} + +class TaxonomyCtrlTest extends PlaySpecification with TestAppBuilder { + "taxonomy controller" should { + + val inputTaxo = InputTaxonomy( + "test-taxo", + "A test taxonomy", + 1, + None, + None, + List( + InputPredicate("pred1", None, None, None), + InputPredicate("pred2", None, None, None) + ), + Some(List( + InputValue("pred1", List( + InputEntry("entry1", None, None, None, None)) + ), + InputValue("pred2", List( + InputEntry("entry2", None, None, None, None), + InputEntry("entry21", None, None, None, None) + )) + )) + ) + + "create a valid taxonomy" in testApp { app => + val request = FakeRequest("POST", "/api/v1/taxonomy") + .withJsonBody(Json.toJson(inputTaxo)) + .withHeaders("user" -> "admin@thehive.local") + + val result = app[TaxonomyCtrl].create(request) + status(result) must beEqualTo(201).updateMessage(s => s"$s\n${contentAsString(result)}") + + val resultCase = contentAsJson(result).as[OutputTaxonomy] + + TestTaxonomy(resultCase) must_=== TestTaxonomy( + "test-taxo", + "A test taxonomy", + 1, + List( + OutputTag("test-taxo", "pred1", Some("entry1"), None, 0), + OutputTag("test-taxo", "pred2", Some("entry2"), None, 0), + OutputTag("test-taxo", "pred2", Some("entry21"), None, 0) + ) + ) + } + + "return error if not admin" in testApp { app => + val request = FakeRequest("POST", "/api/v1/taxonomy") + .withJsonBody(Json.toJson(inputTaxo)) + .withHeaders("user" -> "certuser@thehive.local") + + val result = app[TaxonomyCtrl].create(request) + status(result) must beEqualTo(403).updateMessage(s => s"$s\n${contentAsString(result)}") + (contentAsJson(result) \ "type").as[String] must beEqualTo("AuthorizationError") + } + + "return error if namespace is present in database" in testApp { app => + val alreadyInDatabase = inputTaxo.copy(namespace = "taxonomy1") + + val request = FakeRequest("POST", "/api/v1/taxonomy") + .withJsonBody(Json.toJson(alreadyInDatabase)) + .withHeaders("user" -> "admin@thehive.local") + + val result = app[TaxonomyCtrl].create(request) + status(result) must beEqualTo(400).updateMessage(s => s"$s\n${contentAsString(result)}") + (contentAsJson(result) \ "type").as[String] must beEqualTo("BadRequest") + (contentAsJson(result) \ "message").as[String] must contain("already exists") + } + + "return error if namespace is empty" in testApp { app => + val emptyNamespace = inputTaxo.copy(namespace = "") + + val request = FakeRequest("POST", "/api/v1/taxonomy") + .withJsonBody(Json.toJson(emptyNamespace)) + .withHeaders("user" -> "admin@thehive.local") + + val result = app[TaxonomyCtrl].create(request) + status(result) must beEqualTo(400).updateMessage(s => s"$s\n${contentAsString(result)}") + (contentAsJson(result) \ "type").as[String] must beEqualTo("BadRequest") + + } + + "get a taxonomy present" in testApp { app => + val request = FakeRequest("GET", "/api/v1/taxonomy/taxonomy1") + .withHeaders("user" -> "certuser@thehive.local") + + val result = app[TaxonomyCtrl].get("taxonomy1")(request) + 
status(result) must beEqualTo(201).updateMessage(s => s"$s\n${contentAsString(result)}") + val resultCase = contentAsJson(result).as[OutputTaxonomy] + + TestTaxonomy(resultCase) must_=== TestTaxonomy( + "taxonomy1", + "The taxonomy 1", + 1, + List(OutputTag("taxonomy1", "pred1", Some("value1"), None, 0)) + ) + } + + "return error if taxonomy is not present in database" in testApp { app => + val request = FakeRequest("GET", "/api/v1/taxonomy/taxonomy404") + .withHeaders("user" -> "admin@thehive.local") + + val result = app[TaxonomyCtrl].get("taxonomy404")(request) + status(result) must beEqualTo(404).updateMessage(s => s"$s\n${contentAsString(result)}") + (contentAsJson(result) \ "type").as[String] must beEqualTo("NotFoundError") + } + + "import zip file correctly" in testApp { app => + val request = FakeRequest("POST", "/api/v1/taxonomy/import-zip") + .withHeaders("user" -> "admin@thehive.local") + .withBody(AnyContentAsMultipartFormData(multipartZipFile("machinetag.zip"))) + + val result = app[TaxonomyCtrl].importZip(request) + status(result) must beEqualTo(201).updateMessage(s => s"$s\n${contentAsString(result)}") + + val zipTaxos = contentAsJson(result).as[Seq[OutputTaxonomy]] + zipTaxos.size must beEqualTo(2) + } + + "return error if zip file contains other files than taxonomies" in testApp { app => + val request = FakeRequest("POST", "/api/v1/taxonomy/import-zip") + .withHeaders("user" -> "admin@thehive.local") + .withBody(AnyContentAsMultipartFormData(multipartZipFile("machinetag-otherfiles.zip"))) + + val result = app[TaxonomyCtrl].importZip(request) + status(result) must beEqualTo(400).updateMessage(s => s"$s\n${contentAsString(result)}") + (contentAsJson(result) \ "type").as[String] must beEqualTo("BadRequest") + (contentAsJson(result) \ "message").as[String] must contain("formatting") + } + + "return error if zip file contains an already present taxonomy" in testApp { app => + val request = FakeRequest("POST", "/api/v1/taxonomy/import-zip") + .withHeaders("user" -> "admin@thehive.local") + .withBody(AnyContentAsMultipartFormData(multipartZipFile("machinetag-present.zip"))) + + val result = app[TaxonomyCtrl].importZip(request) + status(result) must beEqualTo(400).updateMessage(s => s"$s\n${contentAsString(result)}") + (contentAsJson(result) \ "type").as[String] must beEqualTo("BadRequest") + (contentAsJson(result) \ "message").as[String] must contain("already exists") + } + + "return error if zip file contains a bad formatted taxonomy" in testApp { app => + val request = FakeRequest("POST", "/api/v1/taxonomy/import-zip") + .withHeaders("user" -> "admin@thehive.local") + .withBody(AnyContentAsMultipartFormData(multipartZipFile("machinetag-badformat.zip"))) + + val result = app[TaxonomyCtrl].importZip(request) + status(result) must beEqualTo(400).updateMessage(s => s"$s\n${contentAsString(result)}") + (contentAsJson(result) \ "type").as[String] must beEqualTo("BadRequest") + (contentAsJson(result) \ "message").as[String] must contain("formatting") + } + + /* + "activate a taxonomy" in testApp { app => + + } + + "deactivate a taxonomy" in testApp { app => + + } + + "delete a taxonomy" in testApp { app => + + } + + */ + } + + def multipartZipFile(name: String): MultipartFormData[Files.TemporaryFile] = MultipartFormData( + // file must be place in test/resources/ + dataParts = Map.empty, + files = Seq(FilePart("file", name, Option("application/zip"), FakeTemporaryFile.fromResource(s"/$name"))), + badParts = Seq() + ) + +} diff --git 
a/thehive/test/resources/data/OrganisationTaxonomy.json b/thehive/test/resources/data/OrganisationTaxonomy.json new file mode 100644 index 0000000000..df6a1338b2 --- /dev/null +++ b/thehive/test/resources/data/OrganisationTaxonomy.json @@ -0,0 +1,5 @@ +[ + {"from": "admin", "to": "taxonomy1"}, + {"from": "cert", "to": "taxonomy1"}, + {"from": "soc", "to": "taxonomy1"} +] \ No newline at end of file diff --git a/thehive/test/resources/data/Tag.json b/thehive/test/resources/data/Tag.json index c6136decb4..094be1895a 100644 --- a/thehive/test/resources/data/Tag.json +++ b/thehive/test/resources/data/Tag.json @@ -68,5 +68,12 @@ "predicate": "testPredicate", "value": "world", "colour": 0 + }, + { + "id": "taxonomy-tag1", + "namespace": "taxonomy1", + "predicate": "pred1", + "value": "value1", + "colour": 0 } ] \ No newline at end of file diff --git a/thehive/test/resources/data/Taxonomy.json b/thehive/test/resources/data/Taxonomy.json new file mode 100644 index 0000000000..500c39c010 --- /dev/null +++ b/thehive/test/resources/data/Taxonomy.json @@ -0,0 +1,8 @@ +[ + { + "id": "taxonomy1", + "namespace": "taxonomy1", + "description": "The taxonomy 1", + "version": "1" + } +] \ No newline at end of file diff --git a/thehive/test/resources/data/TaxonomyTag.json b/thehive/test/resources/data/TaxonomyTag.json new file mode 100644 index 0000000000..80806c707c --- /dev/null +++ b/thehive/test/resources/data/TaxonomyTag.json @@ -0,0 +1,3 @@ +[ + {"from": "taxonomy1", "to": "taxonomy-tag1"} +] \ No newline at end of file diff --git a/thehive/test/resources/machinetag-badformat.zip b/thehive/test/resources/machinetag-badformat.zip new file mode 100644 index 0000000000000000000000000000000000000000..aae10498e3ed41fec6c29969e841a5ab9785e8d3 GIT binary patch literal 4274 zcmb7{Rag}4wt$BQ3F(&Zly3YbL^_56B&EA%1{jA>lys2pkWMLqA!q0wKuUfXb%GfK{x95#000UA2f)GJ$;B7! zZ4VXmfcW}=OvwR6WZzMCe~R&U1;9uFIQTnw0Koq)d8R!E8xJYbKZKtwkUk{03FKdO_?wmGEPo5+V;MLg7(%#>ptKH=Y_WvtWL6d()Gc+M>E95{ea@7IOv z*k_V3MLmpVOB+O)ja9#TiyO0vq8ITkqOt#OEVJ6br9nU>$VsArf3*g*dEh$a7exme zjBP*2+O%l5UI<=))A%fKUi#SKJe#odI-s=P&!ggCu4WkrcJ4b=S{mf=!e3!kve)sl zu7TPD^_7xi)465=fq8EFF6^bsig5)xLMHiXp*N8|i7WI~eFDCA!|tjsukgU65%M0X z0zI_;!mHx%#+sDVtccK6nMTM69%ON>9%9iuZ?Lk_?D$2~?o+ZeisVbB0${r#6$xHz zvAd0x-GSf4E?#lP&-fq#sM0_%7_OMLUnrY*7ss&dfz$e#C97$6rStH0bb(C5+6bF` z!lfaq^(yZo&Z&>lrymr!lJ_aR4_JyumL9Z6S* zU7mpSp_7GqSk>Z7 zQUCzuZ&<#)S?j4q}=~bFPZ? 
z^Ehq0)L}jt@XYeu;P&T6fNM^yL#EqW_^5-LPToNyrAd#CRV^G5#T6J@L+yc|0*5|F z<__QXS4~X_kUvJR3QqDbe@s`JrkJtW{i0hXKIXpDZIG;6;7bz?=smrL1G?L>%IiEjN}e-LS| z`;Tn=jH4j}BEr^jNE#;@?AW;hL@+z1>42D?<2&EqkCo{ZgH4#^=)F)yS_tIkfF0^e zEw-okO?f@u4A0ND8LuGBJ(5HL>wA(daDd;YNBjlL^DOOI=ke8e60)XFSuKuS<22}yvx!o<9@x4n z+DX`d2b}k#*OAjlKlyJ99L8M}m?n?eiA8UaY1+-b&l~*@Fr&6BJ>&^@bJvufySw;t z#Gicok=Uc}q&B=}c->ud@){x89YYblU2jKBfGkdhIZ}mN7MGF@^d6XB1 zZlQb<8ZR}LfN<`IFNJG%zg|2H7vtXOTS9*pa}G^#C^yw!Btqk(z%|X1tA0XC7WPcJ z+Vm@AtMec0D{`f6`Qi#8O{=33X06q?8IHJpC94B@Km|Wm=AQ0USU^flw)HFD@CeCh znEHe)hf5kyv>o-6Kw{D*Xz)92h-@(sKW;_3ji>CgL|gzWc6f9#mHTbHRDq$znr51^ zo16Pk#(s-BiyT(+D_!K+qB5NhNi>mo##ch9=sn062eY%(3d>R?FOO{D8->voaJFRe zx$T+eIr9KH|9ZY{xg`M=qgxU)@Ol9qb_IpJ&cVp;i9YU6&#+i&(v9gj<|X92iBi5Q zKzq#GkaUJRe{2(goj>7}RLG;JZcYlWghXAZ^HL^-b22xS+lyQ%%ncw!i3y6r;$xq~ z9i{j&2NLP-d4?R*Se|=WXsDR-{Mp|q5N?(zuwkXRUl?sfdfC0Q6@0>4q$F3G^v_;FPC_VvoiZBBwp%JGw=?sW8nAFUJF^Jt!*tj2!d zD4`y8f91l)iLGS`GGB$!zH)gaM+A>-aAI*(@uqdttYQ%QZzl#_4`>+o53Twv2QnraNN7O`=4J1C8X*#zbb}Uu6~GeX|8Ir?ACM7GmU9u zX1mY6;85ZHgln=H92`wIU&_rlyphYGeaaLiWmkKa14^{!cQjIT&R@OdIHO9Y zYQm+kWZ9HxcQ(TPa7a@un){3!J1Zt?qHmKH5Q;5U(M9&ptw9$DSW5FXaeQHFi3|*z zM3!A?N-|D;r6R82Mm_qykOqerazYYDTMq(qf~^sXq(e*|oA84YCEL06J&ABmCNfEA zJXsJR35o1bfJR8!k=4Vq!ym4?ShLWfDPJk$yft3dd^3mU#o4yh(E|C3)Y=e!M^01f=TfMVNJ0MMd1^Wu&JW6la) z$3|Oj2-e&2PzVqvvQu_Bd5eG;AW)IBH`xW8LEC+ZgN%G<2MUpJV6%xD!}A*QTPUFjbCm=)2B{p2qDIoFDeQ zF$;9=lwnzx?n|sBMg%X*%7aUjn^&KX;r~e#A^}+x`?Z zB`DM)QU2_bJb`j|9ys!&b)06XJgs!&o72t687F6V_M`<{1p~Z{sWLv(=0YgUkIwqD z-oK-O^57(P#oY)awa0nwf*O9&;1_DfOcc80s`4L>O+;%kQ@m-%qgIsCFK($g)hRY9 zpzgaWgNl3f)jkHs;u)@FaDG9 z%x=vH4)L{HT^KO3UgB)pU}uYA-EmLRxNfa1KKd_gHOri$4Yx=HEExMyqukQI=1UY` z{t+rCj)HG-CpNDrn5K1b_F#a-Cfm<4j4@xf{)>5YVAN0T0ZCVTzbFXF7L*Y8h5V7V!!XPq)R!h}%%ks8!=Pa8*TPQy z(5pu2GhH`Fe|RL#j4+Fm;_ADUfVYlK_!068V+-v({KR(S>tKas#T#M5%_=9Q0j%Z7 zHPr~`hF4^9+0+rDqC*m^SloD6PDNHqA(ycx0vt~w>9Gf0TBLi~`0KY82F4mkUok$` zALRGG;w4e(i(_}{P51oiM%D80F}^0On-$$YvFBW)8iybY@F*SDkvHC={H}NMxh&o7 zKwhg$tv#=*Tb7y*kINwe8&YJcqAgCcc=)O0(~2%JiF4MoFR0g=hA{ajZzrVf=CeB3JW{gnL(okAEi0gZ&ad@=Czy0yok7JX;(8=&dKd1cqNR z72*5_3_SFYk2OroBcGi3$aT)}HZwtXiOn!G#W~}HZbM)CW1DA^o*WLGSwB!R>v}(9 zymTP@*<~R>)4AG`oP@;k4>dgTB2wy`g2()UcXeNMLK=LwUtV4P{)HUHip#J@(CAgk zQtPFw+Ga>DR6V-QUk*LDTYu062o6Kao=opR5He0@PdWq*rbeN9GS-v<_R=;KNJuZj zj%n2$^H%WjGLvA7kM1Xl%Z_i1)y*unz5 z4Z1RH-D9<~D-xJHdHRPJtVf-0M$c$)T1wY;4_R6_$;2m9$1uIy^?WR$c~(ZbGc3A# z*tOtPo1L`0?vNbE_ap2=MoHy?FKGH*yidK2R;laCUKrKCU~a(NmvIoHwq;T~EGt`A z!x}f@E5B)utvvfV)zc3386qM}Vjk=t3c|{t4|?-69tMDZ>|LRH449V9j?C7@6XO&? zzpQOy1Gql1z_Z|7_2wc^{+@Sr=zBUWN_7h?WQH4ll_+=Vve0$hLK0mz5b}znUiEsM zs716Mx2`3sqpi^N%h^d53Ids|j%JLJY(Wtf?7Qq&N`1^h7F$^?Qv9@cFj>VA_2jBi zgUIv&pbu1tR^a_o?kZ8CB&5-JMC7Kh+KpfNq9f@zJpM5cfU}D9*_dbHjksjh(cMr? 
zISrQd=h+P6?zf~75x-NnYBnba0RE_%kuDA{E#7|||DQtl$MOFaP2qn||DEywv-ck* k{O8<1O8Bp63Kzu%{MUkLq)R~frvvWqZ2!Hl-~V&_A2=xanE(I) literal 0 HcmV?d00001 diff --git a/thehive/test/resources/machinetag-otherfiles.zip b/thehive/test/resources/machinetag-otherfiles.zip new file mode 100644 index 0000000000000000000000000000000000000000..cac42ffef199dfb9148db6dd29e41cdd970023df GIT binary patch literal 3841 zcmb7{XE+?%x5h^&N_1gF?=@dz%?dMo9!gM)VRbdW{w_dM{DNh#(H4 z1yM!|Mi*{!o_qfHJonpwt!MAG_Luj4KklEg9uYAkfc$qeI3i2{|64r2Cl!DT;OPT{ zJG*&0Nx%_suqicw%zUrS_MhXApac*QZx8_he_w}>swPc-ntlckVkAw3B*cXVB@GQs zgoex?>4FCziR=DYCIlGkk&#oUkdyNL*8k&S{2zBWB=kS-xiSC%?QeH4I|o-cZzs5& zi-LqZ%*Xrh1TNxe+uOZuw%mPa+y9pUYlV$lk*DwY45Eci>k}ol-|ympZs9MBK5oWH zsSL3*D~!>L!^9JB2+!s4oKo3te7oF>CQFM_jo(I zjv06`c6BaBtrTd-vl|3@@(vK4_zigLKA&BgFRC65_AJt?z3vO7%yv7;Bu?KOAR!}d z!opIysSx{4bzqXo5iNVv_>{oW_I8Y1hXi8uahCowRkVdrb{5pWrr2VAeA|@YJ$!I_ zve{$-W$vCRuDL0u0;M)uDTPn~*Xrxk1$1mLi|jlNLLtkj;mrsLn%ucBfn{N>JJ8EI zMq6|fJ?ih@X4$qKa0S9g_=Dr#!lv{skFosuVc;MrVRFBji?460r_mKt%$=wa!XQ&K z#LFAfjYxp+s(aiq@M(t5q!V^Aj)JPbLtdLJ+awh};PhCjR38edh;kJ5+kobr^f+*P z8zlW@L2zsHF+z*a?7*TY&@}b(#@Cg;+n8aSg>LHj>!}M`kIhZuSn|(4eQ4a?PfBZk zGor4}se78inXLm6J)N(I9)p06`B^ggA1Ae-p2thp#mbLq4oa=DCI;tTmDmVN2STFM zj>Uor9Y3cVh&w`m_z8us4dHu zwGoKTht)3*N0>ELUZpz__7*Mn=YSM^+1a|gQV{;h(V37JK4B5kQ3#Duc`oNvz9?Jz zhXLf2bMT-yIxzV{5OM5+Y%^cUX_2H5Tw-_c_)GQ=Y_S4MBZOg`wu_f{SI%yYK7$%j z^eatl-=aK?2~9SdfEOqtRrLBK5R0(2)DF#1q%MtY6c|P@<#9J=2)J&U<~Z?zxF0-S zw_KBgOEAqz7h+>q_xVW$E?+61<+Zy{TvBv|IDKdq zLL5EhmR87Nrf*0LDu=zgNaLqX4C7|2E433lR+#EXiIbBQgvP}@MLNg`Vs@m`+;WV# z#&LW%aPSZb%X!DQYZZO;7yYc9QB65E}*~$5Ry8P_t@yW2(6G?0uPE=jyj|e?_St1GwzG0YmnR_<>)>BCi|JSVl0ok`-csP{7M%^I1hih>TNv- zn1_iYfzC;w#W-wR*ZRf6!BtkgO7i~0#ICgV+b2z7wGrkx|#5t?XU@Tz}8n zT5V^&{o}x}^ESh+?Om&03sRXcHR57`nAb^iIi7l zfsxe0)nfX4RIJiboIgfB&JKW64#kot{kRvb*g0xPQIpG+u~nIcrq+HL7JTz9gHQxK&l3Vru8J$sqK4w7x2YGZ{%T z%fzfc^f0p;jFJ*gMfW{D4UA9GZ+KA%vs`@t%DJJU-m**IEypyvnT_Kn=bZhm7%P^; zXsiur&=CA`@r|xBsuYc)%+T?A>R$viDn=~HX3wn)1sT`%;V~_tYz$3h6_Vf{yzoH& z&G-wd2?7EwS0CEdaQ?`7@HTC-lCp~(aEG>C>t{P!e9BLwaX-C6u42?Vzi8f+Y;!Wg zZFfLRB8nH!i<^`Xe{5i#>K}qDRMA8CO)bG^`+>!|S_D1_^#m4{RWi%YR3$mb-eNHq z=tq6#t>8NQXLm)UjMr}aX9Yn}ij)Ja?yJb1A|;!tMP|E|d%(g7}CRSO1z4lbE`sNln2&!@Z$9RYQMJ%B=+6adJg@a^$67si|&zKn~qToJCd<)aZI zP2i;Mbo3Gfv%uhD`0&g;?!fh4mv%XixX7y@^5NmxHa9xhiI^(-Gsh9@nkxeDo9ocl zz|i-zn(o|QNEVLe666}#0ekMAWez&O2a&dRa2?ivU0cPE_fa!t8vkhhdV!D=kuxr# zJ0O&4S)*`~7(KG^BJ~k7w!g2@d^eEqu8|7U#PW63tEHPrk@)I|pUBv{Ve$~_a5l54 zFuW~A@%l0Oe zlcR#>B&e%&) zv<{ZvHn8p&1984q^k}|wwyNOn$Y_){8%=l%5xt^}L1AOrp>E;hJo?_V61b#$Z?ziO zru^rxN=zN3|BaH7_3`(_6Nkqd-tVby;2>jlUMZRdifLG9n|$S`1&Leg=5w6Qgaiv7 zXkvrm76n)}U1d#44c#YVQsiA}ukfz!yXp;HQEAOuJhv#L4Khc!LB-`qepyI?>RH=w z7pI}7+m{AJOVLoeLGlNr_uafNWZ*fOku6?w4kIkii}Ft2Jp~W2Z@aW2xFnaZ^bnxP zS}FXv;l>)vvcs05NzGD8T+{r7wE&|A6kwcpGe0$!c#^h+xX!Z$UNX#!djOFTF#F$$if3eD$(KD; zS*tWovGC(=DIHZY&E51RF%UM9qT3F9YLVt??Pt)G9}r^_b#_b5;FF-&89#+eZ!D){ zPnyTMD_!FqHDWD97khil=$2EJYAlM%-@SNH_nyfb?N9xKuO(@&JNL9Z)mw5Zy5#AZ zhBzQ#fTj`PTMQ%P2YzDqualY zx$_}Mw#&CW0YRZ?`GfHd7)s6&|FBiq@XIhY*`oGFt3Hx=2?Xs zrFX1EAl9oe_d?Aya9t|M$KnG(6zT&*+PW)-keuZ;-(=X3;|TTlGVDyZaf{K;EQ@FB z;OiTlfebrd4&xaJ8jF`UcY#f-RFY#UBbc7`S^;24j+Jrt1W->Ow-l6Oy^)&J6`bYp zZirLFII%R~8AG45*P)m3B7IHCGvnH4Y;~BMa`qzhHmvHqB_+%1IFpaW%Ha($<@ob2 z-7RqM0WzvYwoiQnfw+4|pS%Qb?fQdHw$9qR4O!>S_RN+g6Jiy?zaZ8z{yd+7$P6S; zt-094zouR6dmjyo(_Mk`*^oxxrAnPUE%Y4L&}64|r2LX?FM8Y$)FYbqo0bwZ+O67| z=Q9&66olTgJDA;yHIgcZ4I|U~LEdl?Mxl3e z*^6ZP(y)&vLt>Ztm9B!y$E}I`VR34F0PYIPCnFyDmy*&Id)EWer3^UAbNmGA`fq4^ zqhZxxP7MJ3K{I1L0zyWj|AWSV1Kl5J{HN-}{%rpZLjGI%4-o#_^$!sKQ}tn?zt{g3 
W4~_LmNdG7h{_dvVQ-wf^0JP`2! literal 0 HcmV?d00001 diff --git a/thehive/test/resources/machinetag-present.zip b/thehive/test/resources/machinetag-present.zip new file mode 100644 index 0000000000000000000000000000000000000000..07a6812f6540fd60c1a27c077326f18742418161 GIT binary patch literal 3941 zcmb7{Wmpv4w#NsgOBxC3PDv$ILSA6VVE{?#9AajWaR@=$K~fr|Q(A`ZhG9@ac?Br} ziJ=6BM!0gHd(L^E`|bSKvvzz~d++uCw0}l=w}==4WPidP2{i`%pYmh@09pVy0Alax z=I-qbw|7;P^nm$zgH5Rcq^=TZySx1zc02=UJAVWvfPiS@769Lru6U{XA?w9h0(cuBBs!X=qEO zW~Lkmwqtq0;%31|VB^CTN*-Ibr^0q)nw^){9l(2GFf4oV#aCrn`x13pYK|)L8f*_Fjd08@9^xR zclzOEdE&x%&BO59ZLFmwBFbA^ug^1m+2}8Ao%0(6tGmKUx3Udkk~*r z?^B<7jX~vX1xk0WnZD24#qE>6Cu2|dV)RO(b^?cffEVBJtrNdtZ{3%(EAz!Q6G2|Z zdUe+W0hBrJCs{=4d&9(}BrRB2Di0N6-?;%y{AFCr0W~=-c(lD8E8iuFm@vr(zEDG3 z2zI|ymUO<_9BuQv)imO7Y4Ohyn$=TN$8`K4L>@JJ#z4Sw@ zmr-Mz5!Prjmw`lpE!6;tfJ=yAe8Q!~UX3Ylgkz<1Rn{+Rt)rp^~<>|#7QRb!WeHg}G3Dty@4 zM7azIwXTeI67$=D=AQI9@_6ef|7}5VXVV0s#cy_C(HCHvdP(thW#BGm%yyxdI^lZy zg4T0$lPHetqt5^uxA%+EM!@V=Pxtf#&7iFI;fTKO*P|vNcBg`D*@6#Ww4h$cOEx7c zCNu|SHdtf*b4V37!qSoOR%%yy!Y_dEaz1e)rkJt2$^Vv#)Ja5ODcb~9sQH%WR!F8z&AQDg(479+=5&yf9YwllrUU!cklRf&QENK zB1^M1!z67FAK$LL{Th8HHKO=;n)tp&MH&;DbRrQiSWKb>84`>`*jZ|aWGYdYMK%kL zA(--cnllC6woG%K`9VC7o~>K1Ny8U{9gD43y z;=+*l*k?#bSs~1hbeemvA@?MXp8^LDmQ?wM|GNWBm?;5jSST70>olS~?O9k0Jm4tQ z*;#uq=I;Lxb&U^5_VRx>WTe*Ih8$nc`!-FDWvuN>xZg=!Qwn0kVB~p=1Da}A4Jf8d zy3gL1`-RMIxU##8ZYJcBN9YTGHXzzso8M8lF2Z{h4a!dX;^~^Cc1Sq;kH5=(WUc&^ zPu=^|7DRTXiz1wdzgqRSnFGwjB#`Vb$)Lq}Y)8-f#lpc=c7kfk{*$Dhw9dOHEfZPa zI{D8zjD3I7!aZsqOrZA82+mHdX@ zodL}9FUmP|AKC%yQv8xAuSf$TsYPnUfqPV}GSOT=$3M&tgWn#CCx7wdS+L^ZteZf6 zS+0t!&MGps@yoQ}e~{@{-}TsEH1@d9+;D3{aIZu`jK45CjP6@PKBt0;K}69j(z&TS z)s1gW?R_>GgkMiIRtIyXAc<$0nAJz0WYvIC(juwofoG=y32%W-p+zvu#rIJzO_hz7 zJwW$d)0kE^&f8pb4tHX$SPo;bwxAIM@TS4)2lC9n7-m?4Sdz<`TNe&A zY8b#{+CtbETFNV>zUXrIp z;Db<4WMNq)wd_t+mUrqe5qE`t05Wd{H8{MuFDh-ccGo{U&>E#gIn3&@irgtyww+$y zk`CixrILZiQw0E$(CBtWc!aDSRUI-b?B1e_H9J!$ZKw*tYvY+m63UXfT+O2ut|O{y z@uI*VnjEpsGRBQu#%}A|9gb12p1G`AFbH;W%QixVp8S3}4KZ*6Gzj$pifrEkKo&(A z$Jg8#a}M}2F4}TMq|T0?Mwld#i?-VdA`WJO!Nu|6S@}Ex>;0~s@}BXLSAk?>W3wIZ zbg&a~HS|Z$Bi1!H1l~8#v7?D$;8zXZxr4AA9Lp`lJ-h?<+C9q}ad{6SY474bZ2Y>m zik}>yX38@9+5YtcAulR#R7!V1DBHG1?kYKPWC0=Z5jS#psL^^qfd9UsD$~^Rb#>Iz zZKP;I&66QgwqBTmHFY?f*>o6S&Mc27t$+L}xw~%4pNI8#Wa!ZbYqGmZi1N}g=h#zj zdMPEvnyDDvJ{{YA#~srH!<|k=#0(OwaOM7t^5ADiJyl8EcwRHzstQ#6T!wMzzetas zB~agwFtu)o%$q2Oy1b7+KrGI72 zfzg=n;RiC`y`h2g6C`%WU5cS~umW}g4Zj(P3bbM-3S4s3gm%X#qP5v*!rN}qE6M5? 
zHJ2ai7MbMJ_n(!*r9Ap;9)oQwe*Lb(G*}P5Q8u(W{*iRzXrkf$o{9oz{iy-+R-j{)d_w?RZZ|aFoYt`bt zLm8u=HL(pUsW|e>Mhe!<+I_z`4Kdxm)W5Y91Em`wdqnck-TOioo|_ff29b9hXK`6n zaQ@*Xbb!t1(Td=fTDsChfFkRp@skD{Yb?u-TS~^YOQrGAk8oA&(@Hjc;t_~IT)k$Q zrG54HD8anFj_f!Z!P$-2+`>SHmZ2{@gXA_@zLp_(@)YX6n>Rg=I@cMLQ4xY~YNXIS zZ%Zom$jo%+Wh#!!PheR=MQgG$d3r%Bb6-e(0JJ1ZE5F!;m@d(SNFG zkiYbMW}|L6^n)B;&&|;f8Obmu#;&Zi_$I~wwPPbu#DkgfnKphQGCS-tR54lUQjBD^ z(n)y`XE}O7H_Ef(6|KYd9YgiRvvwFjSCq9-U- zk2Q`yyYpCoNCDrR_P~p|pK2Tuj#Onx;l#6dlyf)~wCfiiYnYl#{b{0}_lV_nMuNf|r(s5lbNYL|hW@n2Hcw?dxgB^i zPden6f#)$^y0Go6(jbuObWL$~LSotbYW{d}S&dbZeWB;qwcmAv8obwEo}K;qh3v&j z$#X<704o*ffoW>C=`u5w53lm(gOBW%?{)(ML(mEblN&IUyc7OOyNJQ(F*s1(n%3W5 z&V~jJ>qFVGF1ll0i#(oZ6={~)u@bemS%rBNX{Lc2Qb9fz^#C!b4-9GNp%hGfmfw1t zVMCrX#2;eNoo?$MtDRMzz}Ce-FfwC3=5#rRXCP=US=!uXZ&{_1`t)`j)3;tH$R3<) zWt20;t_Q>|1-`Y}NX_jD%65D=$|Y))RQCJ@!+;Cq5Ms1QUt9XZsO|+@1Ln58gDAZ% ztNLzf>9RV`_ydtjcvEZz{`_-q8{B)Clq!jBXka)1_uyyLy`;i@)iR7&_7k~2iH&=)LzeXhJu0REBNW&k}WiH(odX8&o($fYK z0jZAAK92+Sh?f19r9_QRt4`+mtRxFX;S3H(vpbQT0TJa~n_Oqg{cHgiYnkk_LX6i4 z1*IU3$rn~=(Jd(wVb{9}FqPbKBkanFKF zDVfT>>*1I(1{~!%ehPIBFw!F>d!u31WKInL+@P7!zoBj23Gr`eqa^?UsQy5kFVq?4 z><#}9u&w;+u)9gzpZtHo28pky>F1`>4Cv}R$caQRey$!1jVa-k4-GYaQ+OFz&yZ|J z5l(!v8zfEv=wubqb|m)+9SnMPd!?%0QNgHbba45VEAl119O-u2{gMuQiM9)^@W#P|9R@NOng4Nmub%%;nG(<}3l01v>| z(b>b(&lTb5t|;XNf8+--rvZ>bCD9Hy``R6N`_T@8NGbpU@g@-f@SjVxX`5f)E$aDS zEKwUSn{zb}GC*4sMmnU{ML=|u$Y$~&`Q`qnVt=2Z`)rXbHxq*<(Et$^?@wNX@4{ZB z1SQyvG`k2?!k7Xg(}9(CKE#76ZAnEG;?1fLMgr`!NEvUIqW!&&T2A#j zb_cw(yjS@H_))N$LulA!OS2GTP~+h%MD%T{15mp3}9? z9gb67;u^;~DqPZgUM?_(w;f()s2+k5_nxOhq^kmgWG_J^MsoQRy_U5`Rdban{rP6b zUOzXF2}5t@?yjYn)gs+QPQxG{{vo2D0YiTJPv%w^N@~Z$d`b-Juls|jay@@$6KCuX zk&uyoz{1mbsgVb+4G@y)F&!t=x~VXLV6CDC5}FZ5zrOX=vEXIP43p8#J0HJ z6Xa_brz^gN9uEv`w`t#bdLDcPnO$ zFx1=v`HWJwIRxOp=9O>)e3+#-?TTGWprCH-RM6$gHBCbdxtghzgJ96A7#Hz?O<3N~ zUT0oE!<4_Q2ySedA$0^S4y}5F%+oF@Kd<)R#EjZ6_Ru6=&s@-ZZ*39BlYe~FkH+o) zpt2LRAnNX#QPd90ZX1f~?Rq(41_rtm=ExPkpVoo-oGjaws+!RrmfK-X4bOe6u~9b8 zghXkb6^TCpA}a+XNz5ZU5dQ#xZfbY`;H>7_d5leY`L}hSqS+IHJo*!3k8mLw?I+sv z;1K>>PsD4szMb5Pl;U6QoA3B6EIgqzlp~NAhwslsihCD9sF@B*{sNA`;>rhB(U!V|HcI zJ@brtCUF9kI7GOV>KFW9?GVB&NpQnrald$n3Ds%$;(F*IXOZ6Sy5gv3pa$w1AC%$~ z_-4>Vy}31HY$gB83=NjKt~c>c2T5%im;-~6=PL*nxuD0x%y7N-u}p5HBms*^W7dyex;8hTtGZq z^Rt@=EWjm0fNm+^r37qy_r}HI;Z;teTI#|5hK0?xyC)kn78Uojuz=D^iw9WeiKwu}CX1--jvzW513dR{7uxPJImrPk5(~H#RE?sFpOMW^-Kc4NZSMGJi%I0=cwksOf1j+-B+R3%91KMw@PsXP$X%3kMe+_q@}M zI9s-(SgbvG*a-4$>6N}JsvM1?%F^?F7+3-~E=4Zi&Yj;72{mcx$75O}IG8?ER7pd6 z@uEY8bcq+#Qv?J$9*^kPo(o1VKz8U;RaD&_fxGk_IzKwlk~0BX%?BA(@>Sz*g(VB- zWLwiwo_j+&QZf8^e%!Q_q?w^zT3|S?Sj_<4KeLRO8vvH(>kvFbY9_Im#dD*FD5ed{mfMhhf zO$iYt=RjQ_k{x+#$qfo*>7akAO7PNj_O7IgR33NpNTvI*ntFm5=$kfYT(hicBe$u? 
z#!kEQi)Rnr)~uL>I(g(8VZ!%+J(=+}asf05_X3LTUjx8a#hEA9JQzz(#0oCPW>vJ_ zL4a0-G>MzO%f(j$!Uji3;Gbs~@CI%4xp&BWCq!R`l8=tgwRTte~^r$2d)66 zdCp-m6A75N$mdP(8?&bH)KC3ASYLal0e!eO&_hy;kAXGUk!s6FIXTWk)rjtNbngvM zY&RTtIu#W=K(NZ4_x**J04vH`HEHv4?M#arNa<5K#;NZjBW9AYQ}oNQ_oc;S*G_rh z!tK7KN^(@_f`THfG^J_j0hTb#moP`yPd0qWTRr@`sCnSvqVpYQV#EGi)SRSHmqPWk zTgrI5r|ZDJKeO{BTjf!uhtP~cMh=#S+tr&HW*Z9dF=fd3%$y6SwcN+|XT5nvix414 z>WaS zg^*y;8%=CD+NuPvWvH$zt78}>CJnhG>-)UB=ZQVmbP3pqqyc66(+yz>~F+meFoHy`0cY-YDk6p!@s zl>rhQT`z;5G}>HeTXEi2HmzGOONhCPs|L;}+wn_8AwzNRw99QAYreh^%HMC#iKi8s z+l4W*Ba+x}r~INNrJhwm(TN41Ylo4TZNwv{tTC^?Q*EPy zsP)BjyY!}epL;Mg-?~SvgY4$)XdT~ntx=ChQ3raJ4(ltLuG9Yj9eyrL_t;g`?b2+` ztLj!@WFg{nO2mZ~*{JDBQ_SssEcv)-Ku+PBW&O4NnT~O60Zhnlbl24jFYbA&bwoH^ zohglz$k|I+NJQo8TvPCGsR;>Bz3}2b4Hin^*=Tto$0~`Wyz{ z3rL7FPRpa27=Ooi%=R)fQDL6TI5X8XIo0g{K(2Rh!iqea7eQ4Z`&o|u=S z_ZHYin`L)x#h`X;aIa$RbVx%Q_>t8+fH>?CJjB6EIh^FIpoNZUQ=Tg#(ATId!`?Ga zH@hN{qf?-NcosV9ayg1;B4{pM-r56xSfiGncs+*c-KZA=hUeLu5c^yr5+CVBO-Tt)K>rgZ5!@-B; zB&`nH4%UV2WGf|+OipKu8_`@rQ5D=<+-E9%96?s=SwJ~q=4+&aa+p?1)u>T)`T*Du zA;v87W-`+l~Jzbbs9VzpF9w&*}eb%KuFNBZdEb`G2Ro d{+7tU)BkUZXktJ@`e!P_-`)Ou=b-y@`w!^JiR1tP literal 0 HcmV?d00001 From f2248d3a05e5e411ccd19b29b25b7fdc7e22454b Mon Sep 17 00:00:00 2001 From: Robin Riclet Date: Tue, 24 Nov 2020 17:19:23 +0100 Subject: [PATCH 41/93] Fixed schema --- .../app/org/thp/thehive/models/TheHiveSchemaDefinition.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala b/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala index c0542d306f..ad5c3bb2d9 100644 --- a/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala +++ b/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala @@ -112,7 +112,7 @@ class TheHiveSchemaDefinition @Inject() extends Schema with UpdatableSchema { } .dbOperation[Database]("Add each tag to its Organisation's Custom taxonomy") { db => db.tryTransaction { implicit g => - db.labelFilter("Organisation")(Traversal.V()).toIterator.toTry { o => + db.labelFilter("Organisation")(Traversal.V()).unsafeHas("name", P.neq("admin")).toIterator.toTry { o => val customTaxo = Traversal.V(EntityId(o.id())).out("OrganisationTaxonomy").unsafeHas("namespace", "_freetags").head Traversal.V(EntityId(o.id())).unionFlat( _.out("OrganisationShare").out("ShareCase").out("CaseTag"), From 6234bc0626882725ff846fa70fb6ab1048a8ad6c Mon Sep 17 00:00:00 2001 From: Robin Riclet Date: Wed, 25 Nov 2020 16:10:54 +0100 Subject: [PATCH 42/93] Fixed unit test for taxonomy --- .../controllers/v1/TaxonomyCtrlTest.scala | 93 +++++++++++++------ thehive/test/resources/data/Taxonomy.json | 8 +- 2 files changed, 74 insertions(+), 27 deletions(-) diff --git a/thehive/test/org/thp/thehive/controllers/v1/TaxonomyCtrlTest.scala b/thehive/test/org/thp/thehive/controllers/v1/TaxonomyCtrlTest.scala index d08034f2c9..6c320635ac 100644 --- a/thehive/test/org/thp/thehive/controllers/v1/TaxonomyCtrlTest.scala +++ b/thehive/test/org/thp/thehive/controllers/v1/TaxonomyCtrlTest.scala @@ -2,11 +2,11 @@ package org.thp.thehive.controllers.v1 import org.thp.scalligraph.controllers.FakeTemporaryFile import org.thp.thehive.TestAppBuilder -import org.thp.thehive.dto.v1.{InputEntry, InputPredicate, InputTaxonomy, InputValue, OutputTag, OutputTaxonomy} +import org.thp.thehive.dto.v1._ import play.api.libs.Files import play.api.libs.json.Json -import play.api.mvc.{AnyContentAsMultipartFormData, MultipartFormData} import play.api.mvc.MultipartFormData.FilePart +import 
play.api.mvc.{AnyContentAsMultipartFormData, MultipartFormData} import play.api.test.{FakeRequest, PlaySpecification} case class TestTaxonomy( @@ -22,7 +22,7 @@ object TestTaxonomy { outputTaxonomy.namespace, outputTaxonomy.description, outputTaxonomy.version, - outputTaxonomy.tags.toList, + outputTaxonomy.tags.toList ) } @@ -39,15 +39,18 @@ class TaxonomyCtrlTest extends PlaySpecification with TestAppBuilder { InputPredicate("pred1", None, None, None), InputPredicate("pred2", None, None, None) ), - Some(List( - InputValue("pred1", List( - InputEntry("entry1", None, None, None, None)) - ), - InputValue("pred2", List( - InputEntry("entry2", None, None, None, None), - InputEntry("entry21", None, None, None, None) - )) - )) + Some( + List( + InputValue("pred1", List(InputEntry("entry1", None, None, None, None))), + InputValue( + "pred2", + List( + InputEntry("entry2", None, None, None, None), + InputEntry("entry21", None, None, None, None) + ) + ) + ) + ) ) "create a valid taxonomy" in testApp { app => @@ -113,7 +116,7 @@ class TaxonomyCtrlTest extends PlaySpecification with TestAppBuilder { .withHeaders("user" -> "certuser@thehive.local") val result = app[TaxonomyCtrl].get("taxonomy1")(request) - status(result) must beEqualTo(201).updateMessage(s => s"$s\n${contentAsString(result)}") + status(result) must beEqualTo(200).updateMessage(s => s"$s\n${contentAsString(result)}") val resultCase = contentAsJson(result).as[OutputTaxonomy] TestTaxonomy(resultCase) must_=== TestTaxonomy( @@ -178,27 +181,65 @@ class TaxonomyCtrlTest extends PlaySpecification with TestAppBuilder { (contentAsJson(result) \ "message").as[String] must contain("formatting") } - /* - "activate a taxonomy" in testApp { app => + "activate a taxonomy" in testApp { app => + val request1 = FakeRequest("GET", "/api/v1/taxonomy/taxonomy2") + .withHeaders("user" -> "certuser@thehive.local") + val result1 = app[TaxonomyCtrl].get("taxonomy2")(request1) + status(result1) must beEqualTo(404).updateMessage(s => s"$s\n${contentAsString(result1)}") + + val request2 = FakeRequest("PUT", "/api/v1/taxonomy/taxonomy2") + .withHeaders("user" -> "admin@thehive.local") + val result2 = app[TaxonomyCtrl].toggleActivation("taxonomy2", isActive = true)(request2) + status(result2) must beEqualTo(204).updateMessage(s => s"$s\n${contentAsString(result2)}") + + val request3 = FakeRequest("GET", "/api/v1/taxonomy/taxonomy2") + .withHeaders("user" -> "certuser@thehive.local") + val result3 = app[TaxonomyCtrl].get("taxonomy2")(request3) + status(result3) must beEqualTo(200).updateMessage(s => s"$s\n${contentAsString(result3)}") + } + + "deactivate a taxonomy" in testApp { app => + val request1 = FakeRequest("GET", "/api/v1/taxonomy/taxonomy1") + .withHeaders("user" -> "certuser@thehive.local") + val result1 = app[TaxonomyCtrl].get("taxonomy1")(request1) + status(result1) must beEqualTo(200).updateMessage(s => s"$s\n${contentAsString(result1)}") - } + val request2 = FakeRequest("PUT", "/api/v1/taxonomy/taxonomy1/deactivate") + .withHeaders("user" -> "admin@thehive.local") + val result2 = app[TaxonomyCtrl].toggleActivation("taxonomy1", isActive = false)(request2) + status(result2) must beEqualTo(204).updateMessage(s => s"$s\n${contentAsString(result2)}") - "deactivate a taxonomy" in testApp { app => + val request3 = FakeRequest("GET", "/api/v1/taxonomy/taxonomy1") + .withHeaders("user" -> "certuser@thehive.local") + val result3 = app[TaxonomyCtrl].get("taxonomy1")(request3) + status(result3) must beEqualTo(404).updateMessage(s => 
s"$s\n${contentAsString(result3)}") + } - } + "delete a taxonomy" in testApp { app => + val request1 = FakeRequest("GET", "/api/v1/taxonomy/taxonomy1") + .withHeaders("user" -> "certuser@thehive.local") + val result1 = app[TaxonomyCtrl].get("taxonomy1")(request1) + status(result1) must beEqualTo(200).updateMessage(s => s"$s\n${contentAsString(result1)}") - "delete a taxonomy" in testApp { app => + val request2 = FakeRequest("DELETE", "/api/v1/taxonomy/taxonomy1") + .withHeaders("user" -> "admin@thehive.local") + val result2 = app[TaxonomyCtrl].delete("taxonomy1")(request2) + status(result2) must beEqualTo(204).updateMessage(s => s"$s\n${contentAsString(result2)}") - } + val request3 = FakeRequest("GET", "/api/v1/taxonomy/taxonomy1") + .withHeaders("user" -> "certuser@thehive.local") + val result3 = app[TaxonomyCtrl].get("taxonomy1")(request3) + status(result3) must beEqualTo(404).updateMessage(s => s"$s\n${contentAsString(result3)}") + } - */ } - def multipartZipFile(name: String): MultipartFormData[Files.TemporaryFile] = MultipartFormData( + def multipartZipFile(name: String): MultipartFormData[Files.TemporaryFile] = // file must be place in test/resources/ - dataParts = Map.empty, - files = Seq(FilePart("file", name, Option("application/zip"), FakeTemporaryFile.fromResource(s"/$name"))), - badParts = Seq() - ) + MultipartFormData( + dataParts = Map.empty, + files = Seq(FilePart("file", name, Option("application/zip"), FakeTemporaryFile.fromResource(s"/$name"))), + badParts = Seq() + ) } diff --git a/thehive/test/resources/data/Taxonomy.json b/thehive/test/resources/data/Taxonomy.json index 500c39c010..5c661448dc 100644 --- a/thehive/test/resources/data/Taxonomy.json +++ b/thehive/test/resources/data/Taxonomy.json @@ -3,6 +3,12 @@ "id": "taxonomy1", "namespace": "taxonomy1", "description": "The taxonomy 1", - "version": "1" + "version": 1 + }, + { + "id": "taxonomy2", + "namespace": "taxonomy2", + "description": "The taxonomy 2", + "version": 1 } ] \ No newline at end of file From 872a7d5b6bce0b3ee65d230834ea007a4f7120a2 Mon Sep 17 00:00:00 2001 From: Robin Riclet Date: Wed, 25 Nov 2020 16:18:57 +0100 Subject: [PATCH 43/93] Fixed user permission test --- thehive/test/org/thp/thehive/controllers/v1/UserCtrlTest.scala | 1 - 1 file changed, 1 deletion(-) diff --git a/thehive/test/org/thp/thehive/controllers/v1/UserCtrlTest.scala b/thehive/test/org/thp/thehive/controllers/v1/UserCtrlTest.scala index e1831040c5..8a5773b794 100644 --- a/thehive/test/org/thp/thehive/controllers/v1/UserCtrlTest.scala +++ b/thehive/test/org/thp/thehive/controllers/v1/UserCtrlTest.scala @@ -109,7 +109,6 @@ class UserCtrlTest extends PlaySpecification with TestAppBuilder { Permissions.managePage, Permissions.manageObservable, Permissions.manageAlert, - Permissions.manageTaxonomy, Permissions.manageAction, Permissions.manageConfig, Permissions.accessTheHiveFS From 91861c56afeb7ac7c6ca72f90cca9295b37e0e09 Mon Sep 17 00:00:00 2001 From: Robin Riclet Date: Tue, 8 Dec 2020 11:37:36 +0100 Subject: [PATCH 44/93] Review changes --- .../thp/thehive/controllers/v1/DescribeCtrl.scala | 4 +++- .../thehive/models/TheHiveSchemaDefinition.scala | 1 - .../org/thp/thehive/services/TaxonomySrv.scala | 15 +++++++-------- 3 files changed, 10 insertions(+), 10 deletions(-) diff --git a/thehive/app/org/thp/thehive/controllers/v1/DescribeCtrl.scala b/thehive/app/org/thp/thehive/controllers/v1/DescribeCtrl.scala index 33e3fae52e..193aa4d5c0 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/DescribeCtrl.scala +++ 
b/thehive/app/org/thp/thehive/controllers/v1/DescribeCtrl.scala @@ -40,6 +40,7 @@ class DescribeCtrl @Inject() ( // pageCtrl: PageCtrl, profileCtrl: ProfileCtrl, taskCtrl: TaskCtrl, + taxonomyCtrl: TaxonomyCtrl, userCtrl: UserCtrl, customFieldSrv: CustomFieldSrv, impactStatusSrv: ImpactStatusSrv, @@ -102,7 +103,8 @@ class DescribeCtrl @Inject() ( EntityDescription("customField", customFieldCtrl.publicProperties.list.flatMap(propertyToJson("customField", _))), EntityDescription("observableType", observableTypeCtrl.publicProperties.list.flatMap(propertyToJson("observableType", _))), EntityDescription("organisation", organisationCtrl.publicProperties.list.flatMap(propertyToJson("organisation", _))), - EntityDescription("profile", profileCtrl.publicProperties.list.flatMap(propertyToJson("profile", _))) + EntityDescription("profile", profileCtrl.publicProperties.list.flatMap(propertyToJson("profile", _))), + EntityDescription("taxonomy", taxonomyCtrl.publicProperties.list.flatMap(propertyToJson("taxonomy", _))) // EntityDescription("dashboard", dashboardCtrl.publicProperties.list.flatMap(propertyToJson("dashboard", _))), // EntityDescription("page", pageCtrl.publicProperties.list.flatMap(propertyToJson("page", _))) ) ++ describeCortexEntity("case_artifact_job", "JobCtrl") ++ diff --git a/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala b/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala index ad5c3bb2d9..e0b4f9f09a 100644 --- a/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala +++ b/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala @@ -102,7 +102,6 @@ class TheHiveSchemaDefinition @Inject() extends Schema with UpdatableSchema { taxoVertex.property("namespace", "_freetags") taxoVertex.property("description", "Custom taxonomy") taxoVertex.property("version", 1) - taxoVertex.property("enabled", true) o.addEdge("OrganisationTaxonomy", taxoVertex) Success(()) case _ => Success(()) diff --git a/thehive/app/org/thp/thehive/services/TaxonomySrv.scala b/thehive/app/org/thp/thehive/services/TaxonomySrv.scala index 2051c64930..aab26143cf 100644 --- a/thehive/app/org/thp/thehive/services/TaxonomySrv.scala +++ b/thehive/app/org/thp/thehive/services/TaxonomySrv.scala @@ -27,9 +27,8 @@ class TaxonomySrv @Inject() ( def existsInOrganisation(namespace: String)(implicit graph: Graph, authContext: AuthContext): Boolean = { startTraversal - .has(_.namespace, namespace) - .in[OrganisationTaxonomy] - .v[Organisation] + .getByNamespace(namespace) + .organisations .current .exists } @@ -39,7 +38,6 @@ class TaxonomySrv @Inject() ( taxonomy <- createEntity(taxo) _ <- tags.toTry(t => taxonomyTagSrv.create(TaxonomyTag(), taxonomy, t)) richTaxonomy <- Try(RichTaxonomy(taxonomy, tags)) - _ <- activate(richTaxonomy._id) } yield richTaxonomy def createFreetag(organisation: Organisation with Entity)(implicit graph: Graph, authContext: AuthContext): Try[RichTaxonomy] = { @@ -59,7 +57,8 @@ class TaxonomySrv @Inject() ( taxo <- get(taxonomyId).getOrFail("Taxonomy") organisations <- Try(organisationSrv.startTraversal.filterNot(_ .out[OrganisationTaxonomy] - .filter(_.unsafeHas("namespace", taxo.namespace)) + .v[Taxonomy] + .has(_.namespace, taxo.namespace) ).toSeq) _ <- organisations.toTry(o => organisationTaxonomySrv.create(OrganisationTaxonomy(), o, taxo)) } yield Success(()) @@ -68,9 +67,9 @@ class TaxonomySrv @Inject() ( for { taxo <- get(taxonomyId).getOrFail("Taxonomy") _ <- Try(organisationSrv.startTraversal - .filterNot(_.unsafeHas("name", "admin")) + 
.hasNot(_.name, "admin") .outE[OrganisationTaxonomy] - .filter(_.otherV().unsafeHas("namespace", taxo.namespace)) + .filter(_.otherV.v[Taxonomy].has(_.namespace, taxo.namespace)) .remove()) } yield Success(()) @@ -80,7 +79,7 @@ object TaxonomyOps { implicit class TaxonomyOpsDefs(traversal: Traversal.V[Taxonomy]) { def get(idOrName: EntityId): Traversal.V[Taxonomy] = - traversal.getByIds(idOrName) + idOrName.fold(traversal.getByIds(_), getByNamespace) def getByNamespace(namespace: String): Traversal.V[Taxonomy] = traversal.has(_.namespace, namespace) From 4fe311197d200f9a8a5550a257501726d18a90ab Mon Sep 17 00:00:00 2001 From: Robin Riclet Date: Tue, 8 Dec 2020 11:50:07 +0100 Subject: [PATCH 45/93] Correct Scalligraph version --- ScalliGraph | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ScalliGraph b/ScalliGraph index 1a55a0db73..856e64f3e1 160000 --- a/ScalliGraph +++ b/ScalliGraph @@ -1 +1 @@ -Subproject commit 1a55a0db730460c6f548695251248934196b6ecc +Subproject commit 856e64f3e1b262821a9d5b8c402ebc13f7562f18 From 65189f5e89d6bb8454523d8f8f72e88aca96f7c2 Mon Sep 17 00:00:00 2001 From: Robin Riclet Date: Tue, 8 Dec 2020 12:12:24 +0100 Subject: [PATCH 46/93] Fixed build --- .../app/org/thp/thehive/controllers/v1/Properties.scala | 7 ------- 1 file changed, 7 deletions(-) diff --git a/thehive/app/org/thp/thehive/controllers/v1/Properties.scala b/thehive/app/org/thp/thehive/controllers/v1/Properties.scala index 92213ab6b3..b6dc5ef69c 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/Properties.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/Properties.scala @@ -498,11 +498,4 @@ class Properties @Inject() ( .property("description", UMapping.string)(_.field.readonly) .property("version", UMapping.int)(_.field.readonly) .build - - lazy val taxonomy: PublicProperties = - PublicPropertyListBuilder[Taxonomy] - .property("namespace", UMapping.string)(_.field.readonly) - .property("description", UMapping.string)(_.field.readonly) - .property("version", UMapping.int)(_.field.readonly) - .build } From 0bfdf13c3d0cfa7f5eee29f055f37064d44d6de1 Mon Sep 17 00:00:00 2001 From: Robin Riclet Date: Tue, 8 Dec 2020 15:12:45 +0100 Subject: [PATCH 47/93] Zip import works with folders --- .../thehive/controllers/v1/TaxonomyCtrl.scala | 4 +++- .../controllers/v1/TaxonomyCtrlTest.scala | 21 ++++++++++++++---- .../test/resources/machinetag-badformat.zip | Bin 4274 -> 4952 bytes thehive/test/resources/machinetag-folders.zip | Bin 0 -> 4578 bytes .../test/resources/machinetag-otherfiles.zip | Bin 3841 -> 4111 bytes thehive/test/resources/machinetag-present.zip | Bin 3941 -> 4595 bytes thehive/test/resources/machinetag.zip | Bin 4076 -> 4618 bytes 7 files changed, 20 insertions(+), 5 deletions(-) create mode 100644 thehive/test/resources/machinetag-folders.zip diff --git a/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala b/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala index e81c47a098..39bc1c0a49 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala @@ -91,7 +91,9 @@ class TaxonomyCtrl @Inject() ( .asScala for { - inputTaxos <- headers.toTry(h => parseJsonFile(zipFile, h)) + inputTaxos <- headers + .filter(h => h.getFileName.endsWith("machinetag.json")) + .toTry(parseJsonFile(zipFile, _)) richTaxos <- db.tryTransaction { implicit graph => inputTaxos.toTry(inputTaxo => createFromInput(inputTaxo)).map(_.toJson) } diff --git 
a/thehive/test/org/thp/thehive/controllers/v1/TaxonomyCtrlTest.scala b/thehive/test/org/thp/thehive/controllers/v1/TaxonomyCtrlTest.scala index 6c320635ac..68c4627b49 100644 --- a/thehive/test/org/thp/thehive/controllers/v1/TaxonomyCtrlTest.scala +++ b/thehive/test/org/thp/thehive/controllers/v1/TaxonomyCtrlTest.scala @@ -148,15 +148,28 @@ class TaxonomyCtrlTest extends PlaySpecification with TestAppBuilder { zipTaxos.size must beEqualTo(2) } - "return error if zip file contains other files than taxonomies" in testApp { app => + "import zip file with folders correctly" in testApp { app => + val request = FakeRequest("POST", "/api/v1/taxonomy/import-zip") + .withHeaders("user" -> "admin@thehive.local") + .withBody(AnyContentAsMultipartFormData(multipartZipFile("machinetag-folders.zip"))) + + val result = app[TaxonomyCtrl].importZip(request) + status(result) must beEqualTo(201).updateMessage(s => s"$s\n${contentAsString(result)}") + + val zipTaxos = contentAsJson(result).as[Seq[OutputTaxonomy]] + zipTaxos.size must beEqualTo(2) + } + + "return no error if zip file contains other files than taxonomies" in testApp { app => val request = FakeRequest("POST", "/api/v1/taxonomy/import-zip") .withHeaders("user" -> "admin@thehive.local") .withBody(AnyContentAsMultipartFormData(multipartZipFile("machinetag-otherfiles.zip"))) val result = app[TaxonomyCtrl].importZip(request) - status(result) must beEqualTo(400).updateMessage(s => s"$s\n${contentAsString(result)}") - (contentAsJson(result) \ "type").as[String] must beEqualTo("BadRequest") - (contentAsJson(result) \ "message").as[String] must contain("formatting") + status(result) must beEqualTo(201).updateMessage(s => s"$s\n${contentAsString(result)}") + + val zipTaxos = contentAsJson(result).as[Seq[OutputTaxonomy]] + zipTaxos.size must beEqualTo(1) } "return error if zip file contains an already present taxonomy" in testApp { app => diff --git a/thehive/test/resources/machinetag-badformat.zip b/thehive/test/resources/machinetag-badformat.zip index aae10498e3ed41fec6c29969e841a5ab9785e8d3..f18619f1843e27bb23dd788ef08717b00ec20bd1 100644 GIT binary patch delta 844 zcmdm_ctfo|z?+#xgaHH$N;?9<3@9PWpumuun4FQBms*mTuA7vYl9peTn^>YB8p6xK z-rICOz7L2&bZG@Q10%}|W(Ec@fiO}F%}8+!Bh{g1T0%^11eylJD5g%sZW(!+E&l2xzj*$w=`uxko^PkClOyL6wPtK~@lCGys~u?t%aS delta 285 zcmcbiwn=gF1wJVyJ_ZGbq{Ni8{G!~%61}YA{JhW*UIuoa4}0Q4SmMK;%{u&_8Eu$Z zL>M@LFte&CkO2zj8#6En0QKi4CTC>krIsY7TOv$I*WDT~dw5U0;*qTz|5WmNF*1oT zLyRa%sR%UhPWy9_k%1wToq+-51Q^)T2x8%P(BuL^iOFvTMIu0U!@!nCT_y&CE{QL# e;AUWCdBM!U044&wS=m4)FahB%ApM9R!~+16rA+Yv diff --git a/thehive/test/resources/machinetag-folders.zip b/thehive/test/resources/machinetag-folders.zip new file mode 100644 index 0000000000000000000000000000000000000000..f11bf049dbec9bf6c70ae2539ab0bc0ca5e5a2da GIT binary patch literal 4578 zcmbW5Wl$VSw#Nqx7TgKJf)1KM(BLkEGdO_&VQ>NjcegydR^2^a-PQf&*M0iKIsf{rDWRYe1AaxZudn*=&Hpa=09t^VgPom&J%@{_ zyMw)hod=h;77hSi7rp=W2fxYR^SwJZ010&$1pxTHsQsp405JZA*2n|^(EJ;kov9hb z%HG1o)RNQM*}?w*;J;t<#|3|ZkL!=Z7Mn?yzq6@zq+07le3Wsmb8g^K&_P7-)asP7 z)8Z4yGhtE!X2He9)<5aWf2?>9XGWpZz=o0+I)-}eBTzXVCwYHjEB-(TD?mVl?ZjNV9QNdz%E~Xqv6VYWpXv>&& zFa7~PV@eSOc|ouqGgBnc`Kn$RU!HuQ>#VF?z;}y@B7EXR`J}9x{b{+@xqN|*F>9WSB_bt9Y$`fT6+69M2Tsv&M4C7}C{;OYxtV946#`t6B*{CV zEVsOfb#-v**Z)X4*ofY~Vm&ACHn^eQJ~hmn9jFP_Dk}@(_jDp$RLsL-OckzoObUF}{e33gh@my$19$eADmbhSW(CkLe-d0O!o};^)ovkrg^OF5(JydlM)w5g=^>%m zJsaqg6?80VjT6N5vF8TTOIDnQLtC|0-^wFNPd znAJJPvk1B1exNzU-9g{70hru~XFeJcL9N+#nXy(@Y~rSlMiZ>Nr4?0F&+x=VDq{(? 
z=J2Xxr9Fhur>BSVtL$`ji*&JF@@8lGv-6;pZZ@FoZ7v5MpLYPI7l4Dq3z)=$IO)7k zIUPxAT}(1>3R2aAx}wkrqAz}JFLreFgk`fpsDkj0*NrgwEZxm)EI6QR4NQ{}EwQBR zx5lAMEFufosrCYr*rOAya%G1e1>HHPR(8LVl5EmM$v!oVYM`Z%YwY4=ChzW_}`PSw@GT150$5H+*ClgWO~L zg0~BDGDSBJx25Mi3+`fiGV4lGZz@{ie*6_QL&~8{y69w!BBw?ZXL}xtM}BwW`0a(- zVXeA=^IP~iZ(&lXG&=GR2)aiP#D=F97$rVMuTW?-MK>c~J&3ib!6Nqtt2d*0zM2&J z`17IprYXq^-SCrb)}!z=nFzDU&C;E`kWMeN3l}zmRRJ^uYc|eg!hxJ1kXPRac=X7G zz>&EvlN_v;_jCb2Til3lj~B%~8G@z()QA;l36ABPIa4ctT+JbnceB8nD%_$gZO;(j z6(T3fTLe|F^xs%I&k4MFClNV}F2B?eAKhiM0+?jIOoiy#^l2gDn2i%>>8ZIoe{_~Ti% z{+!T#rJqLBU-{q$8G>gziWeFt6eRW8hjNazl^w}d$LojhTDaD; zz>yQ}8u3?*TQ6t*A;PPO?&q=kqi(45%PLw%M9VUKTYb&n`gG`nzi2qi>E#IF986Hj z+)MJ1U!Bw~jNXdMMMm9`J|^+qA{*DeL=*9q6ot)mMZXE)e$DMIAyrq9m$J~p-9Gxd z=q@==H#F4fGlJ9Qz8Rr#)vzgFz3!e zqM7s0ZL!W4mZJwJbq9r93R%H)%hMi;22d@+pm&E$2UjLvDqTbcDHUe%f6gRq^DwJq z98r9@p0hspHAV`d^pqPD1|km_XgC`tmPx)zSvAq+6RnpRrHa%r{+PNhiSX?IG%VXx zv;w52^v4EzmS)htS7w57FFf7FYcGhQ25>?@8;rfrLk$VcNiT{=(5{`Wd?5|{X8e{Q zbPgj9{gD3J?qKTaNW;ky36R%no1erJFP}a+8FoG57PoJZ&nVb<}w?@Z72; ziRE4P-mNdiq&K+S^ic3)az`(X@-*{GpL%aNPf8?nC4SX4%SH(ouE8-upbPVK6MiY7 zZhK#F3lD)R)Gl$##^2@?3KOpo`=*H5;u>r@>X z=DTl+N6kDwX!gE(zN!teo`LTwvfEmbq|xa0o);-(?6V|+s=fG+&2x~iybHc#G`Qtq zu2*5RTz&Isj(bI)=fXj;-t)<;KE}p!nZ+E6bot3eWf@}WF5$bww)W)~tDL%OFanY4 zC`ZPIsYB^!H{GOY6Vc!-j56OVzxMjBa);GP$3-lbj;iTIO=H{X)XB!hu9I?B8YN%T z5ZH0H$}!yG$UPoJsMJ|MqbJU3y^wjQNk)15F2FLE+>`s< zgeuCLT7n(OWO1=3D@geK9id-s|NgCDV)9fAMiRoQY5S4vp|~+u$z5nB9UIz9S&<_d zK{Dw9rdgxWfEvT>EW>Y8+ujm_czQ%T#ccgww$J*-b!6HCc9OaC-{&1Wy`$uSAA)^D zj}6yq*yx!FD{(imGT~4L<4vw*S_v>(RALB!URm#kD`{|!x4Ki=rDV+uO{m&a?n*wU zvFj4;>GM4TFIC?8$$BT-)e9_rJMHYS)ex;8R>#p8(K8Q}=3`2-gMEyd&dFMYDVf!+ zj9|gK!A>t@=hnJ9Z%aJvP@^Bieqsw0_dn6ZeRfudSaDZwXl{rSwp!??JYIC&@2r2F z%%hgOHF~_YSL{;RAXYi2s;cT%8`QZE{IPvgXWwR*bWB@EWvoDWq<>?g_` zP!4A^Q(KGwQr^CJ3;(_Aek8`=qVpQZYEZH59bs1^yVFn_|M|}5ckKN5bHhCi8QcQ2`zVt9lRYvo z?}NnB=AMb&gff@#)=bjqFzfeAoV(IlGa4y1*HkRLt_2`a(BR-tlp=bTk^&%CI1~TSO9rLWB{gxRE#A61<)y zm+3Or%2`+fV3YpzL&eyHc}gn6CavaO3CEPdY9mqHqkHNvP5}Db&vl4*YsMzC1mQ`Q z@B6^@KCCq~V(7z*X7VedF#BmwiN>pVvU<8ID`OP~wD28txcs0pxP1bc z5F$iMmFtL-1R!S=3={ltSw>UwDtv!~yex`2w-s;aSyQXY7KRi*X?L@! zy+ww}k^|G|n?UE>%ETCH)O(H-VTZZKv0G-hFm&|}!O;Ej&5~r*AZN>cO~)*%o>Cz# zvn?YliLu301#2JZ-7NkH2pJq<&(e4T z_!B26wDD@><#qfs`AY4@r)Jm7!GZeEYxHa!K!Z%w>7Qd-cV!_$6@_nS^l$CTL=C+r zDnX(=RMau22>@m5NeYki_Xv&XkUTQCavky<&mepJ_zTQ4sqX=5Fa4sYdwG^ z%7BjC(K&TtGwIyN`ML|Z;1vBxyU=uC>^^UJ!9xB?Pus&JIUTgb-#2KQR{NlXxhC|s z(AH^hR1ns7Kr7{lSne;`)2p|e-li=bw=eAJl$eS+vvTzw7^>+y@uTNEYZn@`DPWQ% zBmK;pF)Zshn_5wy-nCTP4ifv((Eh3Aj>?hlB(ac4;2WATPZ>@^uC=Sr^8mRINwbC{~9b5e)IQTgawX*1~NgMrQtCpc&*mv>*|7chm z3O^M;+%Jjcvvw@Yc_}X@#rxXEWzcAQ@|Q|;{E7LuDJkd6{s7!kq>Whi5JRjKfX9jX z#YYH5qlKA)fy3ZN!j`xHPulwOyvN1S*D9HX;;Tw$iFJ9rn;9$5h?)itLUjIAt4i^{QvI3pKijv#IN1o_Y(d=|I3;yqa5r86 literal 0 HcmV?d00001 diff --git a/thehive/test/resources/machinetag-otherfiles.zip b/thehive/test/resources/machinetag-otherfiles.zip index cac42ffef199dfb9148db6dd29e41cdd970023df..81501841780244d906cd544f35dd59b96ee549ea 100644 GIT binary patch delta 447 zcmZpa>sJT~@MdNaVE_T+(vCnd14@W7C@|zECTC>krIsY7>*kkaq!y)R=A;(uhlcPn zuupC}A3qg{L3C*aHv=Qf3uXoeFfl<&N(rat$%%ZT>|sDnKs?bZ&QcYpN`&QlS;hHz zU|Z95x5mpJ-UG6ABHV-3XXxP>;LXYgGL{(#-5D7eHt~UY0GMTfdjJ3c delta 268 zcmeBIXq2-G@MdNaVc-Bl;gpI%1}IPh(mV_b3_1D5C25&Csd^<9C7~g_4D8|W_ryc- z#3gYm0zfsniOCt6d8s9d>6Us~#rb()b?LfW<7E%;iB~+bb+a{NJ+BkY2Hq(gY$`z0 zVK%^Q+|meQ;j?dY8NW^h$U+#{(pU#%60$zNw1S&~k>v$50|S@{@MdKLnZN{uJAkw? 
HABYD47K=o= diff --git a/thehive/test/resources/machinetag-present.zip b/thehive/test/resources/machinetag-present.zip index 07a6812f6540fd60c1a27c077326f18742418161..bcaa2464e5e1f8097cfab27b37fb032699b6b47c 100644 GIT binary patch delta 905 zcmaDV_gT3N;?9<3@9PYpumuun4FQBms*mTu3J!)TAZ3!q8}Q<%fP;* z>3sY$AO_K;72FJrEH9WD7{CO=Kua_OMX?yD3N+CYV&;6HSul)d<^(AV6)c*;dJ#_4 z%PP*#13NEWcWeB5h;bY3YI*BHKH5`(>OE=f2BLV=wc~ue8xVtNG$%p4$-%(E@I0jg z9&Q4R3=CQr2J3(W4rVeo2S5I>2kc;=MU&t2ZU8$NTYSLc=dUA(g)oVP*kNQ6VTMFF zw)lZq0JQ(FBgj)&Y#f>?y?ocu;WVsfsah#tre7}(Odm6rj(jb{ZV_?Q@&7h> F1OP@pItm6DUuybN` zx5mpJ-UBjXqn$2eJ;>$C_-(g@*j@y*v>1p%G@9iQk8m(>Fl1I0!9!%eF))Ns-DIhc z<|YBKqtc;{0vf$}F=IV1*j3m97Z%um9YHLFSwsXiBa;X-B-XG6Ho{;}AX65}WR$Q6 z2mRy>ewoQ?`~vkL&%wZ!#wkE1iarDfYw%+RBEpH-VgzQ>Uq@CZuuW31bcPv0Kqu;? ipgJ!Aq6wh~oAZz&0@Hb{Y#=AF0^wsu1_nU^5Dx(EGrsWv delta 285 zcmeBDc_VKZ;LXe;!oUH9DOE**3{Y@@iGe|YL4hGRF*zeMFSR5w-9j&`I6p5mgqMLm zMt5tx?BPA}ibu9?yr;`JIgCj}1y!*nLUB4m@#a}f^}KE{tBQ(GE@Nk8V1QW#vwBM- zh(*Za$?}2{lP3v^M1U-Yfh~=xKm!q4u{$Haw1S&~k>v$50|S@{@MdKLnZN{udw{eQ HKZpkaMhi+? From 0d1a8b7638e46720def087992e25bf8524e8910f Mon Sep 17 00:00:00 2001 From: Nabil Adouani Date: Wed, 9 Dec 2020 06:30:48 +0100 Subject: [PATCH 48/93] #1668 WIP: Add taxonomies list admin page --- frontend/app/index.html | 2 + frontend/app/scripts/app.js | 15 +++ .../admin/taxonomy/TaxonomyListCtrl.js | 102 ++++++++++++++++++ .../app/scripts/services/api/ProfileSrv.js | 6 +- .../app/scripts/services/api/TaxonomySrv.js | 78 ++++++++++++++ .../views/components/header.component.html | 6 ++ .../views/partials/admin/taxonomy/import.html | 26 +++++ .../views/partials/admin/taxonomy/list.html | 56 ++++++++++ .../partials/admin/taxonomy/list/toolbar.html | 12 +++ 9 files changed, 302 insertions(+), 1 deletion(-) create mode 100644 frontend/app/scripts/controllers/admin/taxonomy/TaxonomyListCtrl.js create mode 100644 frontend/app/scripts/services/api/TaxonomySrv.js create mode 100644 frontend/app/views/partials/admin/taxonomy/import.html create mode 100644 frontend/app/views/partials/admin/taxonomy/list.html create mode 100644 frontend/app/views/partials/admin/taxonomy/list/toolbar.html diff --git a/frontend/app/index.html b/frontend/app/index.html index 800b08a1ae..9a4760768f 100644 --- a/frontend/app/index.html +++ b/frontend/app/index.html @@ -166,6 +166,7 @@ + @@ -292,6 +293,7 @@ + diff --git a/frontend/app/scripts/app.js b/frontend/app/scripts/app.js index 800a72397d..80d8e764a0 100644 --- a/frontend/app/scripts/app.js +++ b/frontend/app/scripts/app.js @@ -217,6 +217,21 @@ angular.module('thehive', [ permissions: ['manageProfile'] } }) + .state('app.administration.taxonomies', { + url: '/taxonomies', + templateUrl: 'views/partials/admin/taxonomy/list.html', + controller: 'TaxonomyListCtrl', + controllerAs: '$vm', + title: 'Taxonomies administration', + resolve: { + appConfig: function(VersionSrv) { + return VersionSrv.get(); + } + }, + guard: { + permissions: ['manageTaxonomy'] + } + }) .state('app.administration.organisations', { url: '/organisations', templateUrl: 'views/partials/admin/organisation/list.html', diff --git a/frontend/app/scripts/controllers/admin/taxonomy/TaxonomyListCtrl.js b/frontend/app/scripts/controllers/admin/taxonomy/TaxonomyListCtrl.js new file mode 100644 index 0000000000..47661db02f --- /dev/null +++ b/frontend/app/scripts/controllers/admin/taxonomy/TaxonomyListCtrl.js @@ -0,0 +1,102 @@ +(function() { + 'use strict'; + + angular.module('theHiveControllers') + .controller('TaxonomyListCtrl', TaxonomyListCtrl) + 
.controller('TaxonomyImportCtrl', TaxonomyImportCtrl); + + function TaxonomyListCtrl($uibModal, TaxonomySrv, NotificationSrv, ModalSrv, appConfig) { + var self = this; + + this.appConfig = appConfig; + + self.load = function() { + TaxonomySrv.list() + .then(function(response) { + self.list = response; + }) + .catch(function(rejection) { + NotificationSrv.error('Taxonomies management', rejection.data, rejection.status); + }); + }; + + self.import = function () { + var modalInstance = $uibModal.open({ + animation: true, + templateUrl: 'views/partials/admin/taxonomy/import.html', + controller: 'TaxonomyImportCtrl', + controllerAs: '$vm', + size: 'lg' + }); + + modalInstance.result + .then(function() { + self.load(); + }) + .catch(function(err){ + if(err && !_.isString(err)) { + NotificationSrv.error('Taxonomies import', err.data, err.status); + } + }); + }; + + this.toggleActive = function(id, active) { + TaxonomySrv.toggleActive(id, active) + .then(function() { + NotificationSrv.log('Taxonomy has been successfully ' + active ? 'activated' : 'deactivated', 'success'); + + self.load(); + }) + .catch(function(err){ + if(err && !_.isString(err)) { + NotificationSrv.error('Taxonomies ' + active ? 'activation' : 'deactivation', err.data, err.status); + } + }); + }; + + self.update = function(id, taxonomy) { + // TODO + // TaxonomySrv.update(id, _.pick(taxonomy, '...')) + TaxonomySrv.update(id, _.pick(taxonomy, '...')) + .then(function(/*response*/) { + self.load(); + NotificationSrv.log('Taxonomy updated successfully', 'success'); + }) + .catch(function(err) { + NotificationSrv.error('Error', 'Taxonomy update failed', err.status); + }); + }; + + self.create = function(taxonomy) { + TaxonomySrv.create(taxonomy) + .then(function(/*response*/) { + self.load(); + NotificationSrv.log('Taxonomy created successfully', 'success'); + }) + .catch(function(err) { + NotificationSrv.error('Error', 'Taxonomy creation failed', err.status); + }); + }; + + self.$onInit = function() { + self.load(); + }; + } + + function TaxonomyImportCtrl($uibModalInstance, TaxonomySrv, NotificationSrv) { + this.formData = {}; + + this.ok = function () { + TaxonomySrv.import(this.formData) + .then(function() { + $uibModalInstance.close(); + }, function(response) { + NotificationSrv.error('TaxonomyImportCtrl', response.data, response.status); + }); + }; + + this.cancel = function () { + $uibModalInstance.dismiss('cancel'); + }; + } +})(); diff --git a/frontend/app/scripts/services/api/ProfileSrv.js b/frontend/app/scripts/services/api/ProfileSrv.js index 40ebe5af40..303809f06d 100644 --- a/frontend/app/scripts/services/api/ProfileSrv.js +++ b/frontend/app/scripts/services/api/ProfileSrv.js @@ -16,6 +16,7 @@ 'manageCustomField', 'manageConfig', 'manageTag', + 'manageTaxonomy', 'manageProfile', 'manageAnalyzerTemplate', 'manageObservableTemplate' @@ -25,6 +26,7 @@ manageOrganisation: 'Manage organisations', manageCustomField: 'Manage custom fields', manageConfig: 'Manage configurations', + manageTaxonomy: 'Manage taxonomies', manageTag: 'Manage tags', manageProfile: 'Manage profiles', manageAnalyzerTemplate: 'Manage analyzer templates', @@ -61,7 +63,9 @@ }; this.list = function() { - return $http.get(baseUrl); + return $http.get(baseUrl, {params: { + range: 'all' + }}); }; this.get = function(name) { diff --git a/frontend/app/scripts/services/api/TaxonomySrv.js b/frontend/app/scripts/services/api/TaxonomySrv.js new file mode 100644 index 0000000000..747b9a4a52 --- /dev/null +++ b/frontend/app/scripts/services/api/TaxonomySrv.js @@ 
-0,0 +1,78 @@ +(function() { + 'use strict'; + angular.module('theHiveServices') + .service('TaxonomySrv', function($http, QuerySrv) { + // var self = this; + var baseUrl = './api/v1/taxonomy'; + + this.list = function() { + // return $http.get(baseUrl, {params: { + // range: 'all' + // }}); + // + return QuerySrv.call('v1', [ + { _name: 'listTaxonomy' } + ], { + name:'list-taxonomies' + }); + + //listTaxonomies + }; + + this.get = function(name) { + return $http.get(baseUrl + '/' + name); + }; + + this.toggleActive = function(id, active) { + return $http.put([baseUrl, id, !!active ? 'activate' : 'deactivate'].join('/')); + }; + + // this.map = function() { + // return self.list() + // .then(function(response) { + // return _.indexBy(response.data, 'name'); + // }); + // }; + + this.create = function(profile) { + return $http.post(baseUrl, profile); + }; + + this.update = function(id, profile) { + return $http.patch(baseUrl + '/' + id, profile); + }; + + this.remove = function(id) { + return $http.delete(baseUrl + '/' + id); + }; + + this.import = function(post) { + var postData = { + file: post.attachment + }; + + return $http({ + method: 'POST', + url: baseUrl + '/import-zip', + headers: { + 'Content-Type': undefined + }, + transformRequest: function (data) { + var formData = new FormData(), + copy = angular.copy(data, {}); + + angular.forEach(data, function (value, key) { + if (Object.getPrototypeOf(value) instanceof Blob || Object.getPrototypeOf(value) instanceof File) { + formData.append(key, value); + delete copy[key]; + } + }); + + return formData; + }, + data: postData + }); + }; + }); + +})(); diff --git a/frontend/app/views/components/header.component.html b/frontend/app/views/components/header.component.html index 497dcd2a85..ca85b74689 100644 --- a/frontend/app/views/components/header.component.html +++ b/frontend/app/views/components/header.component.html @@ -98,6 +98,12 @@ Case custom fields +
  • + + + Taxonomies + +
  • diff --git a/frontend/app/views/partials/admin/taxonomy/import.html b/frontend/app/views/partials/admin/taxonomy/import.html new file mode 100644 index 0000000000..634dce0d94 --- /dev/null +++ b/frontend/app/views/partials/admin/taxonomy/import.html @@ -0,0 +1,26 @@ +
    + + + +
    diff --git a/frontend/app/views/partials/admin/taxonomy/list.html b/frontend/app/views/partials/admin/taxonomy/list.html new file mode 100644 index 0000000000..e8eb9c5aa0 --- /dev/null +++ b/frontend/app/views/partials/admin/taxonomy/list.html @@ -0,0 +1,56 @@ +
    +
    +
    +
    +

    List of taxonomies

    +
    +
    +
    + +
    +
    +
    No taxonomies found.
    +
    +
    + + +
    +
    + + + + + + + + + + + + + + + + + +
    Name Description # Tags
    +
    + {{::taxonomy.namespace}} +
    +
    + {{::taxonomy.description}} + + {{::taxonomy.tags.length}} + + + Edit + + + Delete +
    +
    +
    +
    +
    +
    +
    diff --git a/frontend/app/views/partials/admin/taxonomy/list/toolbar.html b/frontend/app/views/partials/admin/taxonomy/list/toolbar.html new file mode 100644 index 0000000000..e7b922a3d6 --- /dev/null +++ b/frontend/app/views/partials/admin/taxonomy/list/toolbar.html @@ -0,0 +1,12 @@ +
    +
    + +
    +
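
Note: the import dialog added above posts its archive to the /api/v1/taxonomy/import-zip endpoint exercised by TaxonomyCtrlTest.scala earlier in this series. A minimal spec sketch of that round trip, assuming the testApp builder and multipartZipFile helper from that test file (the archive name is a placeholder, not a fixture shipped by this patch):

    // Sketch only - mirrors the existing TaxonomyCtrlTest conventions; "machinetag.zip" is illustrative.
    "import a taxonomy zip uploaded from the admin page" in testApp { app =>
      val request = FakeRequest("POST", "/api/v1/taxonomy/import-zip")
        .withHeaders("user" -> "admin@thehive.local") // a user holding the manageTaxonomy permission
        .withBody(AnyContentAsMultipartFormData(multipartZipFile("machinetag.zip")))

      val result = app[TaxonomyCtrl].importZip(request)
      // One result entry per machinetag.json found in the archive; the endpoint answers 201 Created
      status(result) must beEqualTo(201).updateMessage(s => s"$s\n${contentAsString(result)}")
    }
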
    From eecd1919b09677fc85b71426771ba857dee423ac Mon Sep 17 00:00:00 2001 From: Robin Riclet Date: Thu, 10 Dec 2020 11:48:46 +0100 Subject: [PATCH 49/93] Changed freetags namespace & fixed visibily issue for listing taxonomies & added enabled property --- .../org/thp/thehive/dto/v1/Taxonomy.scala | 6 +- .../thehive/controllers/v1/Conversion.scala | 12 +++ .../thehive/controllers/v1/Properties.scala | 2 + .../thehive/controllers/v1/TaxonomyCtrl.scala | 35 +++----- .../controllers/v1/TaxonomyRenderer.scala | 45 +++++++++++ .../models/TheHiveSchemaDefinition.scala | 8 +- .../thp/thehive/services/TaxonomySrv.scala | 79 +++++++++++-------- 7 files changed, 126 insertions(+), 61 deletions(-) create mode 100644 thehive/app/org/thp/thehive/controllers/v1/TaxonomyRenderer.scala diff --git a/dto/src/main/scala/org/thp/thehive/dto/v1/Taxonomy.scala b/dto/src/main/scala/org/thp/thehive/dto/v1/Taxonomy.scala index 7081347184..3835c4c0bc 100644 --- a/dto/src/main/scala/org/thp/thehive/dto/v1/Taxonomy.scala +++ b/dto/src/main/scala/org/thp/thehive/dto/v1/Taxonomy.scala @@ -1,8 +1,7 @@ package org.thp.thehive.dto.v1 import java.util.Date - -import play.api.libs.json.{Json, OFormat} +import play.api.libs.json.{JsObject, Json, OFormat} /* Format based on : @@ -65,7 +64,8 @@ case class OutputTaxonomy( namespace: String, description: String, version: Int, - tags: Seq[OutputTag] + tags: Seq[OutputTag], + extraData: JsObject ) object OutputTaxonomy { diff --git a/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala b/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala index d2518f459f..e12c8aa200 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala @@ -266,9 +266,21 @@ object Conversion { .withFieldComputed(_._id, _._id.toString) .withFieldConst(_._type, "Taxonomy") .withFieldComputed(_.tags, _.tags.map(_.toOutput)) + .withFieldConst(_.extraData, JsObject.empty) .transform ) + implicit val taxonomyWithStatsOutput: Renderer.Aux[(RichTaxonomy, JsObject), OutputTaxonomy] = + Renderer.toJson[(RichTaxonomy, JsObject), OutputTaxonomy] { taxoWithExtraData => + taxoWithExtraData._1 + .into[OutputTaxonomy] + .withFieldComputed(_._id, _._id.toString) + .withFieldConst(_._type, "Taxonomy") + .withFieldComputed(_.tags, _.tags.map(_.toOutput)) + .withFieldConst(_.extraData, taxoWithExtraData._2) + .transform + } + implicit val tagOutput: Renderer.Aux[Tag, OutputTag] = Renderer.toJson[Tag, OutputTag]( _.into[OutputTag] diff --git a/thehive/app/org/thp/thehive/controllers/v1/Properties.scala b/thehive/app/org/thp/thehive/controllers/v1/Properties.scala index b6dc5ef69c..54ff5f31a4 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/Properties.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/Properties.scala @@ -21,6 +21,7 @@ import org.thp.thehive.services.ObservableOps._ import org.thp.thehive.services.OrganisationOps._ import org.thp.thehive.services.TagOps._ import org.thp.thehive.services.TaskOps._ +import org.thp.thehive.services.TaxonomyOps._ import org.thp.thehive.services.UserOps._ import org.thp.thehive.services._ import play.api.libs.json.{JsObject, JsValue, Json} @@ -497,5 +498,6 @@ class Properties @Inject() ( .property("namespace", UMapping.string)(_.field.readonly) .property("description", UMapping.string)(_.field.readonly) .property("version", UMapping.int)(_.field.readonly) + .property("enabled", UMapping.boolean)(_.select(_.enabled).readonly) .build } diff --git 
a/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala b/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala index 39bc1c0a49..8c1358f137 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala @@ -14,29 +14,27 @@ import org.thp.scalligraph.{BadRequestError, EntityIdOrName, RichSeq} import org.thp.thehive.controllers.v1.Conversion._ import org.thp.thehive.dto.v1.InputTaxonomy import org.thp.thehive.models.{Permissions, RichTaxonomy, Tag, Taxonomy} -import org.thp.thehive.services.OrganisationOps._ import org.thp.thehive.services.TaxonomyOps._ -import org.thp.thehive.services.{OrganisationSrv, TagSrv, TaxonomySrv} +import org.thp.thehive.services.{TagSrv, TaxonomySrv} import play.api.libs.json.Json import play.api.mvc.{Action, AnyContent, Results} import scala.collection.JavaConverters._ -import scala.util.{Failure, Success, Try} +import scala.util.{Failure, Try} class TaxonomyCtrl @Inject() ( entrypoint: Entrypoint, properties: Properties, taxonomySrv: TaxonomySrv, - organisationSrv: OrganisationSrv, tagSrv: TagSrv, @Named("with-thehive-schema") implicit val db: Database -) extends QueryableCtrl { +) extends QueryableCtrl with TaxonomyRenderer { override val entityName: String = "taxonomy" override val publicProperties: PublicProperties = properties.taxonomy override val initialQuery: Query = Query.init[Traversal.V[Taxonomy]]("listTaxonomy", (graph, authContext) => - organisationSrv.get(authContext.organisation)(graph).taxonomies + taxonomySrv.startTraversal(graph).visible(authContext) ) override val getQuery: ParamQuery[EntityIdOrName] = Query.initWithParam[EntityIdOrName, Traversal.V[Taxonomy]]( @@ -48,8 +46,12 @@ class TaxonomyCtrl @Inject() ( Query.withParam[OutputParam, Traversal.V[Taxonomy], IteratorOutput]( "page", FieldsParser[OutputParam], - (range, traversal, _) => - traversal.richPage(range.from, range.to, range.extraData.contains("total"))(_.richTaxonomy) + { + case (OutputParam(from, to, extraData), taxoSteps, authContext) => + taxoSteps.richPage(from, to, extraData.contains("total")) { + _.richTaxonomyWithCustomRenderer(taxoStatsRenderer(extraData - "total")(authContext)) + } + } ) override val outputQuery: Query = Query.outputWithContext[RichTaxonomy, Traversal.V[Taxonomy]]((traversal, _) => @@ -59,17 +61,6 @@ class TaxonomyCtrl @Inject() ( Query[Traversal.V[Taxonomy], Traversal.V[Tag]]("tags", (traversal, _) => traversal.tags) ) - def list: Action[AnyContent] = - entrypoint("list taxonomies") - .authRoTransaction(db) { implicit request => implicit graph => - val taxos = taxonomySrv - .startTraversal - .visible - .richTaxonomy - .toSeq - Success(Results.Ok(taxos.toJson)) - } - def create: Action[AnyContent] = entrypoint("import taxonomy") .extract("taxonomy", FieldsParser[InputTaxonomy]) @@ -94,7 +85,7 @@ class TaxonomyCtrl @Inject() ( inputTaxos <- headers .filter(h => h.getFileName.endsWith("machinetag.json")) .toTry(parseJsonFile(zipFile, _)) - richTaxos <- db.tryTransaction { implicit graph => + richTaxos <- db.tryTransaction { implicit graph => inputTaxos.toTry(inputTaxo => createFromInput(inputTaxo)).map(_.toJson) } } yield Results.Created(richTaxos) @@ -128,9 +119,9 @@ class TaxonomyCtrl @Inject() ( if (inputTaxo.namespace.isEmpty) Failure(BadRequestError(s"A taxonomy with no namespace cannot be imported")) - else if (inputTaxo.namespace == "_freetags") + else if (inputTaxo.namespace.startsWith("_freetags")) Failure(BadRequestError(s"Namespace _freetags is 
restricted for TheHive")) - else if (taxonomySrv.existsInOrganisation(inputTaxo.namespace)) + else if (taxonomySrv.startTraversal.alreadyImported(inputTaxo.namespace)) Failure(BadRequestError(s"A taxonomy with namespace '${inputTaxo.namespace}' already exists in this organisation")) else for { diff --git a/thehive/app/org/thp/thehive/controllers/v1/TaxonomyRenderer.scala b/thehive/app/org/thp/thehive/controllers/v1/TaxonomyRenderer.scala new file mode 100644 index 0000000000..07835754be --- /dev/null +++ b/thehive/app/org/thp/thehive/controllers/v1/TaxonomyRenderer.scala @@ -0,0 +1,45 @@ +package org.thp.thehive.controllers.v1 + +import org.thp.scalligraph.auth.AuthContext +import org.thp.scalligraph.traversal.TraversalOps._ +import org.thp.thehive.services.TaxonomyOps._ +import org.thp.scalligraph.traversal.{Converter, Traversal} +import org.thp.thehive.models.Taxonomy +import play.api.libs.json._ + +import java.util.{Map => JMap} + +trait TaxonomyRenderer { + + def enabledStats: Traversal.V[Taxonomy] => Traversal[JsValue, Boolean, Converter[JsValue, Boolean]] = + _.enabled.domainMap(l => JsBoolean(l)) + + def taxoStatsRenderer(extraData: Set[String])(implicit + authContext: AuthContext + ): Traversal.V[Taxonomy] => Traversal[JsObject, JMap[String, Any], Converter[JsObject, JMap[String, Any]]] = { traversal => + def addData[G]( + name: String + )(f: Traversal.V[Taxonomy] => Traversal[JsValue, G, Converter[JsValue, G]]): Traversal[JsObject, JMap[String, Any], Converter[ + JsObject, + JMap[String, Any] + ]] => Traversal[JsObject, JMap[String, Any], Converter[JsObject, JMap[String, Any]]] = { t => + val dataTraversal = f(traversal.start) + t.onRawMap[JsObject, JMap[String, Any], Converter[JsObject, JMap[String, Any]]](_.by(dataTraversal.raw)) { jmap => + t.converter(jmap) + (name -> dataTraversal.converter(jmap.get(name).asInstanceOf[G])) + } + } + + if (extraData.isEmpty) traversal.constant2[JsObject, JMap[String, Any]](JsObject.empty) + else { + val dataName = extraData.toSeq + dataName.foldLeft[Traversal[JsObject, JMap[String, Any], Converter[JsObject, JMap[String, Any]]]]( + traversal.onRawMap[JsObject, JMap[String, Any], Converter[JsObject, JMap[String, Any]]](_.project(dataName.head, dataName.tail: _*))(_ => + JsObject.empty + ) + ) { + case (f, "enabled") => addData("enabled")(enabledStats)(f) + case (f, _) => f + } + } + } +} diff --git a/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala b/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala index e0b4f9f09a..c3af62d20a 100644 --- a/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala +++ b/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala @@ -93,13 +93,13 @@ class TheHiveSchemaDefinition @Inject() extends Schema with UpdatableSchema { db.tryTransaction { implicit g => // For each organisation, if there is no custom taxonomy, create it db.labelFilter("Organisation")(Traversal.V()).unsafeHas("name", P.neq("admin")).toIterator.toTry { o => - Traversal.V(EntityId(o.id)).out[OrganisationTaxonomy].v[Taxonomy].unsafeHas("namespace", "_freetags").headOption match { + Traversal.V(EntityId(o.id)).out[OrganisationTaxonomy].v[Taxonomy].unsafeHas("namespace", s"_freetags_${o.id()}").headOption match { case None => val taxoVertex = g.addVertex("Taxonomy") taxoVertex.property("_label", "Taxonomy") taxoVertex.property("_createdBy", "system@thehive.local") taxoVertex.property("_createdAt", new Date()) - taxoVertex.property("namespace", "_freetags") + taxoVertex.property("namespace", 
s"_freetags_${o.id()}") taxoVertex.property("description", "Custom taxonomy") taxoVertex.property("version", 1) o.addEdge("OrganisationTaxonomy", taxoVertex) @@ -112,7 +112,7 @@ class TheHiveSchemaDefinition @Inject() extends Schema with UpdatableSchema { .dbOperation[Database]("Add each tag to its Organisation's Custom taxonomy") { db => db.tryTransaction { implicit g => db.labelFilter("Organisation")(Traversal.V()).unsafeHas("name", P.neq("admin")).toIterator.toTry { o => - val customTaxo = Traversal.V(EntityId(o.id())).out("OrganisationTaxonomy").unsafeHas("namespace", "_freetags").head + val customTaxo = Traversal.V(EntityId(o.id())).out("OrganisationTaxonomy").unsafeHas("namespace", s"_freetags_${o.id()}").head Traversal.V(EntityId(o.id())).unionFlat( _.out("OrganisationShare").out("ShareCase").out("CaseTag"), _.out("OrganisationShare").out("ShareObservable").out("ObservableTag"), @@ -125,7 +125,7 @@ class TheHiveSchemaDefinition @Inject() extends Schema with UpdatableSchema { tag.property("predicate").value().toString, tag.property ("value").orElse("") ) - tag.property("namespace", "_freetags") + tag.property("namespace", s"_freetags_${o.id()}") tag.property("predicate", tagStr) tag.property("value").remove() customTaxo.addEdge("TaxonomyTag", tag) diff --git a/thehive/app/org/thp/thehive/services/TaxonomySrv.scala b/thehive/app/org/thp/thehive/services/TaxonomySrv.scala index aab26143cf..ec120177d5 100644 --- a/thehive/app/org/thp/thehive/services/TaxonomySrv.scala +++ b/thehive/app/org/thp/thehive/services/TaxonomySrv.scala @@ -1,20 +1,20 @@ package org.thp.thehive.services import java.util.{Map => JMap} - import javax.inject.{Inject, Named, Singleton} import org.apache.tinkerpop.gremlin.structure.Graph import org.thp.scalligraph.auth.AuthContext import org.thp.scalligraph.models.{Database, Entity} import org.thp.scalligraph.services.{EdgeSrv, VertexSrv} +import org.thp.scalligraph.traversal.Converter.Identity import org.thp.scalligraph.traversal.TraversalOps.TraversalOpsDefs import org.thp.scalligraph.traversal.{Converter, Traversal} -import org.thp.scalligraph.{EntityId, EntityIdOrName, RichSeq} +import org.thp.scalligraph.{BadRequestError, EntityId, EntityIdOrName, RichSeq} import org.thp.thehive.models._ import org.thp.thehive.services.OrganisationOps._ import org.thp.thehive.services.TaxonomyOps._ -import scala.util.{Success, Try} +import scala.util.{Failure, Success, Try} @Singleton class TaxonomySrv @Inject() ( @@ -25,14 +25,6 @@ class TaxonomySrv @Inject() ( val taxonomyTagSrv = new EdgeSrv[TaxonomyTag, Taxonomy, Tag] val organisationTaxonomySrv = new EdgeSrv[OrganisationTaxonomy, Organisation, Taxonomy] - def existsInOrganisation(namespace: String)(implicit graph: Graph, authContext: AuthContext): Boolean = { - startTraversal - .getByNamespace(namespace) - .organisations - .current - .exists - } - def create(taxo: Taxonomy, tags: Seq[Tag with Entity])(implicit graph: Graph, authContext: AuthContext): Try[RichTaxonomy] = for { taxonomy <- createEntity(taxo) @@ -41,7 +33,7 @@ class TaxonomySrv @Inject() ( } yield richTaxonomy def createFreetag(organisation: Organisation with Entity)(implicit graph: Graph, authContext: AuthContext): Try[RichTaxonomy] = { - val customTaxo = Taxonomy("_freetags", "Custom taxonomy", 1) + val customTaxo = Taxonomy(s"_freetags_${organisation._id}", "Custom taxonomy", 1) for { taxonomy <- createEntity(customTaxo) richTaxonomy <- Try(RichTaxonomy(taxonomy, Seq())) @@ -53,25 +45,23 @@ class TaxonomySrv @Inject() ( 
Try(startTraversal.getByNamespace(name)).getOrElse(startTraversal.limit(0)) def activate(taxonomyId: EntityIdOrName)(implicit graph: Graph, authContext: AuthContext): Try[Unit] = - for { - taxo <- get(taxonomyId).getOrFail("Taxonomy") - organisations <- Try(organisationSrv.startTraversal.filterNot(_ - .out[OrganisationTaxonomy] - .v[Taxonomy] - .has(_.namespace, taxo.namespace) - ).toSeq) - _ <- organisations.toTry(o => organisationTaxonomySrv.create(OrganisationTaxonomy(), o, taxo)) - } yield Success(()) - - def deactivate(taxonomyId: EntityIdOrName)(implicit graph: Graph, authContext: AuthContext): Try[Unit] = for { taxo <- get(taxonomyId).getOrFail("Taxonomy") - _ <- Try(organisationSrv.startTraversal - .hasNot(_.name, "admin") - .outE[OrganisationTaxonomy] - .filter(_.otherV.v[Taxonomy].has(_.namespace, taxo.namespace)) - .remove()) - } yield Success(()) + _ <- if (taxo.namespace.startsWith("_freetags")) Failure(BadRequestError("Cannot activate a freetags taxonomy")) + else Success(()) + _ <- organisationSrv.startTraversal + .filterNot(_.out[OrganisationTaxonomy].v[Taxonomy].has(_.namespace, taxo.namespace)) + .toSeq + .toTry(o => organisationTaxonomySrv.create(OrganisationTaxonomy(), o, taxo)) + } yield () + + def deactivate(taxonomyId: EntityIdOrName)(implicit graph: Graph): Try[Unit] = { + for { + taxo <- getOrFail(taxonomyId) + _ <- if (taxo.namespace.startsWith("_freetags")) Failure(BadRequestError("Cannot deactivate a freetags taxonomy")) + else Success(()) + } yield get(taxonomyId).inE[OrganisationTaxonomy].remove() + } } @@ -83,13 +73,21 @@ object TaxonomyOps { def getByNamespace(namespace: String): Traversal.V[Taxonomy] = traversal.has(_.namespace, namespace) - def visible(implicit authContext: AuthContext): Traversal.V[Taxonomy] = visible(authContext.organisation) + def visible(implicit authContext: AuthContext): Traversal.V[Taxonomy] = { + if (authContext.isPermitted(Permissions.manageTaxonomy)) + traversal + else + traversal.filter(_.organisations.get(authContext.organisation)) + } - def visible(organisationIdOrName: EntityIdOrName): Traversal.V[Taxonomy] = - traversal.filter(_.organisations.get(organisationIdOrName)) + def alreadyImported(namespace: String): Boolean = + traversal.getByNamespace(namespace).exists def organisations: Traversal.V[Organisation] = traversal.in[OrganisationTaxonomy].v[Organisation] + def enabled: Traversal[Boolean, Boolean, Identity[Boolean]] = + traversal.choose(_.organisations, true, false) + def tags: Traversal.V[Tag] = traversal.out[TaxonomyTag].v[Tag] def richTaxonomy: Traversal[RichTaxonomy, JMap[String, Any], Converter[RichTaxonomy, JMap[String, Any]]] = @@ -99,5 +97,22 @@ object TaxonomyOps { .by(_.tags.fold) ) .domainMap { case (taxonomy, tags) => RichTaxonomy(taxonomy, tags) } + + def richTaxonomyWithCustomRenderer[D, G, C <: Converter[D, G]](entityRenderer: Traversal.V[Taxonomy] => Traversal[D, G, C]): + Traversal[(RichTaxonomy, D), JMap[String, Any], Converter[(RichTaxonomy, D), JMap[String, Any]]] = + traversal + .project( + _.by + .by(_.tags.fold) + .by(_.enabled) + .by(entityRenderer) + ) + .domainMap { + case (taxo, tags, _, renderedEntity) => + RichTaxonomy( + taxo, + tags + ) -> renderedEntity + } } } From aac858a949419f46c8afa776a33f334ef1359ca3 Mon Sep 17 00:00:00 2001 From: Robin Riclet Date: Thu, 10 Dec 2020 14:09:14 +0100 Subject: [PATCH 50/93] Removed freetags from admin list taxonomies --- thehive/app/org/thp/thehive/services/TaxonomySrv.scala | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git 
a/thehive/app/org/thp/thehive/services/TaxonomySrv.scala b/thehive/app/org/thp/thehive/services/TaxonomySrv.scala index ec120177d5..a652192711 100644 --- a/thehive/app/org/thp/thehive/services/TaxonomySrv.scala +++ b/thehive/app/org/thp/thehive/services/TaxonomySrv.scala @@ -1,5 +1,7 @@ package org.thp.thehive.services +import org.apache.tinkerpop.gremlin.process.traversal.TextP + import java.util.{Map => JMap} import javax.inject.{Inject, Named, Singleton} import org.apache.tinkerpop.gremlin.structure.Graph @@ -75,11 +77,14 @@ object TaxonomyOps { def visible(implicit authContext: AuthContext): Traversal.V[Taxonomy] = { if (authContext.isPermitted(Permissions.manageTaxonomy)) - traversal + traversal.noFreetags else traversal.filter(_.organisations.get(authContext.organisation)) } + private def noFreetags: Traversal.V[Taxonomy] = + traversal.filterNot(_.has(_.namespace, TextP.startingWith("_freetags"))) + def alreadyImported(namespace: String): Boolean = traversal.getByNamespace(namespace).exists From c1a6f7dfc54d78be1771dcb7f892f57fb2939fde Mon Sep 17 00:00:00 2001 From: Robin Riclet Date: Thu, 10 Dec 2020 15:04:04 +0100 Subject: [PATCH 51/93] Zip import can fail and still continue importing --- .../thehive/controllers/v1/TaxonomyCtrl.scala | 18 +++++++++++++----- .../org/thp/thehive/services/TaxonomySrv.scala | 2 +- 2 files changed, 14 insertions(+), 6 deletions(-) diff --git a/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala b/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala index 8c1358f137..778f4e9e1f 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala @@ -16,11 +16,11 @@ import org.thp.thehive.dto.v1.InputTaxonomy import org.thp.thehive.models.{Permissions, RichTaxonomy, Tag, Taxonomy} import org.thp.thehive.services.TaxonomyOps._ import org.thp.thehive.services.{TagSrv, TaxonomySrv} -import play.api.libs.json.Json +import play.api.libs.json.{JsArray, Json} import play.api.mvc.{Action, AnyContent, Results} import scala.collection.JavaConverters._ -import scala.util.{Failure, Try} +import scala.util.{Failure, Success, Try} class TaxonomyCtrl @Inject() ( entrypoint: Entrypoint, @@ -85,9 +85,17 @@ class TaxonomyCtrl @Inject() ( inputTaxos <- headers .filter(h => h.getFileName.endsWith("machinetag.json")) .toTry(parseJsonFile(zipFile, _)) - richTaxos <- db.tryTransaction { implicit graph => - inputTaxos.toTry(inputTaxo => createFromInput(inputTaxo)).map(_.toJson) - } + richTaxos = inputTaxos.foldLeft[JsArray](JsArray.empty)((array, taxo) => { + val res = db.tryTransaction { implicit graph => + createFromInput(taxo) + } match { + case Failure(e) => + Json.obj("namespace" -> taxo.namespace, "importState" -> s"Failure : ${e.getMessage}") + case Success(t) => + Json.obj("namespace" -> t.namespace, "importState" -> s"Success : ${t.tags.size} tags imported") + } + array :+ res + }) } yield Results.Created(richTaxos) } diff --git a/thehive/app/org/thp/thehive/services/TaxonomySrv.scala b/thehive/app/org/thp/thehive/services/TaxonomySrv.scala index a652192711..2c57ba30e4 100644 --- a/thehive/app/org/thp/thehive/services/TaxonomySrv.scala +++ b/thehive/app/org/thp/thehive/services/TaxonomySrv.scala @@ -77,7 +77,7 @@ object TaxonomyOps { def visible(implicit authContext: AuthContext): Traversal.V[Taxonomy] = { if (authContext.isPermitted(Permissions.manageTaxonomy)) - traversal.noFreetags + noFreetags else traversal.filter(_.organisations.get(authContext.organisation)) } From 
213d94c101b4fccd91b7baee1891d926514eb936 Mon Sep 17 00:00:00 2001 From: Robin Riclet Date: Thu, 10 Dec 2020 15:16:51 +0100 Subject: [PATCH 52/93] Changed field names for zip import --- thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala b/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala index 778f4e9e1f..cd73717175 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala @@ -90,9 +90,9 @@ class TaxonomyCtrl @Inject() ( createFromInput(taxo) } match { case Failure(e) => - Json.obj("namespace" -> taxo.namespace, "importState" -> s"Failure : ${e.getMessage}") + Json.obj("namespace" -> taxo.namespace, "status" -> "Failure", "message" -> e.getMessage) case Success(t) => - Json.obj("namespace" -> t.namespace, "importState" -> s"Success : ${t.tags.size} tags imported") + Json.obj("namespace" -> t.namespace, "status" -> "Success", "tagsImported" -> t.tags.size) } array :+ res }) From 457483078b9ede9b3491ca0ee37917b52047e04c Mon Sep 17 00:00:00 2001 From: Robin Riclet Date: Thu, 10 Dec 2020 16:12:14 +0100 Subject: [PATCH 53/93] Fixed broken tests --- .../controllers/v1/TaxonomyCtrlTest.scala | 20 +++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/thehive/test/org/thp/thehive/controllers/v1/TaxonomyCtrlTest.scala b/thehive/test/org/thp/thehive/controllers/v1/TaxonomyCtrlTest.scala index 68c4627b49..917a6fb6f3 100644 --- a/thehive/test/org/thp/thehive/controllers/v1/TaxonomyCtrlTest.scala +++ b/thehive/test/org/thp/thehive/controllers/v1/TaxonomyCtrlTest.scala @@ -4,7 +4,7 @@ import org.thp.scalligraph.controllers.FakeTemporaryFile import org.thp.thehive.TestAppBuilder import org.thp.thehive.dto.v1._ import play.api.libs.Files -import play.api.libs.json.Json +import play.api.libs.json.{JsArray, Json} import play.api.mvc.MultipartFormData.FilePart import play.api.mvc.{AnyContentAsMultipartFormData, MultipartFormData} import play.api.test.{FakeRequest, PlaySpecification} @@ -144,8 +144,8 @@ class TaxonomyCtrlTest extends PlaySpecification with TestAppBuilder { val result = app[TaxonomyCtrl].importZip(request) status(result) must beEqualTo(201).updateMessage(s => s"$s\n${contentAsString(result)}") - val zipTaxos = contentAsJson(result).as[Seq[OutputTaxonomy]] - zipTaxos.size must beEqualTo(2) + contentAsString(result) must not contain("Failure") + contentAsJson(result).as[JsArray].value.size must beEqualTo(2) } "import zip file with folders correctly" in testApp { app => @@ -156,8 +156,8 @@ class TaxonomyCtrlTest extends PlaySpecification with TestAppBuilder { val result = app[TaxonomyCtrl].importZip(request) status(result) must beEqualTo(201).updateMessage(s => s"$s\n${contentAsString(result)}") - val zipTaxos = contentAsJson(result).as[Seq[OutputTaxonomy]] - zipTaxos.size must beEqualTo(2) + contentAsString(result) must not contain("Failure") + contentAsJson(result).as[JsArray].value.size must beEqualTo(2) } "return no error if zip file contains other files than taxonomies" in testApp { app => @@ -168,8 +168,8 @@ class TaxonomyCtrlTest extends PlaySpecification with TestAppBuilder { val result = app[TaxonomyCtrl].importZip(request) status(result) must beEqualTo(201).updateMessage(s => s"$s\n${contentAsString(result)}") - val zipTaxos = contentAsJson(result).as[Seq[OutputTaxonomy]] - zipTaxos.size must beEqualTo(1) + 
contentAsString(result) must not contain("Failure") + contentAsJson(result).as[JsArray].value.size must beEqualTo(1) } "return error if zip file contains an already present taxonomy" in testApp { app => @@ -178,9 +178,9 @@ class TaxonomyCtrlTest extends PlaySpecification with TestAppBuilder { .withBody(AnyContentAsMultipartFormData(multipartZipFile("machinetag-present.zip"))) val result = app[TaxonomyCtrl].importZip(request) - status(result) must beEqualTo(400).updateMessage(s => s"$s\n${contentAsString(result)}") - (contentAsJson(result) \ "type").as[String] must beEqualTo("BadRequest") - (contentAsJson(result) \ "message").as[String] must contain("already exists") + status(result) must beEqualTo(201).updateMessage(s => s"$s\n${contentAsString(result)}") + contentAsString(result) must contain("Failure") + contentAsJson(result).as[JsArray].value.size must beEqualTo(2) } "return error if zip file contains a bad formatted taxonomy" in testApp { app => From 04afbe63f1c73d3be7b4344adeb967b39be72274 Mon Sep 17 00:00:00 2001 From: Nabil Adouani Date: Thu, 10 Dec 2020 22:05:25 +0100 Subject: [PATCH 54/93] #1668 WIP: Update taxonomies list admin page: add filtering, pagination, enable/disable, delete --- .../admin/taxonomy/TaxonomyListCtrl.js | 139 ++++++++++++++---- .../app/scripts/services/api/TaxonomySrv.js | 14 -- frontend/app/styles/main.css | 7 + .../views/partials/admin/taxonomy/list.html | 65 ++++++-- .../partials/admin/taxonomy/list/filters.html | 38 +++++ .../partials/admin/taxonomy/list/toolbar.html | 9 ++ 6 files changed, 218 insertions(+), 54 deletions(-) create mode 100644 frontend/app/views/partials/admin/taxonomy/list/filters.html diff --git a/frontend/app/scripts/controllers/admin/taxonomy/TaxonomyListCtrl.js b/frontend/app/scripts/controllers/admin/taxonomy/TaxonomyListCtrl.js index 47661db02f..3763c96c44 100644 --- a/frontend/app/scripts/controllers/admin/taxonomy/TaxonomyListCtrl.js +++ b/frontend/app/scripts/controllers/admin/taxonomy/TaxonomyListCtrl.js @@ -5,19 +5,43 @@ .controller('TaxonomyListCtrl', TaxonomyListCtrl) .controller('TaxonomyImportCtrl', TaxonomyImportCtrl); - function TaxonomyListCtrl($uibModal, TaxonomySrv, NotificationSrv, ModalSrv, appConfig) { + function TaxonomyListCtrl($scope, $uibModal, PaginatedQuerySrv, FilteringSrv, TaxonomySrv, NotificationSrv, ModalSrv, appConfig) { var self = this; this.appConfig = appConfig; self.load = function() { - TaxonomySrv.list() - .then(function(response) { - self.list = response; - }) - .catch(function(rejection) { - NotificationSrv.error('Taxonomies management', rejection.data, rejection.status); - }); + this.loading = true; + + // TaxonomySrv.list() + // .then(function(response) { + // self.list = response; + // }) + // .catch(function(rejection) { + // NotificationSrv.error('Taxonomies management', rejection.data, rejection.status); + // }) + // .finally(function(){ + // //self.loading = false; + // }); + + this.list = new PaginatedQuerySrv({ + name: 'taxonomies', + root: undefined, + objectType: 'taxonomy', + version: 'v1', + scope: $scope, + sort: self.filtering.context.sort, + loadAll: false, + pageSize: self.filtering.context.pageSize, + filter: this.filtering.buildQuery(), + operations: [ + {'_name': 'listTaxonomy'} + ], + extraData: ['enabled'], + onUpdate: function() { + self.loading = false; + } + }); }; self.import = function () { @@ -26,7 +50,10 @@ templateUrl: 'views/partials/admin/taxonomy/import.html', controller: 'TaxonomyImportCtrl', controllerAs: '$vm', - size: 'lg' + size: 'lg', + resolve: { + 
appConfig: self.appConfig + } }); modalInstance.result @@ -40,10 +67,12 @@ }); }; - this.toggleActive = function(id, active) { - TaxonomySrv.toggleActive(id, active) + this.toggleActive = function(taxonomy) { + var active = !taxonomy.extraData.enabled; + + TaxonomySrv.toggleActive(taxonomy._id, active) .then(function() { - NotificationSrv.log('Taxonomy has been successfully ' + active ? 'activated' : 'deactivated', 'success'); + NotificationSrv.log(['Taxonomy [', taxonomy.namespace, '] has been successfully', (active ? 'activated' : 'deactivated')].join(' '), 'success'); self.load(); }) @@ -54,36 +83,86 @@ }); }; - self.update = function(id, taxonomy) { - // TODO - // TaxonomySrv.update(id, _.pick(taxonomy, '...')) - TaxonomySrv.update(id, _.pick(taxonomy, '...')) - .then(function(/*response*/) { + self.remove = function(taxonomy) { + var modalInstance = ModalSrv.confirm( + 'Remove taxonomy', + 'Are you sure you want to remove the selected taxonomy?', { + flavor: 'danger', + okText: 'Yes, remove it' + } + ); + + modalInstance.result + .then(function() { + return TaxonomySrv.remove(taxonomy._id); + }) + .then(function( /*response*/ ) { self.load(); - NotificationSrv.log('Taxonomy updated successfully', 'success'); + NotificationSrv.success( + 'Taxonomy ' + taxonomy.namespace + ' has been successfully removed.' + ); }) .catch(function(err) { - NotificationSrv.error('Error', 'Taxonomy update failed', err.status); + if (err && !_.isString(err)) { + NotificationSrv.error('TaxonomyListCtrl', err.data, err.status); + } }); }; - self.create = function(taxonomy) { - TaxonomySrv.create(taxonomy) - .then(function(/*response*/) { - self.load(); - NotificationSrv.log('Taxonomy created successfully', 'success'); - }) - .catch(function(err) { - NotificationSrv.error('Error', 'Taxonomy creation failed', err.status); - }); + this.toggleFilters = function () { + this.filtering.toggleFilters(); }; - self.$onInit = function() { + this.filter = function () { + self.filtering.filter().then(this.applyFilters); + }; + + this.clearFilters = function () { + this.filtering.clearFilters() + .then(self.search); + }; + + this.removeFilter = function (index) { + self.filtering.removeFilter(index) + .then(self.search); + }; + + this.search = function () { self.load(); + self.filtering.storeContext(); + }; + this.addFilterValue = function (field, value) { + this.filtering.addFilterValue(field, value); + this.search(); + }; + + self.$onInit = function() { + //self.load(); + + self.filtering = new FilteringSrv('taxonomy', 'taxonomy.list', { + version: 'v1', + defaults: { + showFilters: true, + showStats: false, + pageSize: 15, + sort: ['+namespace'] + }, + defaultFilter: [] + }); + + self.filtering.initContext('list') + .then(function() { + self.load(); + + $scope.$watch('$vm.list.pageSize', function (newValue) { + self.filtering.setPageSize(newValue); + }); + }); }; } - function TaxonomyImportCtrl($uibModalInstance, TaxonomySrv, NotificationSrv) { + function TaxonomyImportCtrl($uibModalInstance, TaxonomySrv, NotificationSrv, appConfig) { + this.appConfig = appConfig; this.formData = {}; this.ok = function () { diff --git a/frontend/app/scripts/services/api/TaxonomySrv.js b/frontend/app/scripts/services/api/TaxonomySrv.js index 747b9a4a52..70aafedca8 100644 --- a/frontend/app/scripts/services/api/TaxonomySrv.js +++ b/frontend/app/scripts/services/api/TaxonomySrv.js @@ -2,21 +2,14 @@ 'use strict'; angular.module('theHiveServices') .service('TaxonomySrv', function($http, QuerySrv) { - // var self = this; var baseUrl = 
'./api/v1/taxonomy'; this.list = function() { - // return $http.get(baseUrl, {params: { - // range: 'all' - // }}); - // return QuerySrv.call('v1', [ { _name: 'listTaxonomy' } ], { name:'list-taxonomies' }); - - //listTaxonomies }; this.get = function(name) { @@ -27,13 +20,6 @@ return $http.put([baseUrl, id, !!active ? 'activate' : 'deactivate'].join('/')); }; - // this.map = function() { - // return self.list() - // .then(function(response) { - // return _.indexBy(response.data, 'name'); - // }); - // }; - this.create = function(profile) { return $http.post(baseUrl, profile); }; diff --git a/frontend/app/styles/main.css b/frontend/app/styles/main.css index 0425ebbcd9..7830c6cc1a 100644 --- a/frontend/app/styles/main.css +++ b/frontend/app/styles/main.css @@ -76,6 +76,13 @@ body { text-align: center; padding: 40px; } +.loading-message { + background-color: #f5f5f5; + color: #AAA; + font-size: 18px; + text-align: center; + padding: 40px; +} .tpad50 { padding-top: 50px diff --git a/frontend/app/views/partials/admin/taxonomy/list.html b/frontend/app/views/partials/admin/taxonomy/list.html index e8eb9c5aa0..734bb087c4 100644 --- a/frontend/app/views/partials/admin/taxonomy/list.html +++ b/frontend/app/views/partials/admin/taxonomy/list.html @@ -7,26 +7,55 @@

    List of taxonomies

    -
    +
    + +
    +
    + + +
    +
    + +
    No taxonomies found.
    +
    +
    +
    + + loading taxonomies... +
    +
    +
    + -
    +
    + +
    - + + - + - + + -
    Name Namespace Description # Tags
    + +
    {{::taxonomy.namespace}} @@ -38,17 +67,33 @@

    List of taxonomies

    {{::taxonomy.tags.length}} - - Edit + + - - Delete
    + +
    diff --git a/frontend/app/views/partials/admin/taxonomy/list/filters.html b/frontend/app/views/partials/admin/taxonomy/list/filters.html new file mode 100644 index 0000000000..431c826ca5 --- /dev/null +++ b/frontend/app/views/partials/admin/taxonomy/list/filters.html @@ -0,0 +1,38 @@ +
    +
    +

    Filters

    +
    +
    +
    +
    + + + + +
    +
    +
    + +
    +
    +
    + +
    +
    + +
    +
    diff --git a/frontend/app/views/partials/admin/taxonomy/list/toolbar.html b/frontend/app/views/partials/admin/taxonomy/list/toolbar.html index e7b922a3d6..87fe159685 100644 --- a/frontend/app/views/partials/admin/taxonomy/list/toolbar.html +++ b/frontend/app/views/partials/admin/taxonomy/list/toolbar.html @@ -7,6 +7,15 @@ Import taxonomies
    +
    + +
    + +
    + +
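
For reference, the paginated admin list above goes through the generic v1 query executor rather than a dedicated list route. A hedged sketch of the request it ends up issuing, reusing the FakeRequest conventions of the controller tests: the /api/v1/query route and the sort/page operation names follow TheHive's v1 query conventions and are assumptions here, while listTaxonomy and the "enabled" extra datum come from the patches above.

    // Sketch only: server-side equivalent of PaginatedQuerySrv({operations: [{_name: 'listTaxonomy'}], extraData: ['enabled'], ...})
    val request = FakeRequest("POST", "/api/v1/query?name=taxonomies")
      .withHeaders("user" -> "admin@thehive.local")
      .withJsonBody(Json.parse("""
        {
          "query": [
            {"_name": "listTaxonomy"},
            {"_name": "sort", "_fields": [{"namespace": "asc"}]},
            {"_name": "page", "from": 0, "to": 15, "extraData": ["enabled"]}
          ]
        }
      """))
    // Each returned taxonomy then carries extraData.enabled, computed server-side by taxoStatsRenderer/enabledStats.
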
    From e9ba4aa196d3cd41e56341acf5f9f016e9f62fee Mon Sep 17 00:00:00 2001 From: Nabil Adouani Date: Fri, 11 Dec 2020 06:34:13 +0100 Subject: [PATCH 55/93] #1668 WIP: add taonomy view dialog to list tags --- frontend/app/index.html | 1 + .../components/common/tag.component.js | 22 +++++++ .../admin/taxonomy/TaxonomyListCtrl.js | 52 +++++++++++---- .../components/common/tag.component.html | 1 + .../views/partials/admin/taxonomy/list.html | 10 ++- .../views/partials/admin/taxonomy/view.html | 65 +++++++++++++++++++ 6 files changed, 135 insertions(+), 16 deletions(-) create mode 100644 frontend/app/scripts/components/common/tag.component.js create mode 100644 frontend/app/views/components/common/tag.component.html create mode 100644 frontend/app/views/partials/admin/taxonomy/view.html diff --git a/frontend/app/index.html b/frontend/app/index.html index 9a4760768f..57ad3fda5a 100644 --- a/frontend/app/index.html +++ b/frontend/app/index.html @@ -139,6 +139,7 @@ + diff --git a/frontend/app/scripts/components/common/tag.component.js b/frontend/app/scripts/components/common/tag.component.js new file mode 100644 index 0000000000..8876df5802 --- /dev/null +++ b/frontend/app/scripts/components/common/tag.component.js @@ -0,0 +1,22 @@ +(function() { + 'use strict'; + + angular.module('theHiveComponents') + .component('tag', { + controller: function() { + this.$onInit = function() { + this.tag = _.without([ + this.value.namespace, + ':', + this.value.predicate, + this.value.value ? ("=\"" + this.value.value + "\"") : null + ], null).join(''); + }; + }, + controllerAs: '$ctrl', + templateUrl: 'views/components/common/tag.component.html', + bindings: { + value: '<' + } + }); +})(); diff --git a/frontend/app/scripts/controllers/admin/taxonomy/TaxonomyListCtrl.js b/frontend/app/scripts/controllers/admin/taxonomy/TaxonomyListCtrl.js index 3763c96c44..78a1d5c29b 100644 --- a/frontend/app/scripts/controllers/admin/taxonomy/TaxonomyListCtrl.js +++ b/frontend/app/scripts/controllers/admin/taxonomy/TaxonomyListCtrl.js @@ -3,6 +3,7 @@ angular.module('theHiveControllers') .controller('TaxonomyListCtrl', TaxonomyListCtrl) + .controller('TaxonomyDialogCtrl', TaxonomyDialogCtrl) .controller('TaxonomyImportCtrl', TaxonomyImportCtrl); function TaxonomyListCtrl($scope, $uibModal, PaginatedQuerySrv, FilteringSrv, TaxonomySrv, NotificationSrv, ModalSrv, appConfig) { @@ -13,17 +14,6 @@ self.load = function() { this.loading = true; - // TaxonomySrv.list() - // .then(function(response) { - // self.list = response; - // }) - // .catch(function(rejection) { - // NotificationSrv.error('Taxonomies management', rejection.data, rejection.status); - // }) - // .finally(function(){ - // //self.loading = false; - // }); - this.list = new PaginatedQuerySrv({ name: 'taxonomies', root: undefined, @@ -44,6 +34,32 @@ }); }; + self.show = function(taxonomy) { + // var modalInstance = $uibModal.open({ + + $uibModal.open({ + animation: true, + templateUrl: 'views/partials/admin/taxonomy/view.html', + controller: 'TaxonomyDialogCtrl', + controllerAs: '$modal', + size: 'max', + resolve: { + taxonomy: angular.copy(taxonomy) + } + }); + + // modalInstance.result + // .then(function() { + // self.load(); + // }) + // .catch(function(err){ + // if(err && !_.isString(err)) { + // NotificationSrv.error('Taxonomies import', err.data, err.status); + // } + // }); + }; + + self.import = function () { var modalInstance = $uibModal.open({ animation: true, @@ -137,8 +153,6 @@ }; self.$onInit = function() { - //self.load(); - self.filtering = new 
FilteringSrv('taxonomy', 'taxonomy.list', { version: 'v1', defaults: { @@ -161,6 +175,18 @@ }; } + function TaxonomyDialogCtrl($uibModalInstance, TaxonomySrv, NotificationSrv, taxonomy) { + this.taxonomy = taxonomy; + + this.ok = function () { + $uibModalInstance.close(); + }; + + this.cancel = function () { + $uibModalInstance.dismiss('cancel'); + }; + } + function TaxonomyImportCtrl($uibModalInstance, TaxonomySrv, NotificationSrv, appConfig) { this.appConfig = appConfig; this.formData = {}; diff --git a/frontend/app/views/components/common/tag.component.html b/frontend/app/views/components/common/tag.component.html new file mode 100644 index 0000000000..9646fdddcf --- /dev/null +++ b/frontend/app/views/components/common/tag.component.html @@ -0,0 +1 @@ +{{$ctrl.tag}} diff --git a/frontend/app/views/partials/admin/taxonomy/list.html b/frontend/app/views/partials/admin/taxonomy/list.html index 734bb087c4..6d10f519f5 100644 --- a/frontend/app/views/partials/admin/taxonomy/list.html +++ b/frontend/app/views/partials/admin/taxonomy/list.html @@ -44,6 +44,7 @@

    List of taxonomies

    Namespace Description + Version # Tags @@ -58,12 +59,15 @@

    List of taxonomies

    - {{::taxonomy.namespace}} + {{::taxonomy.namespace}}
    {{::taxonomy.description}} + + {{::taxonomy.version}} + {{::taxonomy.tags.length}} @@ -78,8 +82,8 @@

    List of taxonomies

    }[!!taxonomy.extraData.enabled]"> - - + + diff --git a/frontend/app/views/partials/admin/taxonomy/view.html b/frontend/app/views/partials/admin/taxonomy/view.html new file mode 100644 index 0000000000..3a50127b1c --- /dev/null +++ b/frontend/app/views/partials/admin/taxonomy/view.html @@ -0,0 +1,65 @@ +
    + + + + + +
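
The tag component introduced above renders a tag as namespace:predicate="value", omitting the quoted part when the tag has no value. A tiny Scala equivalent of that formatting, for illustration only (the sample tags are invented):

    // Label format rendered by the new tag component; not part of the patch itself.
    def tagLabel(namespace: String, predicate: String, value: Option[String]): String =
      s"$namespace:$predicate" + value.fold("")(v => "=\"" + v + "\"")

    tagLabel("tlp", "amber", None)               // tlp:amber
    tagLabel("my-taxo", "origin", Some("OSINT")) // my-taxo:origin="OSINT"
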
    From 745fcea71a3f7dba4a5bc1f50ea50f44841097f9 Mon Sep 17 00:00:00 2001 From: Nabil Adouani Date: Fri, 11 Dec 2020 09:19:48 +0100 Subject: [PATCH 56/93] #1668 WIP: allow filtering by taxonomy status --- frontend/app/views/partials/admin/taxonomy/list.html | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/frontend/app/views/partials/admin/taxonomy/list.html b/frontend/app/views/partials/admin/taxonomy/list.html index 6d10f519f5..f8aa93d385 100644 --- a/frontend/app/views/partials/admin/taxonomy/list.html +++ b/frontend/app/views/partials/admin/taxonomy/list.html @@ -52,10 +52,12 @@

    List of taxonomies

    - + + +
    From eae7bbc091331eaf679fce3a05e89c1afdb2fcb2 Mon Sep 17 00:00:00 2001 From: Robin Riclet Date: Fri, 11 Dec 2020 18:55:26 +0100 Subject: [PATCH 57/93] WIP Changed tag colour property type - Cortex tests failing --- ScalliGraph | 2 +- .../scala/org/thp/thehive/dto/v0/Tag.scala | 4 +- .../scala/org/thp/thehive/dto/v1/Tag.scala | 2 +- .../src/main/scala/org/thp/misp/dto/Tag.scala | 7 +-- .../misp/services/MispImportSrvTest.scala | 2 +- .../thehive/controllers/v0/AlertCtrl.scala | 2 +- .../thp/thehive/controllers/v0/CaseCtrl.scala | 2 +- .../controllers/v0/CaseTemplateCtrl.scala | 2 +- .../controllers/v0/ObservableCtrl.scala | 12 +++-- .../thp/thehive/controllers/v0/TagCtrl.scala | 12 ++--- .../controllers/v1/ObservableCtrl.scala | 10 ++--- .../thehive/controllers/v1/Properties.scala | 44 +++++++------------ .../thehive/controllers/v1/TaxonomyCtrl.scala | 4 +- .../controllers/v1/TaxonomyRenderer.scala | 8 ++-- thehive/app/org/thp/thehive/models/Tag.scala | 15 +++---- .../models/TheHiveSchemaDefinition.scala | 11 +++++ .../thp/thehive/services/AttachmentSrv.scala | 2 +- .../app/org/thp/thehive/services/TagSrv.scala | 18 ++------ .../controllers/v0/ConfigCtrlTest.scala | 8 +++- .../controllers/v1/TaxonomyCtrlTest.scala | 14 +++--- .../test/org/thp/thehive/models/TagTest.scala | 16 +++---- thehive/test/resources/data/Tag.json | 22 +++++----- 22 files changed, 97 insertions(+), 122 deletions(-) diff --git a/ScalliGraph b/ScalliGraph index 856e64f3e1..9aa06293e3 160000 --- a/ScalliGraph +++ b/ScalliGraph @@ -1 +1 @@ -Subproject commit 856e64f3e1b262821a9d5b8c402ebc13f7562f18 +Subproject commit 9aa06293e32254466d1c5d7ae089755fdfbbe4e0 diff --git a/dto/src/main/scala/org/thp/thehive/dto/v0/Tag.scala b/dto/src/main/scala/org/thp/thehive/dto/v0/Tag.scala index 46cdc7fd5e..d994f6fc38 100644 --- a/dto/src/main/scala/org/thp/thehive/dto/v0/Tag.scala +++ b/dto/src/main/scala/org/thp/thehive/dto/v0/Tag.scala @@ -2,13 +2,13 @@ package org.thp.thehive.dto.v0 import play.api.libs.json.{Json, OFormat, OWrites} -case class InputTag(namespace: String, predicate: String, value: Option[String], description: Option[String], colour: Option[Int]) +case class InputTag(namespace: String, predicate: String, value: Option[String], description: Option[String], colour: Option[String]) object InputTag { implicit val writes: OWrites[InputTag] = Json.writes[InputTag] } -case class OutputTag(namespace: String, predicate: String, value: Option[String], description: Option[String], colour: Int) +case class OutputTag(namespace: String, predicate: String, value: Option[String], description: Option[String], colour: String) object OutputTag { implicit val format: OFormat[OutputTag] = Json.format[OutputTag] diff --git a/dto/src/main/scala/org/thp/thehive/dto/v1/Tag.scala b/dto/src/main/scala/org/thp/thehive/dto/v1/Tag.scala index 3b536c867c..13f6d33193 100644 --- a/dto/src/main/scala/org/thp/thehive/dto/v1/Tag.scala +++ b/dto/src/main/scala/org/thp/thehive/dto/v1/Tag.scala @@ -7,7 +7,7 @@ case class OutputTag( predicate: String, value: Option[String], description: Option[String], - colour: Int + colour: String ) object OutputTag { diff --git a/misp/client/src/main/scala/org/thp/misp/dto/Tag.scala b/misp/client/src/main/scala/org/thp/misp/dto/Tag.scala index 683b1ee489..eef50fdcdc 100644 --- a/misp/client/src/main/scala/org/thp/misp/dto/Tag.scala +++ b/misp/client/src/main/scala/org/thp/misp/dto/Tag.scala @@ -6,7 +6,7 @@ import play.api.libs.json._ case class Tag( id: Option[String], name: String, - colour: Option[Int], + colour: 
Option[String], exportable: Option[Boolean] ) @@ -14,10 +14,7 @@ object Tag { implicit val reads: Reads[Tag] = ((JsPath \ "id").readNullable[String] and (JsPath \ "name").read[String] and - (JsPath \ "colour").readNullable[String].map { - case Some(c) if c.headOption.contains('#') => Some(Integer.parseUnsignedInt(c.tail, 16)) - case _ => None - } and + (JsPath \ "colour").readNullable[String] and (JsPath \ "exportable").readNullable[Boolean])(Tag.apply _) implicit val writes: Writes[Tag] = Json.writes[Tag] diff --git a/misp/connector/src/test/scala/org/thp/thehive/connector/misp/services/MispImportSrvTest.scala b/misp/connector/src/test/scala/org/thp/thehive/connector/misp/services/MispImportSrvTest.scala index 915ef429c0..4062eaef86 100644 --- a/misp/connector/src/test/scala/org/thp/thehive/connector/misp/services/MispImportSrvTest.scala +++ b/misp/connector/src/test/scala/org/thp/thehive/connector/misp/services/MispImportSrvTest.scala @@ -65,7 +65,7 @@ class MispImportSrvTest(implicit ec: ExecutionContext) extends PlaySpecification attributeCount = Some(11), distribution = 1, attributes = Nil, - tags = Seq(Tag(Some("1"), "TH-test", Some(0x36a3a3), None), Tag(Some("2"), "TH-test-2", Some(0x1ac7c7), None)) + tags = Seq(Tag(Some("1"), "TH-test", Some("#36a3a3"), None), Tag(Some("2"), "TH-test-2", Some("#1ac7c7"), None)) ) ) } diff --git a/thehive/app/org/thp/thehive/controllers/v0/AlertCtrl.scala b/thehive/app/org/thp/thehive/controllers/v0/AlertCtrl.scala index d0ba0e18fe..7d8ac401ea 100644 --- a/thehive/app/org/thp/thehive/controllers/v0/AlertCtrl.scala +++ b/thehive/app/org/thp/thehive/controllers/v0/AlertCtrl.scala @@ -391,7 +391,7 @@ class PublicAlert @Inject() ( val namespace = UMapping.string.getProperty(v, "namespace") val predicate = UMapping.string.getProperty(v, "predicate") val value = UMapping.string.optional.getProperty(v, "value") - Tag(namespace, predicate, value, None, 0).toString + Tag(namespace, predicate, value, None, "#000000").toString }, Converter.identity[String] ) diff --git a/thehive/app/org/thp/thehive/controllers/v0/CaseCtrl.scala b/thehive/app/org/thp/thehive/controllers/v0/CaseCtrl.scala index 71dff80ff5..afcbf55831 100644 --- a/thehive/app/org/thp/thehive/controllers/v0/CaseCtrl.scala +++ b/thehive/app/org/thp/thehive/controllers/v0/CaseCtrl.scala @@ -245,7 +245,7 @@ class PublicCase @Inject() ( val namespace = UMapping.string.getProperty(v, "namespace") val predicate = UMapping.string.getProperty(v, "predicate") val value = UMapping.string.optional.getProperty(v, "value") - Tag(namespace, predicate, value, None, 0).toString + Tag(namespace, predicate, value, None, "#000000").toString }, Converter.identity[String] ) diff --git a/thehive/app/org/thp/thehive/controllers/v0/CaseTemplateCtrl.scala b/thehive/app/org/thp/thehive/controllers/v0/CaseTemplateCtrl.scala index 5a1d824314..e8845fd84c 100644 --- a/thehive/app/org/thp/thehive/controllers/v0/CaseTemplateCtrl.scala +++ b/thehive/app/org/thp/thehive/controllers/v0/CaseTemplateCtrl.scala @@ -129,7 +129,7 @@ class PublicCaseTemplate @Inject() ( val namespace = UMapping.string.getProperty(v, "namespace") val predicate = UMapping.string.getProperty(v, "predicate") val value = UMapping.string.optional.getProperty(v, "value") - Tag(namespace, predicate, value, None, 0).toString + Tag(namespace, predicate, value, None, "#000000").toString }, Converter.identity[String] ) diff --git a/thehive/app/org/thp/thehive/controllers/v0/ObservableCtrl.scala b/thehive/app/org/thp/thehive/controllers/v0/ObservableCtrl.scala index 
629711357d..2b280f0eae 100644 --- a/thehive/app/org/thp/thehive/controllers/v0/ObservableCtrl.scala +++ b/thehive/app/org/thp/thehive/controllers/v0/ObservableCtrl.scala @@ -1,13 +1,8 @@ package org.thp.thehive.controllers.v0 -import java.io.FilterInputStream -import java.nio.file.Files - -import javax.inject.{Inject, Named, Singleton} import net.lingala.zip4j.ZipFile import net.lingala.zip4j.model.FileHeader import org.thp.scalligraph._ -import org.thp.scalligraph.auth.AuthContext import org.thp.scalligraph.controllers._ import org.thp.scalligraph.models.{Database, UMapping} import org.thp.scalligraph.query._ @@ -27,6 +22,9 @@ import play.api.libs.Files.DefaultTemporaryFileCreator import play.api.libs.json.{JsArray, JsObject, JsValue, Json} import play.api.mvc.{Action, AnyContent, Results} +import java.io.FilterInputStream +import java.nio.file.Files +import javax.inject.{Inject, Named, Singleton} import scala.collection.JavaConverters._ import scala.util.Success @@ -214,7 +212,7 @@ class ObservableCtrl @Inject() ( } } - private def getZipFiles(observable: InputObservable, zipPassword: Option[String])(implicit authContext: AuthContext): Seq[InputObservable] = + private def getZipFiles(observable: InputObservable, zipPassword: Option[String]): Seq[InputObservable] = observable.attachment.toSeq.flatMap { attachment => val zipFile = new ZipFile(attachment.filepath.toFile) val files: Seq[FileHeader] = zipFile.getFileHeaders.asScala.asInstanceOf[Seq[FileHeader]] @@ -288,7 +286,7 @@ class PublicObservable @Inject() ( val namespace = UMapping.string.getProperty(v, "namespace") val predicate = UMapping.string.getProperty(v, "predicate") val value = UMapping.string.optional.getProperty(v, "value") - Tag(namespace, predicate, value, None, 0).toString + Tag(namespace, predicate, value, None, "#000000").toString }, Converter.identity[String] ) diff --git a/thehive/app/org/thp/thehive/controllers/v0/TagCtrl.scala b/thehive/app/org/thp/thehive/controllers/v0/TagCtrl.scala index 45a85f7abf..a6045ef70f 100644 --- a/thehive/app/org/thp/thehive/controllers/v0/TagCtrl.scala +++ b/thehive/app/org/thp/thehive/controllers/v0/TagCtrl.scala @@ -17,8 +17,6 @@ import org.thp.thehive.services.TagSrv import play.api.libs.json.{JsNumber, JsObject, JsValue, Json} import play.api.mvc.{Action, AnyContent, Results} -import scala.util.Try - class TagCtrl @Inject() ( override val entrypoint: Entrypoint, @Named("with-thehive-schema") override val db: Database, @@ -68,13 +66,10 @@ class TagCtrl @Inject() ( colour = (entry \ "colour") .asOpt[String] - .map(parseColour) - .getOrElse(0) // black + .getOrElse("#000000") e = (entry \ "description").asOpt[String] orElse (entry \ "expanded").asOpt[String] } yield Tag(namespace, predicate, Some(v), e, colour) - def parseColour(colour: String): Int = if (colour(0) == '#') Try(Integer.parseUnsignedInt(colour.tail, 16)).getOrElse(0) else 0 - private def distinct(valueOpt: Option[String], acc: (Seq[JsObject], Seq[String]), v: JsObject): (Seq[JsObject], Seq[String]) = if (valueOpt.isDefined && acc._2.contains(valueOpt.get)) acc else (acc._1 :+ v, valueOpt.fold(acc._2)(acc._2 :+ _)) @@ -90,8 +85,7 @@ class TagCtrl @Inject() ( colour = (predicate \ "colour") .asOpt[String] - .map(parseColour) - .getOrElse(0) // black + .getOrElse("#000000") } yield Tag(namespace, v, None, e, colour) def get(tagId: String): Action[AnyContent] = @@ -142,7 +136,7 @@ class PublicTag @Inject() (tagSrv: TagSrv) extends PublicData { val namespace = UMapping.string.getProperty(v, "namespace") val predicate = 
UMapping.string.getProperty(v, "predicate") val value = UMapping.string.optional.getProperty(v, "value") - Tag(namespace, predicate, value, None, 0).toString + Tag(namespace, predicate, value, None, "#000000").toString }, Converter.identity[String] ) diff --git a/thehive/app/org/thp/thehive/controllers/v1/ObservableCtrl.scala b/thehive/app/org/thp/thehive/controllers/v1/ObservableCtrl.scala index f383a7a025..c81e9e7c9b 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/ObservableCtrl.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/ObservableCtrl.scala @@ -1,13 +1,8 @@ package org.thp.thehive.controllers.v1 -import java.io.FilterInputStream -import java.nio.file.Files - -import javax.inject.{Inject, Named, Singleton} import net.lingala.zip4j.ZipFile import net.lingala.zip4j.model.FileHeader import org.thp.scalligraph._ -import org.thp.scalligraph.auth.AuthContext import org.thp.scalligraph.controllers._ import org.thp.scalligraph.models.Database import org.thp.scalligraph.query.{ParamQuery, PropertyUpdater, PublicProperties, Query} @@ -25,6 +20,9 @@ import play.api.libs.Files.DefaultTemporaryFileCreator import play.api.mvc.{Action, AnyContent, Results} import play.api.{Configuration, Logger} +import java.io.FilterInputStream +import java.nio.file.Files +import javax.inject.{Inject, Named, Singleton} import scala.collection.JavaConverters._ @Singleton @@ -196,7 +194,7 @@ class ObservableCtrl @Inject() ( } } - private def getZipFiles(observable: InputObservable, zipPassword: Option[String])(implicit authContext: AuthContext): Seq[InputObservable] = + private def getZipFiles(observable: InputObservable, zipPassword: Option[String]): Seq[InputObservable] = observable.attachment.toSeq.flatMap { attachment => val zipFile = new ZipFile(attachment.filepath.toFile) val files: Seq[FileHeader] = zipFile.getFileHeaders.asScala.asInstanceOf[Seq[FileHeader]] diff --git a/thehive/app/org/thp/thehive/controllers/v1/Properties.scala b/thehive/app/org/thp/thehive/controllers/v1/Properties.scala index 54ff5f31a4..0c986745af 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/Properties.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/Properties.scala @@ -1,9 +1,6 @@ package org.thp.thehive.controllers.v1 -import java.lang.{Long => JLong} -import java.util.Date - -import javax.inject.{Inject, Named, Singleton} +import org.apache.tinkerpop.gremlin.structure.Vertex import org.thp.scalligraph.controllers.{FPathElem, FPathEmpty} import org.thp.scalligraph.models.{Database, UMapping} import org.thp.scalligraph.query.{PublicProperties, PublicPropertyListBuilder} @@ -26,6 +23,9 @@ import org.thp.thehive.services.UserOps._ import org.thp.thehive.services._ import play.api.libs.json.{JsObject, JsValue, Json} +import java.lang.{Long => JLong} +import java.util.Date +import javax.inject.{Inject, Named, Singleton} import scala.util.Failure @Singleton @@ -65,12 +65,7 @@ class Properties @Inject() ( cases .tags .graphMap[String, String, Converter.Identity[String]]( - { v => - val namespace = UMapping.string.getProperty(v, "namespace") - val predicate = UMapping.string.getProperty(v, "predicate") - val value = UMapping.string.optional.getProperty(v, "value") - Tag(namespace, predicate, value, None, 0).toString - }, + vertexToTag, Converter.identity[String] ) ) @@ -171,12 +166,7 @@ class Properties @Inject() ( cases .tags .graphMap[String, String, Converter.Identity[String]]( - { v => - val namespace = UMapping.string.getProperty(v, "namespace") - val predicate = UMapping.string.getProperty(v, 
"predicate") - val value = UMapping.string.optional.getProperty(v, "value") - Tag(namespace, predicate, value, None, 0).toString - }, + vertexToTag, Converter.identity[String] ) ) @@ -356,12 +346,7 @@ class Properties @Inject() ( cases .tags .graphMap[String, String, Converter.Identity[String]]( - { v => - val namespace = UMapping.string.getProperty(v, "namespace") - val predicate = UMapping.string.getProperty(v, "predicate") - val value = UMapping.string.optional.getProperty(v, "value") - Tag(namespace, predicate, value, None, 0).toString - }, + vertexToTag, Converter.identity[String] ) ) @@ -465,12 +450,7 @@ class Properties @Inject() ( cases .tags .graphMap[String, String, Converter.Identity[String]]( - { v => - val namespace = UMapping.string.getProperty(v, "namespace") - val predicate = UMapping.string.getProperty(v, "predicate") - val value = UMapping.string.optional.getProperty(v, "value") - Tag(namespace, predicate, value, None, 0).toString - }, + vertexToTag, Converter.identity[String] ) ) @@ -500,4 +480,12 @@ class Properties @Inject() ( .property("version", UMapping.int)(_.field.readonly) .property("enabled", UMapping.boolean)(_.select(_.enabled).readonly) .build + + private def vertexToTag: Vertex => String = { v => + val namespace = UMapping.string.getProperty(v, "namespace") + val predicate = UMapping.string.getProperty(v, "predicate") + val value = UMapping.string.optional.getProperty(v, "value") + Tag(namespace, predicate, value, None, "#000000").toString + } + } diff --git a/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala b/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala index cd73717175..77fcb66925 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/TaxonomyCtrl.scala @@ -49,7 +49,7 @@ class TaxonomyCtrl @Inject() ( { case (OutputParam(from, to, extraData), taxoSteps, authContext) => taxoSteps.richPage(from, to, extraData.contains("total")) { - _.richTaxonomyWithCustomRenderer(taxoStatsRenderer(extraData - "total")(authContext)) + _.richTaxonomyWithCustomRenderer(taxoStatsRenderer(extraData - "total")) } } ) @@ -114,7 +114,7 @@ class TaxonomyCtrl @Inject() ( value.predicate, Some(e.value), e.expanded, - e.colour.map(tagSrv.parseTagColour).getOrElse(tagSrv.defaultColour) + e.colour.getOrElse(tagSrv.defaultColour) ) ) }) diff --git a/thehive/app/org/thp/thehive/controllers/v1/TaxonomyRenderer.scala b/thehive/app/org/thp/thehive/controllers/v1/TaxonomyRenderer.scala index 07835754be..b5b45a8b89 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/TaxonomyRenderer.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/TaxonomyRenderer.scala @@ -1,10 +1,9 @@ package org.thp.thehive.controllers.v1 -import org.thp.scalligraph.auth.AuthContext import org.thp.scalligraph.traversal.TraversalOps._ -import org.thp.thehive.services.TaxonomyOps._ import org.thp.scalligraph.traversal.{Converter, Traversal} import org.thp.thehive.models.Taxonomy +import org.thp.thehive.services.TaxonomyOps._ import play.api.libs.json._ import java.util.{Map => JMap} @@ -14,9 +13,8 @@ trait TaxonomyRenderer { def enabledStats: Traversal.V[Taxonomy] => Traversal[JsValue, Boolean, Converter[JsValue, Boolean]] = _.enabled.domainMap(l => JsBoolean(l)) - def taxoStatsRenderer(extraData: Set[String])(implicit - authContext: AuthContext - ): Traversal.V[Taxonomy] => Traversal[JsObject, JMap[String, Any], Converter[JsObject, JMap[String, Any]]] = { traversal => + def taxoStatsRenderer(extraData: Set[String]): + 
Traversal.V[Taxonomy] => Traversal[JsObject, JMap[String, Any], Converter[JsObject, JMap[String, Any]]] = { traversal => def addData[G]( name: String )(f: Traversal.V[Taxonomy] => Traversal[JsValue, G, Converter[JsValue, G]]): Traversal[JsObject, JMap[String, Any], Converter[ diff --git a/thehive/app/org/thp/thehive/models/Tag.scala b/thehive/app/org/thp/thehive/models/Tag.scala index 3ad58979a5..ee1264f61e 100644 --- a/thehive/app/org/thp/thehive/models/Tag.scala +++ b/thehive/app/org/thp/thehive/models/Tag.scala @@ -1,12 +1,9 @@ package org.thp.thehive.models -import java.util.Date - -import org.thp.scalligraph.{BuildVertexEntity, EntityId} -import org.thp.scalligraph.models.{DefineIndex, Entity, IndexType} +import org.thp.scalligraph.BuildVertexEntity +import org.thp.scalligraph.models.{DefineIndex, IndexType} import play.api.Logger -import scala.util.Try import scala.util.matching.Regex @DefineIndex(IndexType.unique, "namespace", "predicate", "value") @@ -16,7 +13,7 @@ case class Tag( predicate: String, value: Option[String], description: Option[String], - colour: Int + colour: String ) { override def hashCode(): Int = 31 * (31 * value.## + predicate.##) + namespace.## @@ -35,15 +32,15 @@ case class Tag( object Tag { lazy val logger: Logger = Logger(getClass) - val tagColour: Regex = "(.*)#(\\p{XDigit}{6})".r + val tagColour: Regex = "(.*)(#\\p{XDigit}{6})".r val namespacePredicateValue: Regex = "([^\".:=]+)[.:]([^\".=]+)=\"?([^\"]+)\"?".r val namespacePredicate: Regex = "([^\".:=]+)[.]([^\".=]+)".r val PredicateValue: Regex = "([^\".:=]+)[=:]\"?([^\"]+)\"?".r val predicate: Regex = "([^\".:=]+)".r - def fromString(tagName: String, defaultNamespace: String, defaultColour: Int = 0): Tag = { + def fromString(tagName: String, defaultNamespace: String, defaultColour: String = "#000000"): Tag = { val (name, colour) = tagName match { - case tagColour(n, c) => n -> Try(Integer.parseUnsignedInt(c, 16)).getOrElse(defaultColour) + case tagColour(n, c) => n -> c case _ => tagName -> defaultColour } name match { diff --git a/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala b/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala index c3af62d20a..caa97fca8d 100644 --- a/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala +++ b/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala @@ -57,6 +57,7 @@ class TheHiveSchemaDefinition @Inject() extends Schema with UpdatableSchema { case error => logger.warn(s"Unable to remove lock on property $name: $error") } } + // TODO remove unused commented code ? 
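A minimal sketch of the behaviour change above, using the same illustrative values as the TagTest cases further below: with the colour kept as a "#rrggbb" string, Tag.fromString now stores the suffix verbatim instead of parsing it into an Int.

  Tag.fromString("ip:8.8.8.8#FF00FF", "_default_namespace_", "#ffff00")
  // == Tag("_default_namespace_", "ip", Some("8.8.8.8"), None, "#FF00FF")
  Tag.fromString("ip: 8.8.8.8", "_default_namespace_", "#ffff00")
  // == Tag("_default_namespace_", "ip", Some("8.8.8.8"), None, "#ffff00")  (no suffix, default colour kept)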
// def removeIndexLock(name: String): Try[Unit] = // db.managementTransaction { mgmt => // Try(mgmt.setConsistency(mgmt.getGraphIndex(name), ConsistencyModifier.DEFAULT)) @@ -138,6 +139,16 @@ class TheHiveSchemaDefinition @Inject() extends Schema with UpdatableSchema { Try(traversal.unsafeHas("name", "admin").raw.property("permissions", "manageTaxonomy").iterate()) Success(()) } + .updateGraph("Remove colour property for Tags", "Tag") { traversal => + traversal.removeProperty("colour").iterate() + Success(()) + } + .removeProperty("Tag", "colour", usedOnlyByThisModel = true) + .addProperty[String]("Tag", "colour") + .updateGraph("Add property colour for Tags ", "Tag") { traversal => + traversal.raw.property("colour", "#000000").iterate() + Success(()) + } val reflectionClasses = new Reflections( new ConfigurationBuilder() diff --git a/thehive/app/org/thp/thehive/services/AttachmentSrv.scala b/thehive/app/org/thp/thehive/services/AttachmentSrv.scala index cc3165c5a3..fefca23f5f 100644 --- a/thehive/app/org/thp/thehive/services/AttachmentSrv.scala +++ b/thehive/app/org/thp/thehive/services/AttachmentSrv.scala @@ -81,7 +81,7 @@ object AttachmentOps { implicit class AttachmentOpsDefs(traversal: Traversal.V[Attachment]) { def getByAttachmentId(attachmentId: String): Traversal.V[Attachment] = traversal.has(_.attachmentId, attachmentId) - def visible(implicit authContext: AuthContext): Traversal.V[Attachment] = traversal // TODO + def visible: Traversal.V[Attachment] = traversal // TODO } } diff --git a/thehive/app/org/thp/thehive/services/TagSrv.scala b/thehive/app/org/thp/thehive/services/TagSrv.scala index d937be0d92..ddc6ab87da 100644 --- a/thehive/app/org/thp/thehive/services/TagSrv.scala +++ b/thehive/app/org/thp/thehive/services/TagSrv.scala @@ -29,20 +29,10 @@ class TagSrv @Inject() (appConfig: ApplicationConfig, @Named("integrity-check-ac def defaultNamespace: String = defaultNamespaceConfig.get - private val defaultColourConfig: ConfigItem[String, Int] = - appConfig.mapItem[String, Int]( - "tags.defaultColour", - "Default colour of the automatically created tags", - { - case s if s(0) == '#' => parseTagColour(s.tail) - case _ => defaultColour - } - ) - - def defaultColour: Int = defaultColourConfig.get - - // TODO Duplication in Tag.scala - def parseTagColour(c: String) = Try(Integer.parseUnsignedInt(c, 16)).getOrElse(defaultColour) + private val defaultColourConfig: ConfigItem[String, String] = + appConfig.item[String]("tags.defaultColour", "Default colour of the automatically created tags") + + def defaultColour: String = defaultColourConfig.get def parseString(tagName: String): Tag = Tag.fromString(tagName, defaultNamespace, defaultColour) diff --git a/thehive/test/org/thp/thehive/controllers/v0/ConfigCtrlTest.scala b/thehive/test/org/thp/thehive/controllers/v0/ConfigCtrlTest.scala index 46caf3274b..17e88bb138 100644 --- a/thehive/test/org/thp/thehive/controllers/v0/ConfigCtrlTest.scala +++ b/thehive/test/org/thp/thehive/controllers/v0/ConfigCtrlTest.scala @@ -6,6 +6,9 @@ import play.api.libs.json.{JsObject, Json} import play.api.test.{FakeRequest, PlaySpecification} class ConfigCtrlTest extends PlaySpecification with TestAppBuilder { + +// TODO leave unused code ? 
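The net effect of the four schema operations above, sketched for one pre-existing tag (values taken from the Tag.json fixture further below; the description field is not in the fixture and is shown as None). This is a reading aid only, the actual data migration is performed by the updateGraph traversals:

  // before: colour stored as the Int 0
  // Tag("testNamespace", "testPredicate", Some("t1"), None, 0)
  // after: the Int property is dropped, re-created as a String and backfilled with black
  // Tag("testNamespace", "testPredicate", Some("t1"), None, "#000000")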
+// // def getList = { // val request = FakeRequest("GET", "/api/config") // .withHeaders("user" -> "admin@thehive.local") @@ -36,9 +39,10 @@ class ConfigCtrlTest extends PlaySpecification with TestAppBuilder { status(result) must equalTo(204).updateMessage(s => s"$s\n${contentAsString(result)}") - app[TagSrv].defaultColour must beEqualTo(0xff00) + app[TagSrv].defaultColour must beEqualTo("#00FF00") } - +// TODO leave unused tests ? +// // "get user specific configuration" in testApp { app => // val request = FakeRequest("GET", "/api/config/user/organisation") // .withHeaders("user" -> "admin@thehive.local") diff --git a/thehive/test/org/thp/thehive/controllers/v1/TaxonomyCtrlTest.scala b/thehive/test/org/thp/thehive/controllers/v1/TaxonomyCtrlTest.scala index 917a6fb6f3..21674a24c5 100644 --- a/thehive/test/org/thp/thehive/controllers/v1/TaxonomyCtrlTest.scala +++ b/thehive/test/org/thp/thehive/controllers/v1/TaxonomyCtrlTest.scala @@ -68,9 +68,9 @@ class TaxonomyCtrlTest extends PlaySpecification with TestAppBuilder { "A test taxonomy", 1, List( - OutputTag("test-taxo", "pred1", Some("entry1"), None, 0), - OutputTag("test-taxo", "pred2", Some("entry2"), None, 0), - OutputTag("test-taxo", "pred2", Some("entry21"), None, 0) + OutputTag("test-taxo", "pred1", Some("entry1"), None, "#000000"), + OutputTag("test-taxo", "pred2", Some("entry2"), None, "#000000"), + OutputTag("test-taxo", "pred2", Some("entry21"), None, "#000000") ) ) } @@ -123,7 +123,7 @@ class TaxonomyCtrlTest extends PlaySpecification with TestAppBuilder { "taxonomy1", "The taxonomy 1", 1, - List(OutputTag("taxonomy1", "pred1", Some("value1"), None, 0)) + List(OutputTag("taxonomy1", "pred1", Some("value1"), None, "#000000")) ) } @@ -144,7 +144,7 @@ class TaxonomyCtrlTest extends PlaySpecification with TestAppBuilder { val result = app[TaxonomyCtrl].importZip(request) status(result) must beEqualTo(201).updateMessage(s => s"$s\n${contentAsString(result)}") - contentAsString(result) must not contain("Failure") + contentAsString(result) must not contain "Failure" contentAsJson(result).as[JsArray].value.size must beEqualTo(2) } @@ -156,7 +156,7 @@ class TaxonomyCtrlTest extends PlaySpecification with TestAppBuilder { val result = app[TaxonomyCtrl].importZip(request) status(result) must beEqualTo(201).updateMessage(s => s"$s\n${contentAsString(result)}") - contentAsString(result) must not contain("Failure") + contentAsString(result) must not contain "Failure" contentAsJson(result).as[JsArray].value.size must beEqualTo(2) } @@ -168,7 +168,7 @@ class TaxonomyCtrlTest extends PlaySpecification with TestAppBuilder { val result = app[TaxonomyCtrl].importZip(request) status(result) must beEqualTo(201).updateMessage(s => s"$s\n${contentAsString(result)}") - contentAsString(result) must not contain("Failure") + contentAsString(result) must not contain "Failure" contentAsJson(result).as[JsArray].value.size must beEqualTo(1) } diff --git a/thehive/test/org/thp/thehive/models/TagTest.scala b/thehive/test/org/thp/thehive/models/TagTest.scala index 29a9021c47..24a4c59a58 100644 --- a/thehive/test/org/thp/thehive/models/TagTest.scala +++ b/thehive/test/org/thp/thehive/models/TagTest.scala @@ -4,43 +4,43 @@ import play.api.test.PlaySpecification class TagTest extends PlaySpecification { val defaultNamespace: String = "_default_namespace_" - val defaultColor: Int = 0xffff00 + val defaultColour: String = "#ffff00" - def parseTag(s: String): Tag = Tag.fromString(s, defaultNamespace, defaultColor) + def parseTag(s: String): Tag = Tag.fromString(s, 
defaultNamespace, defaultColour) "tag" should { "be parsed from key:value" in { val tag = parseTag("Module:atest_blah_blah") - tag must beEqualTo(Tag(defaultNamespace, "Module", Some("atest_blah_blah"), None, defaultColor)) + tag must beEqualTo(Tag(defaultNamespace, "Module", Some("atest_blah_blah"), None, defaultColour)) tag.toString must beEqualTo("Module=\"atest_blah_blah\"") } "be parsed from key:value=" in { val tag = parseTag("Id:7SeUoB3IBABD+tMh2PjVJYg==") - tag must beEqualTo(Tag(defaultNamespace, "Id", Some("7SeUoB3IBABD+tMh2PjVJYg=="), None, defaultColor)) + tag must beEqualTo(Tag(defaultNamespace, "Id", Some("7SeUoB3IBABD+tMh2PjVJYg=="), None, defaultColour)) tag.toString must beEqualTo("Id=\"7SeUoB3IBABD+tMh2PjVJYg==\"") } "be parsed from key: value" in { val tag = parseTag("domain: google.com") - tag must beEqualTo(Tag(defaultNamespace, "domain", Some("google.com"), None, defaultColor)) + tag must beEqualTo(Tag(defaultNamespace, "domain", Some("google.com"), None, defaultColour)) tag.toString must beEqualTo("domain=\"google.com\"") } "be parsed from key: a.b.c.d" in { val tag = parseTag("ip: 8.8.8.8") - tag must beEqualTo(Tag(defaultNamespace, "ip", Some("8.8.8.8"), None, defaultColor)) + tag must beEqualTo(Tag(defaultNamespace, "ip", Some("8.8.8.8"), None, defaultColour)) tag.toString must beEqualTo("ip=\"8.8.8.8\"") } "be parsed with colour" in { val tag = parseTag("ip:8.8.8.8#FF00FF") - tag must beEqualTo(Tag(defaultNamespace, "ip", Some("8.8.8.8"), None, 0xFF00FF)) + tag must beEqualTo(Tag(defaultNamespace, "ip", Some("8.8.8.8"), None, "#FF00FF")) tag.toString must beEqualTo("ip=\"8.8.8.8\"") } "be parsed with hash sign and colour" in { val tag = parseTag("case:#42#FF00FF") - tag must beEqualTo(Tag(defaultNamespace, "case", Some("#42"), None, 0xFF00FF)) + tag must beEqualTo(Tag(defaultNamespace, "case", Some("#42"), None, "#FF00FF")) tag.toString must beEqualTo("case=\"#42\"") } } diff --git a/thehive/test/resources/data/Tag.json b/thehive/test/resources/data/Tag.json index 094be1895a..30908714a4 100644 --- a/thehive/test/resources/data/Tag.json +++ b/thehive/test/resources/data/Tag.json @@ -4,76 +4,76 @@ "namespace": "testNamespace", "predicate": "testPredicate", "value": "t1", - "colour": 0 + "colour": "#000000" }, { "id": "tagt2", "namespace": "testNamespace", "predicate": "testPredicate", "value": "t2", - "colour": 0 + "colour": "#000000" }, { "id": "tagt3", "namespace": "testNamespace", "predicate": "testPredicate", "value": "t3", - "colour": 0 + "colour": "#000000" }, { "id": "tagalert", "namespace": "testNamespace", "predicate": "testPredicate", "value": "alert", - "colour": 0 + "colour": "#000000" }, { "id": "tagtest", "namespace": "testNamespace", "predicate": "testPredicate", "value": "test", - "colour": 0 + "colour": "#000000" }, { "id": "tagspam", "namespace": "testNamespace", "predicate": "testPredicate", "value": "spam", - "colour": 0 + "colour": "#000000" }, { "id": "tagsrc:mail", "namespace": "testNamespace", "predicate": "testPredicate", "value": "src:mail", - "colour": 0 + "colour": "#000000" }, { "id": "tagtestDomain", "namespace": "testNamespace", "predicate": "testPredicate", "value": "testDomain", - "colour": 0 + "colour": "#000000" }, { "id": "taghello", "namespace": "testNamespace", "predicate": "testPredicate", "value": "hello", - "colour": 0 + "colour": "#000000" }, { "id": "tagworld", "namespace": "testNamespace", "predicate": "testPredicate", "value": "world", - "colour": 0 + "colour": "#000000" }, { "id": "taxonomy-tag1", "namespace": 
"taxonomy1", "predicate": "pred1", "value": "value1", - "colour": 0 + "colour": "#000000" } ] \ No newline at end of file From 9e4746772701f7f999abab4a872243baf7a69b57 Mon Sep 17 00:00:00 2001 From: To-om Date: Wed, 6 Jan 2021 08:55:22 +0100 Subject: [PATCH 58/93] #1454 Add queries and controler for shares in v1 --- .../scala/org/thp/thehive/dto/v1/Share.scala | 48 ++++ .../thehive/controllers/v0/ShareCtrl.scala | 3 +- .../thp/thehive/controllers/v1/CaseCtrl.scala | 6 +- .../thehive/controllers/v1/Conversion.scala | 8 + .../controllers/v1/ObservableCtrl.scala | 3 +- .../thehive/controllers/v1/Properties.scala | 26 +- .../thp/thehive/controllers/v1/Router.scala | 35 ++- .../thehive/controllers/v1/ShareCtrl.scala | 266 ++++++++++++++++++ .../thp/thehive/controllers/v1/TaskCtrl.scala | 7 +- .../controllers/v1/TheHiveQueryExecutor.scala | 2 + .../org/thp/thehive/services/ShareSrv.scala | 8 +- .../org/thp/thehive/services/TaskSrv.scala | 10 +- 12 files changed, 383 insertions(+), 39 deletions(-) create mode 100644 dto/src/main/scala/org/thp/thehive/dto/v1/Share.scala create mode 100644 thehive/app/org/thp/thehive/controllers/v1/ShareCtrl.scala diff --git a/dto/src/main/scala/org/thp/thehive/dto/v1/Share.scala b/dto/src/main/scala/org/thp/thehive/dto/v1/Share.scala new file mode 100644 index 0000000000..90dcfdfebc --- /dev/null +++ b/dto/src/main/scala/org/thp/thehive/dto/v1/Share.scala @@ -0,0 +1,48 @@ +package org.thp.thehive.dto.v1 + +import org.thp.thehive.dto.v1.ObservablesFilter.ObservablesFilter +import org.thp.thehive.dto.v1.TasksFilter.TasksFilter +import play.api.libs.json.{Format, Json, Writes} + +import java.util.Date + +case class InputShare(organisationName: String, profile: String, tasks: TasksFilter, observables: ObservablesFilter) + +object TasksFilter extends Enumeration { + type TasksFilter = Value + + val all: TasksFilter = Value("all") + val none: TasksFilter = Value("none") + + implicit val format: Format[TasksFilter] = Json.formatEnum(TasksFilter) +} + +object ObservablesFilter extends Enumeration { + type ObservablesFilter = Value + + val all: ObservablesFilter = Value("all") + val none: ObservablesFilter = Value("none") + + implicit val format: Format[ObservablesFilter] = Json.formatEnum(ObservablesFilter) +} + +object InputShare { + implicit val writes: Writes[InputShare] = Json.writes[InputShare] +} + +case class OutputShare( + _id: String, + _type: String, + _createdBy: String, + _updatedBy: Option[String] = None, + _createdAt: Date, + _updatedAt: Option[Date] = None, + caseId: String, + profileName: String, + organisationName: String, + owner: Boolean +) + +object OutputShare { + implicit val format: Format[OutputShare] = Json.format[OutputShare] +} diff --git a/thehive/app/org/thp/thehive/controllers/v0/ShareCtrl.scala b/thehive/app/org/thp/thehive/controllers/v0/ShareCtrl.scala index 4d3c6f890c..1ab624af99 100644 --- a/thehive/app/org/thp/thehive/controllers/v0/ShareCtrl.scala +++ b/thehive/app/org/thp/thehive/controllers/v0/ShareCtrl.scala @@ -174,7 +174,8 @@ class ShareCtrl @Inject() ( val shares = caseSrv .get(EntityIdOrName(caseId)) .shares - .filter(_.organisation.filterNot(_.get(request.organisation)).visible) + .visible + .filterNot(_.get(request.organisation)) .richShare .toSeq diff --git a/thehive/app/org/thp/thehive/controllers/v1/CaseCtrl.scala b/thehive/app/org/thp/thehive/controllers/v1/CaseCtrl.scala index 0440346733..e52b9b8b75 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/CaseCtrl.scala +++ 
b/thehive/app/org/thp/thehive/controllers/v1/CaseCtrl.scala @@ -1,6 +1,5 @@ package org.thp.thehive.controllers.v1 -import javax.inject.{Inject, Named, Singleton} import org.thp.scalligraph.controllers.{Entrypoint, FieldsParser} import org.thp.scalligraph.models.{Database, Entity} import org.thp.scalligraph.query.{ParamQuery, PropertyUpdater, PublicProperties, Query} @@ -14,10 +13,12 @@ import org.thp.thehive.services.AlertOps._ import org.thp.thehive.services.CaseOps._ import org.thp.thehive.services.CaseTemplateOps._ import org.thp.thehive.services.OrganisationOps._ +import org.thp.thehive.services.ShareOps._ import org.thp.thehive.services.UserOps._ import org.thp.thehive.services._ import play.api.mvc.{Action, AnyContent, Results} +import javax.inject.{Inject, Named, Singleton} import scala.util.{Success, Try} @Singleton @@ -58,7 +59,8 @@ class CaseCtrl @Inject() ( Query[Traversal.V[Case], Traversal.V[Observable]]("observables", (caseSteps, authContext) => caseSteps.observables(authContext)), Query[Traversal.V[Case], Traversal.V[User]]("assignableUsers", (caseSteps, authContext) => caseSteps.assignableUsers(authContext)), Query[Traversal.V[Case], Traversal.V[Organisation]]("organisations", (caseSteps, authContext) => caseSteps.organisations.visible(authContext)), - Query[Traversal.V[Case], Traversal.V[Alert]]("alerts", (caseSteps, authContext) => caseSteps.alert.visible(authContext)) + Query[Traversal.V[Case], Traversal.V[Alert]]("alerts", (caseSteps, authContext) => caseSteps.alert.visible(authContext)), + Query[Traversal.V[Case], Traversal.V[Share]]("shares", (caseSteps, authContext) => caseSteps.shares.visible(authContext)) ) def create: Action[AnyContent] = diff --git a/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala b/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala index ac556fca70..f1dd620ad6 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala @@ -288,6 +288,14 @@ object Conversion { .transform } + implicit val shareOutput: Renderer.Aux[RichShare, OutputShare] = Renderer.toJson[RichShare, OutputShare]( + _.into[OutputShare] + .withFieldComputed(_._id, _.share._id.toString) + .withFieldConst(_._type, "Share") + .withFieldComputed(_.caseId, _.caseId.toString) + .transform + ) + implicit val profileOutput: Renderer.Aux[Profile with Entity, OutputProfile] = Renderer.toJson[Profile with Entity, OutputProfile](profile => profile .asInstanceOf[Profile] diff --git a/thehive/app/org/thp/thehive/controllers/v1/ObservableCtrl.scala b/thehive/app/org/thp/thehive/controllers/v1/ObservableCtrl.scala index 49c9f3e3b2..86388efde6 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/ObservableCtrl.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/ObservableCtrl.scala @@ -82,7 +82,8 @@ class ObservableCtrl @Inject() ( (observableSteps, authContext) => observableSteps.filteredSimilar.visible(authContext) ), Query[Traversal.V[Observable], Traversal.V[Case]]("case", (observableSteps, _) => observableSteps.`case`), - Query[Traversal.V[Observable], Traversal.V[Alert]]("alert", (observableSteps, _) => observableSteps.alert) + Query[Traversal.V[Observable], Traversal.V[Alert]]("alert", (observableSteps, _) => observableSteps.alert), + Query[Traversal.V[Observable], Traversal.V[Share]]("shares", (observableSteps, authContext) => observableSteps.shares.visible(authContext)) ) def create(caseId: String): Action[AnyContent] = diff --git 
a/thehive/app/org/thp/thehive/controllers/v1/Properties.scala b/thehive/app/org/thp/thehive/controllers/v1/Properties.scala index fae8188f4e..b9c3c8f87d 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/Properties.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/Properties.scala @@ -21,6 +21,7 @@ import org.thp.thehive.services.ObservableOps._ import org.thp.thehive.services.OrganisationOps._ import org.thp.thehive.services.TagOps._ import org.thp.thehive.services.TaskOps._ +import org.thp.thehive.services.ShareOps._ import org.thp.thehive.services.UserOps._ import org.thp.thehive.services._ import play.api.libs.json.{JsObject, JsValue, Json} @@ -193,10 +194,7 @@ class Properties @Inject() ( .property("pap", UMapping.int)(_.field.updatable) .property("status", UMapping.enum[CaseStatus.type])(_.field.updatable) .property("summary", UMapping.string.optional)(_.field.updatable) - .property("actionRequired", UMapping.boolean)(_ - .authSelect((t, auth) => t.isActionRequired(auth)) - .readonly - ) + .property("actionRequired", UMapping.boolean)(_.authSelect((t, auth) => t.isActionRequired(auth)).readonly) .property("assignee", UMapping.string.optional)(_.select(_.user.value(_.login)).custom { (_, login, vertex, _, graph, authContext) => for { c <- caseSrv.get(vertex)(graph).getOrFail("Case") @@ -407,6 +405,17 @@ class Properties @Inject() ( .property("permissions", UMapping.string.set)(_.field.updatable) .build + lazy val share: PublicProperties = + PublicPropertyListBuilder[Share] + .property("caseId", UMapping.entityId)(_.select(_.`case`._id).readonly) + .property("caseNumber", UMapping.int)(_.select(_.`case`.value(_.number)).readonly) + .property("organisationId", UMapping.entityId)(_.select(_.organisation._id).readonly) + .property("organisationName", UMapping.string)(_.select(_.organisation.value(_.name)).readonly) + .property("profileId", UMapping.entityId)(_.select(_.profile._id).readonly) + .property("profileName", UMapping.string)(_.select(_.profile.value(_.name)).readonly) + .property("owner", UMapping.boolean)(_.field.readonly) + .build + lazy val task: PublicProperties = PublicPropertyListBuilder[Task] .property("title", UMapping.string)(_.field.updatable) @@ -433,12 +442,9 @@ class Properties @Inject() ( } .map(_ => Json.obj("assignee" -> value)) }) - .property("actionRequired", UMapping.boolean)(_ - .authSelect((t, authContext) => { - t.actionRequired(authContext) - }) - .readonly - ) + .property("actionRequired", UMapping.boolean)(_.authSelect { (t, authContext) => + t.actionRequired(authContext) + }.readonly) .build lazy val log: PublicProperties = diff --git a/thehive/app/org/thp/thehive/controllers/v1/Router.scala b/thehive/app/org/thp/thehive/controllers/v1/Router.scala index 82f1ad699a..314dd5ed5f 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/Router.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/Router.scala @@ -25,7 +25,7 @@ class Router @Inject() ( // permissionCtrl: PermissionCtrl, profileCtrl: ProfileCtrl, taskCtrl: TaskCtrl, - // shareCtrl: ShareCtrl, + shareCtrl: ShareCtrl, userCtrl: UserCtrl, statusCtrl: StatusCtrl // streamCtrl: StreamCtrl, @@ -58,7 +58,7 @@ class Router @Inject() ( case PATCH(p"/observable/_bulk") => observableCtrl.bulkUpdate case PATCH(p"/observable/$observableId") => observableCtrl.update(observableId) // case GET(p"/observable/$observableId/similar") => observableCtrl.findSimilar(observableId) -// case POST(p"/observable/$observableId/shares") => shareCtrl.shareObservable(observableId) + case 
POST(p"/observable/$observableId/shares") => shareCtrl.shareObservable(observableId) case GET(p"/caseTemplate") => caseTemplateCtrl.list case POST(p"/caseTemplate") => caseTemplateCtrl.create @@ -83,18 +83,25 @@ class Router @Inject() ( case GET(p"/organisation/$organisationId") => organisationCtrl.get(organisationId) case PATCH(p"/organisation/$organisationId") => organisationCtrl.update(organisationId) -// case GET(p"/share") => shareCtrl.list -// case POST(p"/share") => shareCtrl.create -// case GET(p"/share/$shareId") => shareCtrl.get(shareId) -// case PATCH(p"/share/$shareId") => shareCtrl.update(shareId) - - case GET(p"/task") => taskCtrl.list - case POST(p"/task") => taskCtrl.create - case GET(p"/task/$taskId") => taskCtrl.get(taskId) - case PATCH(p"/task/$taskId") => taskCtrl.update(taskId) - case GET(p"/task/$taskId/actionRequired") => taskCtrl.isActionRequired(taskId) - case PUT(p"/task/$taskId/actionRequired/$orgaId") => taskCtrl.actionRequired(taskId, orgaId, required = true) - case PUT(p"/task/$taskId/actionDone/$orgaId") => taskCtrl.actionRequired(taskId, orgaId, required = false) + case DELETE(p"/case/shares") => shareCtrl.removeShares() + case POST(p"/case/$caseId/shares") => shareCtrl.shareCase(caseId) + case DELETE(p"/case/$caseId/shares") => shareCtrl.removeShares(caseId) + case DELETE(p"/task/$taskId/shares") => shareCtrl.removeTaskShares(taskId) + case DELETE(p"/observable/$observableId/shares") => shareCtrl.removeObservableShares(observableId) + case GET(p"/case/$caseId/shares") => shareCtrl.listShareCases(caseId) + case GET(p"/case/$caseId/task/$taskId/shares") => shareCtrl.listShareTasks(caseId, taskId) + case GET(p"/case/$caseId/observable/$observableId/shares") => shareCtrl.listShareObservables(caseId, observableId) + case POST(p"/case/task/$taskId/shares") => shareCtrl.shareTask(taskId) + case DELETE(p"/case/share/$shareId") => shareCtrl.removeShare(shareId) + case PATCH(p"/case/share/$shareId") => shareCtrl.updateShare(shareId) + + case GET(p"/task") => taskCtrl.list + case POST(p"/task") => taskCtrl.create + case GET(p"/task/$taskId") => taskCtrl.get(taskId) + case PATCH(p"/task/$taskId") => taskCtrl.update(taskId) + case GET(p"/task/$taskId/actionRequired") => taskCtrl.isActionRequired(taskId) + case PUT(p"/task/$taskId/actionRequired/$orgaId") => taskCtrl.actionRequired(taskId, orgaId, required = true) + case PUT(p"/task/$taskId/actionDone/$orgaId") => taskCtrl.actionRequired(taskId, orgaId, required = false) // POST /case/:caseId/task/_search controllers.TaskCtrl.findInCase(caseId) // POST /case/task/_stats controllers.TaskCtrl.stats() diff --git a/thehive/app/org/thp/thehive/controllers/v1/ShareCtrl.scala b/thehive/app/org/thp/thehive/controllers/v1/ShareCtrl.scala new file mode 100644 index 0000000000..b905cac510 --- /dev/null +++ b/thehive/app/org/thp/thehive/controllers/v1/ShareCtrl.scala @@ -0,0 +1,266 @@ +package org.thp.thehive.controllers.v1 + +import org.apache.tinkerpop.gremlin.structure.Graph +import org.thp.scalligraph.auth.AuthContext +import org.thp.scalligraph.controllers.{Entrypoint, FieldsParser} +import org.thp.scalligraph.models.Database +import org.thp.scalligraph.query.{ParamQuery, PublicProperties, Query} +import org.thp.scalligraph.traversal.TraversalOps._ +import org.thp.scalligraph.traversal.{IteratorOutput, Traversal} +import org.thp.scalligraph.{AuthorizationError, BadRequestError, EntityIdOrName, RichSeq} +import org.thp.thehive.controllers.v1.Conversion._ +import org.thp.thehive.dto.v1.{InputShare, ObservablesFilter, TasksFilter} 
+import org.thp.thehive.models.{Case, Observable, Organisation, Permissions, RichShare, Share, Task} +import org.thp.thehive.services.CaseOps._ +import org.thp.thehive.services.ObservableOps._ +import org.thp.thehive.services.OrganisationOps._ +import org.thp.thehive.services.ShareOps._ +import org.thp.thehive.services.TaskOps._ +import org.thp.thehive.services._ +import play.api.mvc.{Action, AnyContent, Results} + +import javax.inject.{Inject, Named} +import scala.util.{Failure, Success, Try} + +class ShareCtrl @Inject() ( + entrypoint: Entrypoint, + shareSrv: ShareSrv, + properties: Properties, + organisationSrv: OrganisationSrv, + caseSrv: CaseSrv, + taskSrv: TaskSrv, + observableSrv: ObservableSrv, + profileSrv: ProfileSrv, + @Named("with-thehive-schema") implicit val db: Database +) extends QueryableCtrl { + override val entityName: String = "share" + override val publicProperties: PublicProperties = properties.share + override val initialQuery: Query = + Query.init[Traversal.V[Share]]("listShare", (graph, authContext) => organisationSrv.startTraversal(graph).visible(authContext).shares) + override val pageQuery: ParamQuery[OutputParam] = Query.withParam[OutputParam, Traversal.V[Share], IteratorOutput]( + "page", + FieldsParser[OutputParam], + (range, shareSteps, _) => shareSteps.richPage(range.from, range.to, range.extraData.contains("total"))(_.richShare) + ) + override val outputQuery: Query = Query.outputWithContext[RichShare, Traversal.V[Share]]((shareSteps, _) => shareSteps.richShare) + override val getQuery: ParamQuery[EntityIdOrName] = Query.initWithParam[EntityIdOrName, Traversal.V[Share]]( + "getShare", + FieldsParser[EntityIdOrName], + (idOrName, graph, authContext) => shareSrv.get(idOrName)(graph).visible(authContext) + ) + override val extraQueries: Seq[ParamQuery[_]] = Seq( + Query[Traversal.V[Share], Traversal.V[Case]]("case", (shareSteps, _) => shareSteps.`case`), + Query[Traversal.V[Share], Traversal.V[Observable]]("observables", (shareSteps, _) => shareSteps.observables), + Query[Traversal.V[Share], Traversal.V[Task]]("tasks", (shareSteps, _) => shareSteps.tasks), + Query[Traversal.V[Share], Traversal.V[Organisation]]("organisation", (shareSteps, _) => shareSteps.organisation) + ) + + def shareCase(caseId: String): Action[AnyContent] = + entrypoint("create case shares") + .extract("shares", FieldsParser[InputShare].sequence.on("shares")) + .authTransaction(db) { implicit request => implicit graph => + val inputShares: Seq[InputShare] = request.body("shares") + caseSrv + .get(EntityIdOrName(caseId)) + .can(Permissions.manageShare) + .getOrFail("Case") + .flatMap { `case` => + inputShares.toTry { inputShare => + for { + organisation <- + organisationSrv + .get(request.organisation) + .visibleOrganisationsFrom + .get(EntityIdOrName(inputShare.organisationName)) + .getOrFail("Organisation") + profile <- profileSrv.getOrFail(EntityIdOrName(inputShare.profile)) + share <- shareSrv.shareCase(owner = false, `case`, organisation, profile) + richShare <- shareSrv.get(share).richShare.getOrFail("Share") + _ <- if (inputShare.tasks == TasksFilter.all) shareSrv.shareCaseTasks(share) else Success(Nil) + _ <- if (inputShare.observables == ObservablesFilter.all) shareSrv.shareCaseObservables(share) else Success(Nil) + } yield richShare + } + } + .map(shares => Results.Ok(shares.toJson)) + } + + def removeShare(shareId: String): Action[AnyContent] = + entrypoint("remove share") + .authTransaction(db) { implicit request => implicit graph => + 
doRemoveShare(EntityIdOrName(shareId)).map(_ => Results.NoContent) + } + + def removeShares(): Action[AnyContent] = + entrypoint("remove share") + .extract("shares", FieldsParser[String].sequence.on("ids")) + .authTransaction(db) { implicit request => implicit graph => + val shareIds: Seq[String] = request.body("shares") + shareIds.map(EntityIdOrName.apply).toTry(doRemoveShare(_)).map(_ => Results.NoContent) + } + + def removeShares(caseId: String): Action[AnyContent] = + entrypoint("remove share") + .extract("organisations", FieldsParser[String].sequence.on("organisations")) + .authTransaction(db) { implicit request => implicit graph => + val organisations: Seq[String] = request.body("organisations") + organisations + .map(EntityIdOrName(_)) + .toTry { organisationId => + for { + organisation <- organisationSrv.get(organisationId).getOrFail("Organisation") + _ <- + if (request.organisation.fold(_ == organisation._id, _ == organisation.name)) + Failure(BadRequestError("You cannot remove your own share")) + else Success(()) + shareId <- + caseSrv + .get(EntityIdOrName(caseId)) + .can(Permissions.manageShare) + .share(organisationId) + .has(_.owner, false) + ._id + .orFail(AuthorizationError("Operation not permitted")) + _ <- shareSrv.remove(shareId) + } yield () + } + .map(_ => Results.NoContent) + } + + def removeTaskShares(taskId: String): Action[AnyContent] = + entrypoint("remove share tasks") + .extract("organisations", FieldsParser[String].sequence.on("organisations")) + .authTransaction(db) { implicit request => implicit graph => + val organisations: Seq[String] = request.body("organisations") + + taskSrv + .getOrFail(EntityIdOrName(taskId)) + .flatMap { task => + organisations.toTry { organisationName => + organisationSrv + .getOrFail(EntityIdOrName(organisationName)) + .flatMap(shareSrv.removeShareTasks(task, _)) + } + } + .map(_ => Results.NoContent) + } + + def removeObservableShares(observableId: String): Action[AnyContent] = + entrypoint("remove share observables") + .extract("organisations", FieldsParser[String].sequence.on("organisations")) + .authTransaction(db) { implicit request => implicit graph => + val organisations: Seq[String] = request.body("organisations") + + observableSrv + .getOrFail(EntityIdOrName(observableId)) + .flatMap { observable => + organisations.toTry { organisationName => + organisationSrv + .getOrFail(EntityIdOrName(organisationName)) + .flatMap(shareSrv.removeShareObservable(observable, _)) + } + } + .map(_ => Results.NoContent) + } + + private def doRemoveShare(shareId: EntityIdOrName)(implicit graph: Graph, authContext: AuthContext): Try[Unit] = + if (!shareSrv.get(shareId).`case`.can(Permissions.manageShare).exists) + Failure(AuthorizationError("You are not authorized to remove share")) + else if (shareSrv.get(shareId).byOrganisation(authContext.organisation).exists) + Failure(AuthorizationError("You can't remove your share")) + else if (shareSrv.get(shareId).has(_.owner, true).exists) + Failure(AuthorizationError("You can't remove initial shares")) + else + shareSrv.remove(shareId) + + def updateShare(shareId: String): Action[AnyContent] = + entrypoint("update share") + .extract("profile", FieldsParser.string.on("profile")) + .authTransaction(db) { implicit request => implicit graph => + val profile: String = request.body("profile") + if (!shareSrv.get(EntityIdOrName(shareId)).`case`.can(Permissions.manageShare).exists) + Failure(AuthorizationError("You are not authorized to remove share")) + for { + richShare <- + shareSrv + 
.get(EntityIdOrName(shareId)) + .filter(_.organisation.visibleOrganisationsTo.visible) + .richShare + .getOrFail("Share") + profile <- profileSrv.getOrFail(EntityIdOrName(profile)) + _ <- shareSrv.update(richShare.share, profile) + } yield Results.Ok + } + + def listShareCases(caseId: String): Action[AnyContent] = + entrypoint("list case shares") + .authRoTransaction(db) { implicit request => implicit graph => + val shares = caseSrv + .get(EntityIdOrName(caseId)) + .shares + .visible + .filterNot(_.get(request.organisation)) + .richShare + .toSeq + + Success(Results.Ok(shares.toJson)) + } + + def listShareTasks(caseId: String, taskId: String): Action[AnyContent] = + entrypoint("list task shares") + .authRoTransaction(db) { implicit request => implicit graph => + val shares = caseSrv + .get(EntityIdOrName(caseId)) + .can(Permissions.manageShare) + .shares + .visible + .filterNot(_.get(request.organisation)) + .byTask(EntityIdOrName(taskId)) + .richShare + .toSeq + + Success(Results.Ok(shares.toJson)) + } + + def listShareObservables(caseId: String, observableId: String): Action[AnyContent] = + entrypoint("list observable shares") + .authRoTransaction(db) { implicit request => implicit graph => + val shares = caseSrv + .get(EntityIdOrName(caseId)) + .can(Permissions.manageShare) + .shares + .visible + .filterNot(_.get(request.organisation)) + .byObservable(EntityIdOrName(observableId)) + .richShare + .toSeq + + Success(Results.Ok(shares.toJson)) + } + + def shareTask(taskId: String): Action[AnyContent] = + entrypoint("share task") + .extract("organisations", FieldsParser.string.sequence.on("organisations")) + .authTransaction(db) { implicit request => implicit graph => + val organisationIds: Seq[String] = request.body("organisations") + + for { + task <- taskSrv.getOrFail(EntityIdOrName(taskId)) + _ <- taskSrv.get(task).`case`.can(Permissions.manageShare).existsOrFail + organisations <- organisationIds.map(EntityIdOrName(_)).toTry(organisationSrv.get(_).visible.getOrFail("Organisation")) + _ <- shareSrv.addTaskShares(task, organisations) + } yield Results.NoContent + } + + def shareObservable(observableId: String): Action[AnyContent] = + entrypoint("share observable") + .extract("organisations", FieldsParser.string.sequence.on("organisations")) + .authTransaction(db) { implicit request => implicit graph => + val organisationIds: Seq[String] = request.body("organisations") + for { + observable <- observableSrv.getOrFail(EntityIdOrName(observableId)) + _ <- observableSrv.get(observable).`case`.can(Permissions.manageShare).existsOrFail + organisations <- organisationIds.map(EntityIdOrName(_)).toTry(organisationSrv.get(_).visible.getOrFail("Organisation")) + _ <- shareSrv.addObservableShares(observable, organisations) + } yield Results.NoContent + } +} diff --git a/thehive/app/org/thp/thehive/controllers/v1/TaskCtrl.scala b/thehive/app/org/thp/thehive/controllers/v1/TaskCtrl.scala index 55be07a869..7330f74f89 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/TaskCtrl.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/TaskCtrl.scala @@ -59,7 +59,8 @@ class TaskCtrl @Inject() ( Query[Traversal.V[Task], Traversal.V[Log]]("logs", (taskSteps, _) => taskSteps.logs), Query[Traversal.V[Task], Traversal.V[Case]]("case", (taskSteps, _) => taskSteps.`case`), Query[Traversal.V[Task], Traversal.V[CaseTemplate]]("caseTemplate", (taskSteps, _) => taskSteps.caseTemplate), - Query[Traversal.V[Task], Traversal.V[Organisation]]("organisations", (taskSteps, authContext) => 
taskSteps.organisations.visible(authContext)) + Query[Traversal.V[Task], Traversal.V[Organisation]]("organisations", (taskSteps, authContext) => taskSteps.organisations.visible(authContext)), + Query[Traversal.V[Task], Traversal.V[Share]]("shares", (taskSteps, authContext) => taskSteps.shares.visible(authContext)) ) def create: Action[AnyContent] = @@ -115,14 +116,14 @@ class TaskCtrl @Inject() ( def isActionRequired(taskId: String): Action[AnyContent] = entrypoint("is action required") - .authTransaction(db){ implicit request => implicit graph => + .authTransaction(db) { implicit request => implicit graph => val actionTraversal = taskSrv.get(EntityIdOrName(taskId)).visible.actionRequiredMap Success(Results.Ok(actionTraversal.toSeq.toMap.toJson)) } def actionRequired(taskId: String, orgaId: String, required: Boolean): Action[AnyContent] = entrypoint("action required") - .authTransaction(db){ implicit request => implicit graph => + .authTransaction(db) { implicit request => implicit graph => for { organisation <- organisationSrv.get(EntityIdOrName(orgaId)).visible.getOrFail("Organisation") task <- taskSrv.get(EntityIdOrName(taskId)).visible.getOrFail("Task") diff --git a/thehive/app/org/thp/thehive/controllers/v1/TheHiveQueryExecutor.scala b/thehive/app/org/thp/thehive/controllers/v1/TheHiveQueryExecutor.scala index bbc3b86b81..e1f64bf01c 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/TheHiveQueryExecutor.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/TheHiveQueryExecutor.scala @@ -30,6 +30,7 @@ class TheHiveQueryExecutor @Inject() ( observableTypeCtrl: ObservableTypeCtrl, organisationCtrl: OrganisationCtrl, profileCtrl: ProfileCtrl, + shareCtrl: ShareCtrl, taskCtrl: TaskCtrl, userCtrl: UserCtrl, // dashboardCtrl: DashboardCtrl, @@ -51,6 +52,7 @@ class TheHiveQueryExecutor @Inject() ( organisationCtrl, // pageCtrl, profileCtrl, + shareCtrl, // tagCtrl, taskCtrl, userCtrl diff --git a/thehive/app/org/thp/thehive/services/ShareSrv.scala b/thehive/app/org/thp/thehive/services/ShareSrv.scala index a385687ed3..acf44a3d65 100644 --- a/thehive/app/org/thp/thehive/services/ShareSrv.scala +++ b/thehive/app/org/thp/thehive/services/ShareSrv.scala @@ -149,7 +149,7 @@ class ShareSrv @Inject() ( get(share) .`case` .tasks - .filterNot(_.taskToShares.hasId(share._id)) + .filterNot(_.shares.hasId(share._id)) .toIterator .toTry(shareTaskSrv.create(ShareTask(), share, _)) @@ -213,7 +213,7 @@ class ShareSrv @Inject() ( )(implicit graph: Graph, authContext: AuthContext): Try[Unit] = { val (orgsToAdd, orgsToRemove) = taskSrv .get(task) - .taskToShares + .shares .organisation .toIterator .foldLeft((organisations.toSet, Set.empty[Organisation with Entity])) { @@ -239,7 +239,7 @@ class ShareSrv @Inject() ( )(implicit graph: Graph, authContext: AuthContext): Try[Unit] = { val existingOrgs = taskSrv .get(task) - .taskToShares + .shares .organisation .toSeq @@ -326,6 +326,8 @@ object ShareOps { def organisation: Traversal.V[Organisation] = traversal.in[OrganisationShare].v[Organisation] + def visible(implicit authContext: AuthContext): Traversal.V[Share] = traversal.filter(_.organisation.visible) + def tasks: Traversal.V[Task] = traversal.out[ShareTask].v[Task] def byTask(taskId: EntityIdOrName): Traversal.V[Share] = diff --git a/thehive/app/org/thp/thehive/services/TaskSrv.scala b/thehive/app/org/thp/thehive/services/TaskSrv.scala index 5602d2e68a..5f680f339e 100644 --- a/thehive/app/org/thp/thehive/services/TaskSrv.scala +++ b/thehive/app/org/thp/thehive/services/TaskSrv.scala @@ -47,7 +47,7 @@ 
class TaskSrv @Inject() (caseSrvProvider: Provider[CaseSrv], auditSrv: AuditSrv, get(task).caseTemplate.headOption match { case None => get(task) - .taskToShares + .shares .toIterator .toTry { share => auditSrv @@ -145,7 +145,7 @@ object TaskOps { def can(permission: Permission)(implicit authContext: AuthContext): Traversal.V[Task] = if (authContext.permissions.contains(permission)) - traversal.filter(_.taskToShares.filter(_.profile.has(_.permissions, permission)).organisation.current) + traversal.filter(_.shares.filter(_.profile.has(_.permissions, permission)).organisation.current) else traversal.limit(0) @@ -166,9 +166,9 @@ object TaskOps { def organisations: Traversal.V[Organisation] = traversal.in[ShareTask].in[OrganisationShare].v[Organisation] def organisations(permission: Permission): Traversal.V[Organisation] = - taskToShares.filter(_.profile.has(_.permissions, permission)).organisation + shares.filter(_.profile.has(_.permissions, permission)).organisation - def origin: Traversal.V[Organisation] = taskToShares.has(_.owner, true).organisation + def origin: Traversal.V[Organisation] = shares.has(_.owner, true).organisation def assignableUsers(implicit authContext: AuthContext): Traversal.V[User] = organisations(Permissions.manageTask) @@ -226,7 +226,7 @@ object TaskOps { def unassign(): Unit = traversal.outE[TaskUser].remove() - def taskToShares: Traversal.V[Share] = traversal.in[ShareTask].v[Share] + def shares: Traversal.V[Share] = traversal.in[ShareTask].v[Share] def share(implicit authContext: AuthContext): Traversal.V[Share] = share(authContext.organisation) From e613df2369878a8657cc32f363522ffadc07e871 Mon Sep 17 00:00:00 2001 From: To-om Date: Wed, 6 Jan 2021 17:45:05 +0100 Subject: [PATCH 59/93] #1725 Retry to connect to cassandra until it is ready --- ScalliGraph | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ScalliGraph b/ScalliGraph index ddbc847ef3..2426d2e5f1 160000 --- a/ScalliGraph +++ b/ScalliGraph @@ -1 +1 @@ -Subproject commit ddbc847ef30f2507e1287d894ad2191d873a0a87 +Subproject commit 2426d2e5f19ae24ed7c114d5ebd3c902027a6679 From 9f1775d38f4fab682395d11c049e7d416be6ff81 Mon Sep 17 00:00:00 2001 From: To-om Date: Wed, 6 Jan 2021 18:19:15 +0100 Subject: [PATCH 60/93] #1732 Add API for observable creation in an alert --- .../controllers/v0/ObservableCtrl.scala | 93 ++++++++++++++- .../thp/thehive/controllers/v0/Router.scala | 17 +-- .../controllers/v1/ObservableCtrl.scala | 110 +++++++++++++++--- .../controllers/v0/ObservableCtrlTest.scala | 12 +- 4 files changed, 197 insertions(+), 35 deletions(-) diff --git a/thehive/app/org/thp/thehive/controllers/v0/ObservableCtrl.scala b/thehive/app/org/thp/thehive/controllers/v0/ObservableCtrl.scala index 1a8ba2b182..c35d18157b 100644 --- a/thehive/app/org/thp/thehive/controllers/v0/ObservableCtrl.scala +++ b/thehive/app/org/thp/thehive/controllers/v0/ObservableCtrl.scala @@ -12,6 +12,7 @@ import org.thp.scalligraph.traversal.{Converter, IteratorOutput, Traversal} import org.thp.thehive.controllers.v0.Conversion._ import org.thp.thehive.dto.v0.{InputAttachment, InputObservable} import org.thp.thehive.models._ +import org.thp.thehive.services.AlertOps._ import org.thp.thehive.services.CaseOps._ import org.thp.thehive.services.ObservableOps._ import org.thp.thehive.services.OrganisationOps._ @@ -37,6 +38,7 @@ class ObservableCtrl @Inject() ( observableSrv: ObservableSrv, observableTypeSrv: ObservableTypeSrv, caseSrv: CaseSrv, + alertSrv: AlertSrv, attachmentSrv: AttachmentSrv, errorHandler: ErrorHandler, 
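The createInCase and createInAlert handlers below both collect per-observable results as Either values and fold them into ordered success and failure lists before building the response; a standalone sketch of that accumulation pattern, with simplified types:

  // Each Left is a failure payload, each Right a created observable; input order is preserved.
  def split[A, B](results: Seq[Either[A, B]]): (Seq[B], Seq[A]) =
    results.foldLeft((Seq.empty[B], Seq.empty[A])) {
      case ((ok, ko), Right(b)) => (ok :+ b, ko)
      case ((ok, ko), Left(a))  => (ok, ko :+ a)
    }
  // split(Seq(Right("obs1"), Left("bad data"), Right("obs2"))) == (Seq("obs1", "obs2"), Seq("bad data"))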
@Named("v0") override val queryExecutor: QueryExecutor, @@ -44,8 +46,9 @@ class ObservableCtrl @Inject() ( temporaryFileCreator: DefaultTemporaryFileCreator ) extends ObservableRenderer with QueryCtrl { - def create(caseId: String): Action[AnyContent] = - entrypoint("create artifact") + + def createInCase(caseId: String): Action[AnyContent] = + entrypoint("create artifact in case") .extract("artifact", FieldsParser[InputObservable]) .extract("isZip", FieldsParser.boolean.optional.on("isZip")) .extract("zipPassword", FieldsParser.string.optional.on("zipPassword")) @@ -70,8 +73,8 @@ class ObservableCtrl @Inject() ( case (case0, observableType) => val (successes, failures) = inputAttachObs .flatMap { obs => - obs.attachment.map(createAttachmentObservable(case0, obs, observableType, _)) ++ - obs.data.map(createSimpleObservable(case0, obs, observableType, _)) + obs.attachment.map(createAttachmentObservableInCase(case0, obs, observableType, _)) ++ + obs.data.map(createSimpleObservableInCase(case0, obs, observableType, _)) } .foldLeft[(Seq[JsValue], Seq[JsValue])]((Nil, Nil)) { case ((s, f), Right(o)) => (s :+ o, f) @@ -82,7 +85,7 @@ class ObservableCtrl @Inject() ( } } - def createSimpleObservable( + private def createSimpleObservableInCase( `case`: Case with Entity, inputObservable: InputObservable, observableType: ObservableType with Entity, @@ -98,7 +101,7 @@ class ObservableCtrl @Inject() ( case Failure(error) => Left(errorHandler.toErrorResult(error)._2 ++ Json.obj("object" -> Json.obj("data" -> data))) } - def createAttachmentObservable( + private def createAttachmentObservableInCase( `case`: Case with Entity, inputObservable: InputObservable, observableType: ObservableType with Entity, @@ -122,6 +125,84 @@ class ObservableCtrl @Inject() ( Left(Json.obj("object" -> Json.obj("data" -> s"file:$filename", "attachment" -> Json.obj("name" -> filename)))) } + def createInAlert(alertId: String): Action[AnyContent] = + entrypoint("create artifact in alert") + .extract("artifact", FieldsParser[InputObservable]) + .extract("isZip", FieldsParser.boolean.optional.on("isZip")) + .extract("zipPassword", FieldsParser.string.optional.on("zipPassword")) + .auth { implicit request => + val inputObservable: InputObservable = request.body("artifact") + val isZip: Option[Boolean] = request.body("isZip") + val zipPassword: Option[String] = request.body("zipPassword") + val inputAttachObs = if (isZip.contains(true)) getZipFiles(inputObservable, zipPassword) else Seq(inputObservable) + + db + .roTransaction { implicit graph => + for { + alert <- + alertSrv + .get(EntityIdOrName(alertId)) + .can(Permissions.manageAlert) + .orFail(AuthorizationError("Operation not permitted")) + observableType <- observableTypeSrv.getOrFail(EntityName(inputObservable.dataType)) + } yield (alert, observableType) + } + .map { + case (alert, observableType) => + val (successes, failures) = inputAttachObs + .flatMap { obs => + obs.attachment.map(createAttachmentObservableInAlert(alert, obs, observableType, _)) ++ + obs.data.map(createSimpleObservableInAlert(alert, obs, observableType, _)) + } + .foldLeft[(Seq[JsValue], Seq[JsValue])]((Nil, Nil)) { + case ((s, f), Right(o)) => (s :+ o, f) + case ((s, f), Left(o)) => (s, f :+ o) + } + if (failures.isEmpty) Results.Created(JsArray(successes)) + else Results.MultiStatus(Json.obj("success" -> successes, "failure" -> failures)) + } + } + + private def createSimpleObservableInAlert( + alert: Alert with Entity, + inputObservable: InputObservable, + observableType: ObservableType with Entity, + 
data: String + )(implicit authContext: AuthContext): Either[JsValue, JsValue] = + db + .tryTransaction { implicit graph => + observableSrv + .create(inputObservable.toObservable, observableType, data, inputObservable.tags, Nil) + .flatMap(o => alertSrv.addObservable(alert, o).map(_ => o)) + } match { + case Success(o) => Right(o.toJson) + case Failure(error) => Left(errorHandler.toErrorResult(error)._2 ++ Json.obj("object" -> Json.obj("data" -> data))) + } + + private def createAttachmentObservableInAlert( + alert: Alert with Entity, + inputObservable: InputObservable, + observableType: ObservableType with Entity, + fileOrAttachment: Either[FFile, InputAttachment] + )(implicit authContext: AuthContext): Either[JsValue, JsValue] = + db + .tryTransaction { implicit graph => + val observable = fileOrAttachment match { + case Left(file) => observableSrv.create(inputObservable.toObservable, observableType, file, inputObservable.tags, Nil) + case Right(attachment) => + for { + attach <- attachmentSrv.duplicate(attachment.name, attachment.contentType, attachment.id) + obs <- observableSrv.create(inputObservable.toObservable, observableType, attach, inputObservable.tags, Nil) + } yield obs + } + observable.flatMap(o => alertSrv.addObservable(alert, o).map(_ => o)) + } match { + case Success(o) => Right(o.toJson) + case _ => + val filename = fileOrAttachment.fold(_.filename, _.name) + Left(Json.obj("object" -> Json.obj("data" -> s"file:$filename", "attachment" -> Json.obj("name" -> filename)))) + } + def get(observableId: String): Action[AnyContent] = entrypoint("get observable") .authRoTransaction(db) { implicit request => implicit graph => diff --git a/thehive/app/org/thp/thehive/controllers/v0/Router.scala b/thehive/app/org/thp/thehive/controllers/v0/Router.scala index 050122e10d..1930dbb96d 100644 --- a/thehive/app/org/thp/thehive/controllers/v0/Router.scala +++ b/thehive/app/org/thp/thehive/controllers/v0/Router.scala @@ -77,20 +77,21 @@ class Router @Inject() ( case POST(p"/case/artifact/_search") => observableCtrl.search // case POST(p"/case/:caseId/artifact/_search") => observableCtrl.findInCase(caseId) case POST(p"/case/artifact/_stats") => observableCtrl.stats - case POST(p"/case/$caseId/artifact") => observableCtrl.create(caseId) // Audit ok + case POST(p"/case/$caseId/artifact") => observableCtrl.createInCase(caseId) // Audit ok + case POST(p"/alert/$alertId/artifact") => observableCtrl.createInAlert(alertId) // Audit ok case GET(p"/case/artifact/$observableId") => observableCtrl.get(observableId) - case DELETE(p"/case/artifact/$observableId") => observableCtrl.delete(observableId) // Audit ok - case PATCH(p"/case/artifact/_bulk") => observableCtrl.bulkUpdate // Audit ok - case PATCH(p"/case/artifact/$observableId") => observableCtrl.update(observableId) // Audit ok + case DELETE(p"/case/artifact/$observableId") => observableCtrl.delete(observableId) // Audit ok + case PATCH(p"/case/artifact/_bulk") => observableCtrl.bulkUpdate // Audit ok + case PATCH(p"/case/artifact/$observableId") => observableCtrl.update(observableId) // Audit ok case GET(p"/case/artifact/$observableId/similar") => observableCtrl.findSimilar(observableId) case POST(p"/case/artifact/$observableId/shares") => shareCtrl.shareObservable(observableId) case GET(p"/case") => caseCtrl.search - case POST(p"/case") => caseCtrl.create // Audit ok + case POST(p"/case") => caseCtrl.create // Audit ok case GET(p"/case/$caseId") => caseCtrl.get(caseId) - case PATCH(p"/case/_bulk") => caseCtrl.bulkUpdate // Not used by the 
frontend - case PATCH(p"/case/$caseId") => caseCtrl.update(caseId) // Audit ok - case POST(p"/case/_merge/$caseIds") => caseCtrl.merge(caseIds) // Not implemented in backend and not used by frontend + case PATCH(p"/case/_bulk") => caseCtrl.bulkUpdate // Not used by the frontend + case PATCH(p"/case/$caseId") => caseCtrl.update(caseId) // Audit ok + case POST(p"/case/_merge/$caseIds") => caseCtrl.merge(caseIds) // Not implemented in backend and not used by frontend case POST(p"/case/_search") => caseCtrl.search case POST(p"/case/_stats") => caseCtrl.stats case DELETE(p"/case/$caseId") => caseCtrl.delete(caseId) // Not used by the frontend diff --git a/thehive/app/org/thp/thehive/controllers/v1/ObservableCtrl.scala b/thehive/app/org/thp/thehive/controllers/v1/ObservableCtrl.scala index e41f8822f7..13ce290bce 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/ObservableCtrl.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/ObservableCtrl.scala @@ -12,6 +12,7 @@ import org.thp.scalligraph.traversal.{IteratorOutput, Traversal} import org.thp.thehive.controllers.v1.Conversion._ import org.thp.thehive.dto.v1.{InputAttachment, InputObservable} import org.thp.thehive.models._ +import org.thp.thehive.services.AlertOps._ import org.thp.thehive.services.CaseOps._ import org.thp.thehive.services.ObservableOps._ import org.thp.thehive.services.OrganisationOps._ @@ -30,12 +31,13 @@ import scala.util.{Failure, Success} @Singleton class ObservableCtrl @Inject() ( - entryPoint: Entrypoint, + entrypoint: Entrypoint, @Named("with-thehive-schema") db: Database, properties: Properties, observableSrv: ObservableSrv, observableTypeSrv: ObservableTypeSrv, caseSrv: CaseSrv, + alertSrv: AlertSrv, organisationSrv: OrganisationSrv, attachmentSrv: AttachmentSrv, errorHandler: ErrorHandler, @@ -82,8 +84,8 @@ class ObservableCtrl @Inject() ( Query[Traversal.V[Observable], Traversal.V[Alert]]("alert", (observableSteps, _) => observableSteps.alert) ) - def create(caseId: String): Action[AnyContent] = - entryPoint("create artifact") + def createInCase(caseId: String): Action[AnyContent] = + entrypoint("create artifact in case") .extract("artifact", FieldsParser[InputObservable]) .extract("isZip", FieldsParser.boolean.optional.on("isZip")) .extract("zipPassword", FieldsParser.string.optional.on("zipPassword")) @@ -108,8 +110,8 @@ class ObservableCtrl @Inject() ( case (case0, observableType) => val (successes, failures) = inputAttachObs .flatMap { obs => - obs.attachment.map(createAttachmentObservable(case0, obs, observableType, _)) ++ - obs.data.map(createSimpleObservable(case0, obs, observableType, _)) + obs.attachment.map(createAttachmentObservableInCase(case0, obs, observableType, _)) ++ + obs.data.map(createSimpleObservableInCase(case0, obs, observableType, _)) } .foldLeft[(Seq[JsValue], Seq[JsValue])]((Nil, Nil)) { case ((s, f), Right(o)) => (s :+ o, f) @@ -120,7 +122,7 @@ class ObservableCtrl @Inject() ( } } - def createSimpleObservable( + private def createSimpleObservableInCase( `case`: Case with Entity, inputObservable: InputObservable, observableType: ObservableType with Entity, @@ -136,7 +138,7 @@ class ObservableCtrl @Inject() ( case Failure(error) => Left(errorHandler.toErrorResult(error)._2 ++ Json.obj("object" -> Json.obj("data" -> data))) } - def createAttachmentObservable( + private def createAttachmentObservableInCase( `case`: Case with Entity, inputObservable: InputObservable, observableType: ObservableType with Entity, @@ -160,12 +162,90 @@ class ObservableCtrl @Inject() ( Left(Json.obj("object" 
-> Json.obj("data" -> s"file:$filename", "attachment" -> Json.obj("name" -> filename)))) } + def createInAlert(alertId: String): Action[AnyContent] = + entrypoint("create artifact in alert") + .extract("artifact", FieldsParser[InputObservable]) + .extract("isZip", FieldsParser.boolean.optional.on("isZip")) + .extract("zipPassword", FieldsParser.string.optional.on("zipPassword")) + .auth { implicit request => + val inputObservable: InputObservable = request.body("artifact") + val isZip: Option[Boolean] = request.body("isZip") + val zipPassword: Option[String] = request.body("zipPassword") + val inputAttachObs = if (isZip.contains(true)) getZipFiles(inputObservable, zipPassword) else Seq(inputObservable) + + db + .roTransaction { implicit graph => + for { + alert <- + alertSrv + .get(EntityIdOrName(alertId)) + .can(Permissions.manageAlert) + .orFail(AuthorizationError("Operation not permitted")) + observableType <- observableTypeSrv.getOrFail(EntityName(inputObservable.dataType)) + } yield (alert, observableType) + } + .map { + case (alert, observableType) => + val (successes, failures) = inputAttachObs + .flatMap { obs => + obs.attachment.map(createAttachmentObservableInAlert(alert, obs, observableType, _)) ++ + obs.data.map(createSimpleObservableInAlert(alert, obs, observableType, _)) + } + .foldLeft[(Seq[JsValue], Seq[JsValue])]((Nil, Nil)) { + case ((s, f), Right(o)) => (s :+ o, f) + case ((s, f), Left(o)) => (s, f :+ o) + } + if (failures.isEmpty) Results.Created(JsArray(successes)) + else Results.MultiStatus(Json.obj("success" -> successes, "failure" -> failures)) + } + } + + private def createSimpleObservableInAlert( + alert: Alert with Entity, + inputObservable: InputObservable, + observableType: ObservableType with Entity, + data: String + )(implicit authContext: AuthContext): Either[JsValue, JsValue] = + db + .tryTransaction { implicit graph => + observableSrv + .create(inputObservable.toObservable, observableType, data, inputObservable.tags, Nil) + .flatMap(o => alertSrv.addObservable(alert, o).map(_ => o)) + } match { + case Success(o) => Right(o.toJson) + case Failure(error) => Left(errorHandler.toErrorResult(error)._2 ++ Json.obj("object" -> Json.obj("data" -> data))) + } + + private def createAttachmentObservableInAlert( + alert: Alert with Entity, + inputObservable: InputObservable, + observableType: ObservableType with Entity, + fileOrAttachment: Either[FFile, InputAttachment] + )(implicit authContext: AuthContext): Either[JsValue, JsValue] = + db + .tryTransaction { implicit graph => + val observable = fileOrAttachment match { + case Left(file) => observableSrv.create(inputObservable.toObservable, observableType, file, inputObservable.tags, Nil) + case Right(attachment) => + for { + attach <- attachmentSrv.duplicate(attachment.name, attachment.contentType, attachment.id) + obs <- observableSrv.create(inputObservable.toObservable, observableType, attach, inputObservable.tags, Nil) + } yield obs + } + observable.flatMap(o => alertSrv.addObservable(alert, o).map(_ => o)) + } match { + case Success(o) => Right(o.toJson) + case _ => + val filename = fileOrAttachment.fold(_.filename, _.name) + Left(Json.obj("object" -> Json.obj("data" -> s"file:$filename", "attachment" -> Json.obj("name" -> filename)))) + } + def get(observableId: String): Action[AnyContent] = - entryPoint("get observable") - .authRoTransaction(db) { _ => implicit graph => + entrypoint("get observable") + .authRoTransaction(db) { implicit request => implicit graph => observableSrv 
.get(EntityIdOrName(observableId)) - // .availableFor(request.organisation) + .visible .richObservable .getOrFail("Observable") .map { observable => @@ -174,7 +254,7 @@ class ObservableCtrl @Inject() ( } def update(observableId: String): Action[AnyContent] = - entryPoint("update observable") + entrypoint("update observable") .extract("observable", FieldsParser.update("observable", publicProperties)) .authTransaction(db) { implicit request => implicit graph => val propertyUpdaters: Seq[PropertyUpdater] = request.body("observable") @@ -187,7 +267,7 @@ class ObservableCtrl @Inject() ( } def bulkUpdate: Action[AnyContent] = - entryPoint("bulk update") + entrypoint("bulk update") .extract("input", FieldsParser.update("observable", publicProperties)) .extract("ids", FieldsParser.seq[String].on("ids")) .authTransaction(db) { implicit request => implicit graph => @@ -201,13 +281,13 @@ class ObservableCtrl @Inject() ( .map(_ => Results.NoContent) } - def delete(obsId: String): Action[AnyContent] = - entryPoint("delete") + def delete(observableId: String): Action[AnyContent] = + entrypoint("delete") .authTransaction(db) { implicit request => implicit graph => for { observable <- observableSrv - .get(EntityIdOrName(obsId)) + .get(EntityIdOrName(observableId)) .can(Permissions.manageObservable) .getOrFail("Observable") _ <- observableSrv.remove(observable) diff --git a/thehive/test/org/thp/thehive/controllers/v0/ObservableCtrlTest.scala b/thehive/test/org/thp/thehive/controllers/v0/ObservableCtrlTest.scala index c76dc02c1a..7b68d655f9 100644 --- a/thehive/test/org/thp/thehive/controllers/v0/ObservableCtrlTest.scala +++ b/thehive/test/org/thp/thehive/controllers/v0/ObservableCtrlTest.scala @@ -57,7 +57,7 @@ class ObservableCtrlTest extends PlaySpecification with TestAppBuilder { "data":["multi","line","test"] } """.stripMargin)) - val result = app[ObservableCtrl].create("1")(request) + val result = app[ObservableCtrl].createInCase("1")(request) status(result) must equalTo(201).updateMessage(s => s"$s\n${contentAsString(result)}") val createdObservables = contentAsJson(result).as[Seq[OutputObservable]] @@ -84,7 +84,7 @@ class ObservableCtrlTest extends PlaySpecification with TestAppBuilder { "data":["observable", "in", "array"] } """.stripMargin)) - val result = app[ObservableCtrl].create("1")(request) + val result = app[ObservableCtrl].createInCase("1")(request) status(result) must beEqualTo(201).updateMessage(s => s"$s\n${contentAsString(result)}") @@ -160,7 +160,7 @@ class ObservableCtrlTest extends PlaySpecification with TestAppBuilder { Headers("user" -> "certuser@thehive.local"), body = AnyContentAsMultipartFormData(MultipartFormData(dataParts, files, Nil)) ) - val result = app[ObservableCtrl].create("1")(request) + val result = app[ObservableCtrl].createInCase("1")(request) status(result) must equalTo(201).updateMessage(s => s"$s\n${contentAsString(result)}") val createdObservables = contentAsJson(result).as[Seq[OutputObservable]] @@ -219,7 +219,7 @@ class ObservableCtrlTest extends PlaySpecification with TestAppBuilder { "data":"localhost" } """)) - val result1 = app[ObservableCtrl].create("1")(request1) + val result1 = app[ObservableCtrl].createInCase("1")(request1) status(result1) must beEqualTo(201).updateMessage(s => s"$s\n${contentAsString(result1)}") getData("localhost", app) must have size 1 @@ -233,7 +233,7 @@ class ObservableCtrlTest extends PlaySpecification with TestAppBuilder { "data":"localhost" } """)) - val result2 = app[ObservableCtrl].create("2")(request2) + val result2 = 
app[ObservableCtrl].createInCase("2")(request2) status(result2) must equalTo(201).updateMessage(s => s"$s\n${contentAsString(result2)}") getData("localhost", app) must have size 1 @@ -273,7 +273,7 @@ class ObservableCtrlTest extends PlaySpecification with TestAppBuilder { "data":"${UUID.randomUUID()}\\n${UUID.randomUUID()}" } """)) - val result = observableCtrl.create("1")(request) + val result = observableCtrl.createInCase("1")(request) status(result) shouldEqual 201 contentAsJson(result).as[Seq[OutputObservable]] From 458de0e3962159591752a8fdc128bd5f0dc5c576 Mon Sep 17 00:00:00 2001 From: To-om Date: Thu, 7 Jan 2021 07:55:39 +0100 Subject: [PATCH 61/93] #1732 Add API for observable update in an alert --- .../org/thp/thehive/controllers/v0/ObservableCtrl.scala | 4 ++-- thehive/app/org/thp/thehive/controllers/v0/Router.scala | 4 +++- .../org/thp/thehive/controllers/v1/ObservableCtrl.scala | 7 ++----- thehive/app/org/thp/thehive/services/ObservableSrv.scala | 7 +++++++ 4 files changed, 14 insertions(+), 8 deletions(-) diff --git a/thehive/app/org/thp/thehive/controllers/v0/ObservableCtrl.scala b/thehive/app/org/thp/thehive/controllers/v0/ObservableCtrl.scala index c35d18157b..bb8fb1d893 100644 --- a/thehive/app/org/thp/thehive/controllers/v0/ObservableCtrl.scala +++ b/thehive/app/org/thp/thehive/controllers/v0/ObservableCtrl.scala @@ -223,7 +223,7 @@ class ObservableCtrl @Inject() ( val propertyUpdaters: Seq[PropertyUpdater] = request.body("observable") observableSrv .update( - _.get(EntityIdOrName(observableId)).can(Permissions.manageObservable), + _.get(EntityIdOrName(observableId)).canManage, propertyUpdaters ) .flatMap { @@ -259,7 +259,7 @@ class ObservableCtrl @Inject() ( ids .toTry { id => observableSrv - .update(_.get(EntityIdOrName(id)).can(Permissions.manageObservable), properties) + .update(_.get(EntityIdOrName(id)).canManage, properties) } .map(_ => Results.NoContent) } diff --git a/thehive/app/org/thp/thehive/controllers/v0/Router.scala b/thehive/app/org/thp/thehive/controllers/v0/Router.scala index 1930dbb96d..80cd24e4eb 100644 --- a/thehive/app/org/thp/thehive/controllers/v0/Router.scala +++ b/thehive/app/org/thp/thehive/controllers/v0/Router.scala @@ -78,13 +78,15 @@ class Router @Inject() ( // case POST(p"/case/:caseId/artifact/_search") => observableCtrl.findInCase(caseId) case POST(p"/case/artifact/_stats") => observableCtrl.stats case POST(p"/case/$caseId/artifact") => observableCtrl.createInCase(caseId) // Audit ok - case POST(p"/alert/$alertId/artifact") => observableCtrl.createInAlert(alertId) // Audit ok case GET(p"/case/artifact/$observableId") => observableCtrl.get(observableId) case DELETE(p"/case/artifact/$observableId") => observableCtrl.delete(observableId) // Audit ok case PATCH(p"/case/artifact/_bulk") => observableCtrl.bulkUpdate // Audit ok case PATCH(p"/case/artifact/$observableId") => observableCtrl.update(observableId) // Audit ok case GET(p"/case/artifact/$observableId/similar") => observableCtrl.findSimilar(observableId) case POST(p"/case/artifact/$observableId/shares") => shareCtrl.shareObservable(observableId) + case POST(p"/alert/$alertId/artifact") => observableCtrl.createInAlert(alertId) // Audit ok + case PATCH(p"/alert/artifact/$observableId") => observableCtrl.update(observableId) // Audit ok + case PATCH(p"/alert/artifact/_bulk") => observableCtrl.bulkUpdate // Audit ok case GET(p"/case") => caseCtrl.search case POST(p"/case") => caseCtrl.create // Audit ok diff --git a/thehive/app/org/thp/thehive/controllers/v1/ObservableCtrl.scala 
b/thehive/app/org/thp/thehive/controllers/v1/ObservableCtrl.scala index 13ce290bce..8cf373d168 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/ObservableCtrl.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/ObservableCtrl.scala @@ -259,10 +259,7 @@ class ObservableCtrl @Inject() ( .authTransaction(db) { implicit request => implicit graph => val propertyUpdaters: Seq[PropertyUpdater] = request.body("observable") observableSrv - .update( - _.get(EntityIdOrName(observableId)).can(Permissions.manageObservable), - propertyUpdaters - ) + .update(_.get(EntityIdOrName(observableId)).canManage, propertyUpdaters) .map(_ => Results.NoContent) } @@ -276,7 +273,7 @@ class ObservableCtrl @Inject() ( ids .toTry { id => observableSrv - .update(_.get(EntityIdOrName(id)).can(Permissions.manageObservable), properties) + .update(_.get(EntityIdOrName(id)).canManage, properties) } .map(_ => Results.NoContent) } diff --git a/thehive/app/org/thp/thehive/services/ObservableSrv.scala b/thehive/app/org/thp/thehive/services/ObservableSrv.scala index b95f40f7aa..e63e74016b 100644 --- a/thehive/app/org/thp/thehive/services/ObservableSrv.scala +++ b/thehive/app/org/thp/thehive/services/ObservableSrv.scala @@ -15,6 +15,7 @@ import org.thp.scalligraph.traversal.{Converter, StepLabel, Traversal} import org.thp.scalligraph.utils.Hash import org.thp.scalligraph.{EntityIdOrName, RichSeq} import org.thp.thehive.models._ +import org.thp.thehive.services.AlertOps._ import org.thp.thehive.services.ObservableOps._ import org.thp.thehive.services.OrganisationOps._ import org.thp.thehive.services.ShareOps._ @@ -229,6 +230,12 @@ object ObservableOps { else traversal.limit(0) + def canManage(implicit authContext: AuthContext): Traversal.V[Observable] = + if (authContext.isPermitted(Permissions.manageAlert)) + traversal.filter(_.or(_.alert.visible, _.can(Permissions.manageObservable))) + else + can(Permissions.manageObservable) + def userPermissions(implicit authContext: AuthContext): Traversal[Set[Permission], Vertex, Converter[Set[Permission], Vertex]] = traversal .share(authContext.organisation) From 762ead3f0ef11f9581e855919d8b34f6d6cd06e4 Mon Sep 17 00:00:00 2001 From: To-om Date: Thu, 7 Jan 2021 08:11:20 +0100 Subject: [PATCH 62/93] #1732 Add API for observable deletion from an alert --- thehive/app/org/thp/thehive/controllers/v0/ObservableCtrl.scala | 2 +- thehive/app/org/thp/thehive/controllers/v0/Router.scala | 1 + thehive/app/org/thp/thehive/controllers/v1/ObservableCtrl.scala | 2 +- 3 files changed, 3 insertions(+), 2 deletions(-) diff --git a/thehive/app/org/thp/thehive/controllers/v0/ObservableCtrl.scala b/thehive/app/org/thp/thehive/controllers/v0/ObservableCtrl.scala index bb8fb1d893..21af8c47e5 100644 --- a/thehive/app/org/thp/thehive/controllers/v0/ObservableCtrl.scala +++ b/thehive/app/org/thp/thehive/controllers/v0/ObservableCtrl.scala @@ -271,7 +271,7 @@ class ObservableCtrl @Inject() ( observable <- observableSrv .get(EntityIdOrName(observableId)) - .can(Permissions.manageObservable) + .canManage .getOrFail("Observable") _ <- observableSrv.remove(observable) } yield Results.NoContent diff --git a/thehive/app/org/thp/thehive/controllers/v0/Router.scala b/thehive/app/org/thp/thehive/controllers/v0/Router.scala index 80cd24e4eb..e29481df34 100644 --- a/thehive/app/org/thp/thehive/controllers/v0/Router.scala +++ b/thehive/app/org/thp/thehive/controllers/v0/Router.scala @@ -87,6 +87,7 @@ class Router @Inject() ( case POST(p"/alert/$alertId/artifact") => observableCtrl.createInAlert(alertId) // Audit ok 
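Note on the canManage helper added to ObservableOps above: it encodes the new permission rule for alert observables. A user holding manageAlert may manage observables that belong to a visible alert, while other observables still require manageObservable. The following stand-alone Scala sketch (not TheHive code; names are illustrative) restates that rule on plain values:

object CanManageRule {
  final case class Ctx(permissions: Set[String])

  // `onVisibleAlert` stands in for the `_.alert.visible` branch of the real traversal.
  def canManage(ctx: Ctx, onVisibleAlert: Boolean): Boolean =
    if (ctx.permissions.contains("manageAlert"))
      onVisibleAlert || ctx.permissions.contains("manageObservable")
    else
      ctx.permissions.contains("manageObservable")

  def main(args: Array[String]): Unit = {
    val alertManager = Ctx(Set("manageAlert"))
    println(canManage(alertManager, onVisibleAlert = true))  // true: observable of a visible alert
    println(canManage(alertManager, onVisibleAlert = false)) // false: still needs manageObservable
  }
}

This is why the v0 and v1 controllers in these patches can replace can(Permissions.manageObservable) with canManage for update, bulk update and delete: the same endpoints now also cover observables attached to alerts.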
case PATCH(p"/alert/artifact/$observableId") => observableCtrl.update(observableId) // Audit ok case PATCH(p"/alert/artifact/_bulk") => observableCtrl.bulkUpdate // Audit ok + case DELETE(p"/alert/artifact/$observableId") => observableCtrl.delete(observableId) // Audit ok case GET(p"/case") => caseCtrl.search case POST(p"/case") => caseCtrl.create // Audit ok diff --git a/thehive/app/org/thp/thehive/controllers/v1/ObservableCtrl.scala b/thehive/app/org/thp/thehive/controllers/v1/ObservableCtrl.scala index 8cf373d168..706c36481d 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/ObservableCtrl.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/ObservableCtrl.scala @@ -285,7 +285,7 @@ class ObservableCtrl @Inject() ( observable <- observableSrv .get(EntityIdOrName(observableId)) - .can(Permissions.manageObservable) + .canManage .getOrFail("Observable") _ <- observableSrv.remove(observable) } yield Results.NoContent From abfb9c05040bc1fa4a03ec2a6560f5e8103b9751 Mon Sep 17 00:00:00 2001 From: To-om Date: Thu, 7 Jan 2021 08:30:50 +0100 Subject: [PATCH 63/93] #1733 Add alert import date as searchable property --- .../thp/thehive/controllers/v0/AlertCtrl.scala | 1 + .../thp/thehive/controllers/v1/Properties.scala | 15 +++++---------- .../app/org/thp/thehive/services/AlertSrv.scala | 9 ++++----- 3 files changed, 10 insertions(+), 15 deletions(-) diff --git a/thehive/app/org/thp/thehive/controllers/v0/AlertCtrl.scala b/thehive/app/org/thp/thehive/controllers/v0/AlertCtrl.scala index d0ba0e18fe..b09d0bbabe 100644 --- a/thehive/app/org/thp/thehive/controllers/v0/AlertCtrl.scala +++ b/thehive/app/org/thp/thehive/controllers/v0/AlertCtrl.scala @@ -450,5 +450,6 @@ class PublicAlert @Inject() ( case _ => Failure(BadRequestError("Invalid custom fields format")) }) .property("case", db.idMapping)(_.select(_.`case`._id).readonly) + .property("importDate", UMapping.date.optional)(_.select(_.importDate).readonly) .build } diff --git a/thehive/app/org/thp/thehive/controllers/v1/Properties.scala b/thehive/app/org/thp/thehive/controllers/v1/Properties.scala index fae8188f4e..7bfa9cf7db 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/Properties.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/Properties.scala @@ -142,6 +142,7 @@ class Properties @Inject() ( } yield Json.obj("customFields" -> values) case _ => Failure(BadRequestError("Invalid custom fields format")) }) + .property("importDate", UMapping.date.optional)(_.select(_.importDate).readonly) .build lazy val audit: PublicProperties = @@ -193,10 +194,7 @@ class Properties @Inject() ( .property("pap", UMapping.int)(_.field.updatable) .property("status", UMapping.enum[CaseStatus.type])(_.field.updatable) .property("summary", UMapping.string.optional)(_.field.updatable) - .property("actionRequired", UMapping.boolean)(_ - .authSelect((t, auth) => t.isActionRequired(auth)) - .readonly - ) + .property("actionRequired", UMapping.boolean)(_.authSelect((t, auth) => t.isActionRequired(auth)).readonly) .property("assignee", UMapping.string.optional)(_.select(_.user.value(_.login)).custom { (_, login, vertex, _, graph, authContext) => for { c <- caseSrv.get(vertex)(graph).getOrFail("Case") @@ -433,12 +431,9 @@ class Properties @Inject() ( } .map(_ => Json.obj("assignee" -> value)) }) - .property("actionRequired", UMapping.boolean)(_ - .authSelect((t, authContext) => { - t.actionRequired(authContext) - }) - .readonly - ) + .property("actionRequired", UMapping.boolean)(_.authSelect { (t, authContext) => + t.actionRequired(authContext) + }.readonly) 
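Note on importDate: it is registered above as an optional, read-only date property on alerts, so it only carries a value once the alert has been linked to a case. A minimal Scala sketch (not TheHive code) of carrying and rendering such an optional date, following the JsNull / epoch-milliseconds convention this series uses when exposing it as extra data:

import java.util.Date
import play.api.libs.json.{JsNull, JsNumber, JsValue}

// Sketch only: an imported alert carries the date of its link to a case; a pending alert has none.
final case class AlertView(title: String, importDate: Option[Date]) {
  def importDateJson: JsValue = importDate.fold[JsValue](JsNull)(d => JsNumber(d.getTime))
}

object AlertViewDemo {
  def main(args: Array[String]): Unit = {
    println(AlertView("imported", Some(new Date(1610000000000L))).importDateJson) // 1610000000000
    println(AlertView("pending", None).importDateJson)                            // null
  }
}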
.build lazy val log: PublicProperties = diff --git a/thehive/app/org/thp/thehive/services/AlertSrv.scala b/thehive/app/org/thp/thehive/services/AlertSrv.scala index 0d7de34c4b..7a9af761eb 100644 --- a/thehive/app/org/thp/thehive/services/AlertSrv.scala +++ b/thehive/app/org/thp/thehive/services/AlertSrv.scala @@ -1,6 +1,5 @@ package org.thp.thehive.services -import org.apache.tinkerpop.gremlin.process.traversal.P import org.apache.tinkerpop.gremlin.structure.Graph import org.thp.scalligraph.auth.{AuthContext, Permission} import org.thp.scalligraph.models._ @@ -399,10 +398,10 @@ object AlertOps { else traversal.limit(0) def imported: Traversal[Boolean, Boolean, IdentityConverter[Boolean]] = - traversal - .`case` - .count - .choose(_.is(P.gt(0)), onTrue = true, onFalse = false) + traversal.choose(_.outE[AlertCase], onTrue = true, onFalse = false) + + def importDate: Traversal[Date, Date, Converter[Date, Date]] = + traversal.outE[AlertCase].value(_._createdAt) def similarCases(maybeCaseFilter: Option[Traversal.V[Case] => Traversal.V[Case]])(implicit authContext: AuthContext From 20e7add64fdec636c4b07860f594434f75800a95 Mon Sep 17 00:00:00 2001 From: To-om Date: Thu, 7 Jan 2021 08:43:46 +0100 Subject: [PATCH 64/93] #1733 Add alert import date in extraData --- .../controllers/v1/AlertRenderer.scala | 23 ++++++++++++------- 1 file changed, 15 insertions(+), 8 deletions(-) diff --git a/thehive/app/org/thp/thehive/controllers/v1/AlertRenderer.scala b/thehive/app/org/thp/thehive/controllers/v1/AlertRenderer.scala index ac257b06e7..935796ad4e 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/AlertRenderer.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/AlertRenderer.scala @@ -1,7 +1,6 @@ package org.thp.thehive.controllers.v1 -import java.util.{List => JList, Map => JMap} - +import java.util.{Date, List => JList, Map => JMap} import org.thp.scalligraph.auth.AuthContext import org.thp.scalligraph.traversal.TraversalOps._ import org.thp.scalligraph.traversal.{Converter, Traversal} @@ -39,12 +38,20 @@ trait AlertRenderer extends BaseRenderer[Alert] { _.similarCases(None).fold.domainMap(sc => JsArray(sc.sorted.map(Json.toJson(_)))) } - def alertStatsRenderer(extraData: Set[String])( - implicit authContext: AuthContext + def importDate: Traversal.V[Alert] => Traversal[JsValue, JList[Date], Converter[JsValue, JList[Date]]] = + _.importDate.fold.domainMap(_.headOption.fold[JsValue](JsNull)(d => JsNumber(d.getTime))) + + def alertStatsRenderer(extraData: Set[String])(implicit + authContext: AuthContext ): Traversal.V[Alert] => JsTraversal = { implicit traversal => - baseRenderer(extraData, traversal, { - case (f, "similarCases") => addData("similarCases", f)(similarCasesStats) - case (f, _) => f - }) + baseRenderer( + extraData, + traversal, + { + case (f, "similarCases") => addData("similarCases", f)(similarCasesStats) + case (f, "importDate") => addData("importDate", f)(importDate) + case (f, _) => f + } + ) } } From 8f1028a63eabf28711cedac2d758e59a1fde102f Mon Sep 17 00:00:00 2001 From: To-om Date: Thu, 7 Jan 2021 09:26:53 +0100 Subject: [PATCH 65/93] #1734 Add handlingDuration properties in alert --- ScalliGraph | 2 +- .../org/thp/thehive/controllers/v0/AlertCtrl.scala | 10 +++++++--- .../org/thp/thehive/controllers/v1/Properties.scala | 9 +++++++-- thehive/app/org/thp/thehive/services/AlertSrv.scala | 12 ++++++++++++ 4 files changed, 27 insertions(+), 6 deletions(-) diff --git a/ScalliGraph b/ScalliGraph index 2426d2e5f1..e9b3180098 160000 --- a/ScalliGraph +++ b/ScalliGraph @@ -1 +1 @@ 
-Subproject commit 2426d2e5f19ae24ed7c114d5ebd3c902027a6679 +Subproject commit e9b31800985bc83dd8cda20496f42a96d5236f21 diff --git a/thehive/app/org/thp/thehive/controllers/v0/AlertCtrl.scala b/thehive/app/org/thp/thehive/controllers/v0/AlertCtrl.scala index b09d0bbabe..c7331860b7 100644 --- a/thehive/app/org/thp/thehive/controllers/v0/AlertCtrl.scala +++ b/thehive/app/org/thp/thehive/controllers/v0/AlertCtrl.scala @@ -1,9 +1,6 @@ package org.thp.thehive.controllers.v0 -import java.util.{Base64, List => JList, Map => JMap} - import io.scalaland.chimney.dsl._ -import javax.inject.{Inject, Named, Singleton} import org.apache.tinkerpop.gremlin.structure.Graph import org.thp.scalligraph.auth.AuthContext import org.thp.scalligraph.controllers._ @@ -27,6 +24,8 @@ import org.thp.thehive.services._ import play.api.libs.json.{JsArray, JsObject, Json} import play.api.mvc.{Action, AnyContent, Results} +import java.util.{Base64, List => JList, Map => JMap} +import javax.inject.{Inject, Named, Singleton} import scala.util.{Failure, Success, Try} @Singleton @@ -451,5 +450,10 @@ class PublicAlert @Inject() ( }) .property("case", db.idMapping)(_.select(_.`case`._id).readonly) .property("importDate", UMapping.date.optional)(_.select(_.importDate).readonly) + .property("computed.handlingDuration", UMapping.long)(_.select(_.handlingDuration).readonly) + .property("computed.handlingDurationInSeconds", UMapping.long)(_.select(_.handlingDuration.math("_ / 1000").domainMap(_.toLong)).readonly) + .property("computed.handlingDurationInMinutes", UMapping.long)(_.select(_.handlingDuration.math("_ / 60000").domainMap(_.toLong)).readonly) + .property("computed.handlingDurationInHours", UMapping.long)(_.select(_.handlingDuration.math("_ / 3600000").domainMap(_.toLong)).readonly) + .property("computed.handlingDurationInDays", UMapping.long)(_.select(_.handlingDuration.math("_ / 86400000").domainMap(_.toLong)).readonly) .build } diff --git a/thehive/app/org/thp/thehive/controllers/v1/Properties.scala b/thehive/app/org/thp/thehive/controllers/v1/Properties.scala index 7bfa9cf7db..8295730f8a 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/Properties.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/Properties.scala @@ -125,7 +125,7 @@ class Properties @Inject() ( case CustomFieldType.integer => new Converter[Any, JsValue] { def apply(x: JsValue): Any = x.as[Long] } case CustomFieldType.string => new Converter[Any, JsValue] { def apply(x: JsValue): Any = x.as[String] } } - .getOrElse(new Converter[Any, JsValue] { def apply(x: JsValue): Any = x }) + .getOrElse((x: JsValue) => x) case _ => (x: JsValue) => x } .custom { @@ -143,6 +143,11 @@ class Properties @Inject() ( case _ => Failure(BadRequestError("Invalid custom fields format")) }) .property("importDate", UMapping.date.optional)(_.select(_.importDate).readonly) + .property("computed.handlingDuration", UMapping.long)(_.select(_.handlingDuration).readonly) + .property("computed.handlingDurationInSeconds", UMapping.long)(_.select(_.handlingDuration.math("_ / 1000").domainMap(_.toLong)).readonly) + .property("computed.handlingDurationInMinutes", UMapping.long)(_.select(_.handlingDuration.math("_ / 60000").domainMap(_.toLong)).readonly) + .property("computed.handlingDurationInHours", UMapping.long)(_.select(_.handlingDuration.math("_ / 3600000").domainMap(_.toLong)).readonly) + .property("computed.handlingDurationInDays", UMapping.long)(_.select(_.handlingDuration.math("_ / 86400000").domainMap(_.toLong)).readonly) .build lazy val audit: PublicProperties = @@ 
-259,7 +264,7 @@ class Properties @Inject() ( case CustomFieldType.integer => new Converter[Any, JsValue] { def apply(x: JsValue): Any = x.as[Long] } case CustomFieldType.string => new Converter[Any, JsValue] { def apply(x: JsValue): Any = x.as[String] } } - .getOrElse(new Converter[Any, JsValue] { def apply(x: JsValue): Any = x }) + .getOrElse((x: JsValue) => x) case _ => (x: JsValue) => x } .custom { diff --git a/thehive/app/org/thp/thehive/services/AlertSrv.scala b/thehive/app/org/thp/thehive/services/AlertSrv.scala index 7a9af761eb..082fa64ee6 100644 --- a/thehive/app/org/thp/thehive/services/AlertSrv.scala +++ b/thehive/app/org/thp/thehive/services/AlertSrv.scala @@ -403,6 +403,18 @@ object AlertOps { def importDate: Traversal[Date, Date, Converter[Date, Date]] = traversal.outE[AlertCase].value(_._createdAt) + def handlingDuration: Traversal[Long, Long, IdentityConverter[Long]] = + traversal.coalesceIdent( + _.filter(_.outE[AlertCase]) + .sack( + (_: JLong, importDate: JLong) => importDate, + _.by(_.importDate.graphMap[Long, JLong, Converter[Long, JLong]](_.getTime, Converter.long)) + ) + .sack((_: Long) - (_: JLong), _.by(_._createdAt.graphMap[Long, JLong, Converter[Long, JLong]](_.getTime, Converter.long))) + .sack[Long], + _.constant(0L) + ) + def similarCases(maybeCaseFilter: Option[Traversal.V[Case] => Traversal.V[Case]])(implicit authContext: AuthContext ): Traversal[(RichCase, SimilarStats), JMap[String, Any], Converter[(RichCase, SimilarStats), JMap[String, Any]]] = { From bfda484e0c5b4ed069f3ea88ddf5682cfef0abe4 Mon Sep 17 00:00:00 2001 From: To-om Date: Thu, 7 Jan 2021 09:33:34 +0100 Subject: [PATCH 66/93] #1734 Rewrite case handling duration --- .../thp/thehive/controllers/v0/CaseCtrl.scala | 65 ++----------------- .../thehive/controllers/v1/Properties.scala | 65 ++----------------- .../org/thp/thehive/services/CaseSrv.scala | 16 ++++- 3 files changed, 23 insertions(+), 123 deletions(-) diff --git a/thehive/app/org/thp/thehive/controllers/v0/CaseCtrl.scala b/thehive/app/org/thp/thehive/controllers/v0/CaseCtrl.scala index f8fcd8aeed..3e6782c2ff 100644 --- a/thehive/app/org/thp/thehive/controllers/v0/CaseCtrl.scala +++ b/thehive/app/org/thp/thehive/controllers/v0/CaseCtrl.scala @@ -345,66 +345,11 @@ class PublicCase @Inject() ( } yield Json.obj("customFields" -> values) case _ => Failure(BadRequestError("Invalid custom fields format")) }) - .property("computed.handlingDurationInDays", UMapping.long)( - _.select( - _.coalesceIdent( - _.has(_.endDate) - .sack( - (_: JLong, endDate: JLong) => endDate, - _.by(_.value(_.endDate).graphMap[Long, JLong, Converter[Long, JLong]](_.getTime, Converter.long)) - ) - .sack((_: Long) - (_: JLong), _.by(_.value(_.startDate).graphMap[Long, JLong, Converter[Long, JLong]](_.getTime, Converter.long))) - .sack((_: Long) / (_: Long), _.by(_.constant(86400000L))) - .sack[Long], - _.constant(0L) - ) - ).readonly - ) - .property("computed.handlingDurationInHours", UMapping.long)( - _.select( - _.coalesceIdent( - _.has(_.endDate) - .sack( - (_: JLong, endDate: JLong) => endDate, - _.by(_.value(_.endDate).graphMap[Long, JLong, Converter[Long, JLong]](_.getTime, Converter.long)) - ) - .sack((_: Long) - (_: JLong), _.by(_.value(_.startDate).graphMap[Long, JLong, Converter[Long, JLong]](_.getTime, Converter.long))) - .sack((_: Long) / (_: Long), _.by(_.constant(3600000L))) - .sack[Long], - _.constant(0L) - ) - ).readonly - ) - .property("computed.handlingDurationInMinutes", UMapping.long)( - _.select( - _.coalesceIdent( - _.has(_.endDate) - .sack( - (_: 
JLong, endDate: JLong) => endDate, - _.by(_.value(_.endDate).graphMap[Long, JLong, Converter[Long, JLong]](_.getTime, Converter.long)) - ) - .sack((_: Long) - (_: JLong), _.by(_.value(_.startDate).graphMap[Long, JLong, Converter[Long, JLong]](_.getTime, Converter.long))) - .sack((_: Long) / (_: Long), _.by(_.constant(60000L))) - .sack[Long], - _.constant(0L) - ) - ).readonly - ) - .property("computed.handlingDurationInSeconds", UMapping.long)( - _.select( - _.coalesceIdent( - _.has(_.endDate) - .sack( - (_: JLong, endDate: JLong) => endDate, - _.by(_.value(_.endDate).graphMap[Long, JLong, Converter[Long, JLong]](_.getTime, Converter.long)) - ) - .sack((_: Long) - (_: JLong), _.by(_.value(_.startDate).graphMap[Long, JLong, Converter[Long, JLong]](_.getTime, Converter.long))) - .sack((_: Long) / (_: Long), _.by(_.constant(1000L))) - .sack[Long], - _.constant(0L) - ) - ).readonly - ) + .property("computed.handlingDuration", UMapping.long)(_.select(_.handlingDuration).readonly) + .property("computed.handlingDurationInSeconds", UMapping.long)(_.select(_.handlingDuration.math("_ / 1000").domainMap(_.toLong)).readonly) + .property("computed.handlingDurationInMinutes", UMapping.long)(_.select(_.handlingDuration.math("_ / 60000").domainMap(_.toLong)).readonly) + .property("computed.handlingDurationInHours", UMapping.long)(_.select(_.handlingDuration.math("_ / 3600000").domainMap(_.toLong)).readonly) + .property("computed.handlingDurationInDays", UMapping.long)(_.select(_.handlingDuration.math("_ / 86400000").domainMap(_.toLong)).readonly) .property("viewingOrganisation", UMapping.string)( _.authSelect((cases, authContext) => cases.organisations.visible(authContext).value(_.name)).readonly ) diff --git a/thehive/app/org/thp/thehive/controllers/v1/Properties.scala b/thehive/app/org/thp/thehive/controllers/v1/Properties.scala index 8295730f8a..d600a93b5f 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/Properties.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/Properties.scala @@ -281,66 +281,11 @@ class Properties @Inject() ( } yield Json.obj("customFields" -> values) case _ => Failure(BadRequestError("Invalid custom fields format")) }) - .property("computed.handlingDurationInDays", UMapping.long)( - _.select( - _.coalesceIdent( - _.has(_.endDate) - .sack( - (_: JLong, endDate: JLong) => endDate, - _.by(_.value(_.endDate).graphMap[Long, JLong, Converter[Long, JLong]](_.getTime, Converter.long)) - ) - .sack((_: Long) - (_: JLong), _.by(_.value(_.startDate).graphMap[Long, JLong, Converter[Long, JLong]](_.getTime, Converter.long))) - .sack((_: Long) / (_: Long), _.by(_.constant(86400000L))) - .sack[Long], - _.constant(0L) - ) - ).readonly - ) - .property("computed.handlingDurationInHours", UMapping.long)( - _.select( - _.coalesceIdent( - _.has(_.endDate) - .sack( - (_: JLong, endDate: JLong) => endDate, - _.by(_.value(_.endDate).graphMap[Long, JLong, Converter[Long, JLong]](_.getTime, Converter.long)) - ) - .sack((_: Long) - (_: JLong), _.by(_.value(_.startDate).graphMap[Long, JLong, Converter[Long, JLong]](_.getTime, Converter.long))) - .sack((_: Long) / (_: Long), _.by(_.constant(3600000L))) - .sack[Long], - _.constant(0L) - ) - ).readonly - ) - .property("computed.handlingDurationInMinutes", UMapping.long)( - _.select( - _.coalesceIdent( - _.has(_.endDate) - .sack( - (_: JLong, endDate: JLong) => endDate, - _.by(_.value(_.endDate).graphMap[Long, JLong, Converter[Long, JLong]](_.getTime, Converter.long)) - ) - .sack((_: Long) - (_: JLong), _.by(_.value(_.startDate).graphMap[Long, JLong, 
Converter[Long, JLong]](_.getTime, Converter.long))) - .sack((_: Long) / (_: Long), _.by(_.constant(60000L))) - .sack[Long], - _.constant(0L) - ) - ).readonly - ) - .property("computed.handlingDurationInSeconds", UMapping.long)( - _.select( - _.coalesceIdent( - _.has(_.endDate) - .sack( - (_: JLong, endDate: JLong) => endDate, - _.by(_.value(_.endDate).graphMap[Long, JLong, Converter[Long, JLong]](_.getTime, Converter.long)) - ) - .sack((_: Long) - (_: JLong), _.by(_.value(_.startDate).graphMap[Long, JLong, Converter[Long, JLong]](_.getTime, Converter.long))) - .sack((_: Long) / (_: Long), _.by(_.constant(1000L))) - .sack[Long], - _.constant(0L) - ) - ).readonly - ) + .property("computed.handlingDuration", UMapping.long)(_.select(_.handlingDuration).readonly) + .property("computed.handlingDurationInSeconds", UMapping.long)(_.select(_.handlingDuration.math("_ / 1000").domainMap(_.toLong)).readonly) + .property("computed.handlingDurationInMinutes", UMapping.long)(_.select(_.handlingDuration.math("_ / 60000").domainMap(_.toLong)).readonly) + .property("computed.handlingDurationInHours", UMapping.long)(_.select(_.handlingDuration.math("_ / 3600000").domainMap(_.toLong)).readonly) + .property("computed.handlingDurationInDays", UMapping.long)(_.select(_.handlingDuration.math("_ / 86400000").domainMap(_.toLong)).readonly) .property("viewingOrganisation", UMapping.string)( _.authSelect((cases, authContext) => cases.organisations.visible(authContext).value(_.name)).readonly ) diff --git a/thehive/app/org/thp/thehive/services/CaseSrv.scala b/thehive/app/org/thp/thehive/services/CaseSrv.scala index 291a1147af..d29415c808 100644 --- a/thehive/app/org/thp/thehive/services/CaseSrv.scala +++ b/thehive/app/org/thp/thehive/services/CaseSrv.scala @@ -1,6 +1,7 @@ package org.thp.thehive.services import java.util.{Map => JMap} +import java.lang.{Long => JLong} import akka.actor.ActorRef import javax.inject.{Inject, Named, Singleton} @@ -12,7 +13,7 @@ import org.thp.scalligraph.models._ import org.thp.scalligraph.query.PropertyUpdater import org.thp.scalligraph.services._ import org.thp.scalligraph.traversal.TraversalOps._ -import org.thp.scalligraph.traversal.{Converter, StepLabel, Traversal} +import org.thp.scalligraph.traversal.{Converter, IdentityConverter, StepLabel, Traversal} import org.thp.scalligraph.{CreateError, EntityIdOrName, EntityName, RichOptionTry, RichSeq} import org.thp.thehive.controllers.v1.Conversion._ import org.thp.thehive.dto.v1.InputCustomFieldValue @@ -557,9 +558,18 @@ object CaseOps { def isActionRequired(implicit authContext: AuthContext): Traversal[Boolean, Boolean, Converter.Identity[Boolean]] = traversal.choose(_.share(authContext).outE[ShareTask].has(_.actionRequired, true), true, false) + def handlingDuration: Traversal[Long, Long, IdentityConverter[Long]] = + traversal.coalesceIdent( + _.has(_.endDate) + .sack( + (_: JLong, importDate: JLong) => importDate, + _.by(_.value(_.endDate).graphMap[Long, JLong, Converter[Long, JLong]](_.getTime, Converter.long)) + ) + .sack((_: Long) - (_: JLong), _.by(_._createdAt.graphMap[Long, JLong, Converter[Long, JLong]](_.getTime, Converter.long))) + .sack[Long], + _.constant(0L) + ) } - -// implicit class CaseCustomFieldsOpsDefs(traversal: Traversal.E[CaseCustomField]) extends CustomFieldValueOpsDefs(traversal) } class CaseIntegrityCheckOps @Inject() (@Named("with-thehive-schema") val db: Database, val service: CaseSrv) extends IntegrityCheckOps[Case] { From 6f87452ab00c141ca75d56845b5e3ead498df37c Mon Sep 17 00:00:00 2001 From: To-om Date: 
Thu, 7 Jan 2021 17:52:22 +0100 Subject: [PATCH 67/93] #1734 Fix mean return type --- ScalliGraph | 2 +- thehive/app/org/thp/thehive/services/th3/Aggregation.scala | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/ScalliGraph b/ScalliGraph index e9b3180098..acc3bea235 160000 --- a/ScalliGraph +++ b/ScalliGraph @@ -1 +1 @@ -Subproject commit e9b31800985bc83dd8cda20496f42a96d5236f21 +Subproject commit acc3bea235989e57f8b43fd0780f6905437cc9e2 diff --git a/thehive/app/org/thp/thehive/services/th3/Aggregation.scala b/thehive/app/org/thp/thehive/services/th3/Aggregation.scala index 358d5a744b..79d89cf159 100644 --- a/thehive/app/org/thp/thehive/services/th3/Aggregation.scala +++ b/thehive/app/org/thp/thehive/services/th3/Aggregation.scala @@ -186,7 +186,8 @@ case class AggAvg(aggName: Option[String], fieldName: String) extends Aggregatio property .select(fieldPath, t, authContext) .mean - .domainMap(avg => Output(Json.obj(name -> avg.asInstanceOf[Double]))), + .domainMap(avg => Output(Json.obj(name -> avg))) + .asInstanceOf[Traversal.Domain[Output[_]]], Output(Json.obj(name -> JsNull)) ) } From b676098f190ef54d31f9a7d3ed357e0200865148 Mon Sep 17 00:00:00 2001 From: To-om Date: Thu, 7 Jan 2021 17:59:25 +0100 Subject: [PATCH 68/93] #1734 Add imported property in alert --- thehive/app/org/thp/thehive/controllers/v0/AlertCtrl.scala | 1 + thehive/app/org/thp/thehive/controllers/v1/Properties.scala | 2 ++ 2 files changed, 3 insertions(+) diff --git a/thehive/app/org/thp/thehive/controllers/v0/AlertCtrl.scala b/thehive/app/org/thp/thehive/controllers/v0/AlertCtrl.scala index c7331860b7..9f1638998d 100644 --- a/thehive/app/org/thp/thehive/controllers/v0/AlertCtrl.scala +++ b/thehive/app/org/thp/thehive/controllers/v0/AlertCtrl.scala @@ -449,6 +449,7 @@ class PublicAlert @Inject() ( case _ => Failure(BadRequestError("Invalid custom fields format")) }) .property("case", db.idMapping)(_.select(_.`case`._id).readonly) + .property("imported", UMapping.boolean)(_.select(_.imported).readonly) .property("importDate", UMapping.date.optional)(_.select(_.importDate).readonly) .property("computed.handlingDuration", UMapping.long)(_.select(_.handlingDuration).readonly) .property("computed.handlingDurationInSeconds", UMapping.long)(_.select(_.handlingDuration.math("_ / 1000").domainMap(_.toLong)).readonly) diff --git a/thehive/app/org/thp/thehive/controllers/v1/Properties.scala b/thehive/app/org/thp/thehive/controllers/v1/Properties.scala index d600a93b5f..f842c34564 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/Properties.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/Properties.scala @@ -142,6 +142,8 @@ class Properties @Inject() ( } yield Json.obj("customFields" -> values) case _ => Failure(BadRequestError("Invalid custom fields format")) }) + .property("case", db.idMapping)(_.select(_.`case`._id).readonly) + .property("imported", UMapping.boolean)(_.select(_.imported).readonly) .property("importDate", UMapping.date.optional)(_.select(_.importDate).readonly) .property("computed.handlingDuration", UMapping.long)(_.select(_.handlingDuration).readonly) .property("computed.handlingDurationInSeconds", UMapping.long)(_.select(_.handlingDuration.math("_ / 1000").domainMap(_.toLong)).readonly) From 911c063dbe64c6ce16eef5415ae73b14dc4214ac Mon Sep 17 00:00:00 2001 From: To-om Date: Fri, 8 Jan 2021 09:35:04 +0100 Subject: [PATCH 69/93] #1732 Add support of base64 format for alert observable --- .../controllers/v0/ObservableCtrl.scala | 78 +++++++++++++++---- 1 file changed, 61 
insertions(+), 17 deletions(-) diff --git a/thehive/app/org/thp/thehive/controllers/v0/ObservableCtrl.scala b/thehive/app/org/thp/thehive/controllers/v0/ObservableCtrl.scala index 21af8c47e5..ca56c37fad 100644 --- a/thehive/app/org/thp/thehive/controllers/v0/ObservableCtrl.scala +++ b/thehive/app/org/thp/thehive/controllers/v0/ObservableCtrl.scala @@ -28,7 +28,10 @@ import java.io.FilterInputStream import java.nio.file.Files import javax.inject.{Inject, Named, Singleton} import scala.collection.JavaConverters._ -import scala.util.{Failure, Success} +import scala.util.{Failure, Success, Try} +import shapeless._ + +import java.util.Base64 @Singleton class ObservableCtrl @Inject() ( @@ -47,6 +50,8 @@ class ObservableCtrl @Inject() ( ) extends ObservableRenderer with QueryCtrl { + type AnyAttachmentType = InputAttachment :+: FFile :+: String :+: CNil + def createInCase(caseId: String): Action[AnyContent] = entrypoint("create artifact in case") .extract("artifact", FieldsParser[InputObservable]) @@ -71,11 +76,14 @@ class ObservableCtrl @Inject() ( } .map { case (case0, observableType) => - val (successes, failures) = inputAttachObs - .flatMap { obs => - obs.attachment.map(createAttachmentObservableInCase(case0, obs, observableType, _)) ++ - obs.data.map(createSimpleObservableInCase(case0, obs, observableType, _)) - } + val successesAndFailures = + if (observableType.isAttachment) + inputAttachObs + .flatMap(obs => obs.attachment.map(createAttachmentObservableInCase(case0, obs, observableType, _))) + else + inputAttachObs + .flatMap(obs => obs.data.map(createSimpleObservableInCase(case0, obs, observableType, _))) + val (successes, failures) = successesAndFailures .foldLeft[(Seq[JsValue], Seq[JsValue])]((Nil, Nil)) { case ((s, f), Right(o)) => (s :+ o, f) case ((s, f), Left(o)) => (s, f :+ o) @@ -149,11 +157,18 @@ class ObservableCtrl @Inject() ( } .map { case (alert, observableType) => - val (successes, failures) = inputAttachObs - .flatMap { obs => - obs.attachment.map(createAttachmentObservableInAlert(alert, obs, observableType, _)) ++ - obs.data.map(createSimpleObservableInAlert(alert, obs, observableType, _)) - } + val successesAndFailures = + if (observableType.isAttachment) + inputAttachObs + .flatMap { obs => + (obs.attachment.map(_.fold(Coproduct[AnyAttachmentType](_), Coproduct[AnyAttachmentType](_))) ++ + obs.data.map(Coproduct[AnyAttachmentType](_))) + .map(createAttachmentObservableInAlert(alert, obs, observableType, _)) + } + else + inputAttachObs + .flatMap(obs => obs.data.map(createSimpleObservableInAlert(alert, obs, observableType, _))) + val (successes, failures) = successesAndFailures .foldLeft[(Seq[JsValue], Seq[JsValue])]((Nil, Nil)) { case ((s, f), Right(o)) => (s :+ o, f) case ((s, f), Left(o)) => (s, f :+ o) @@ -183,23 +198,52 @@ class ObservableCtrl @Inject() ( alert: Alert with Entity, inputObservable: InputObservable, observableType: ObservableType with Entity, - fileOrAttachment: Either[FFile, InputAttachment] + attachment: AnyAttachmentType )(implicit authContext: AuthContext): Either[JsValue, JsValue] = db .tryTransaction { implicit graph => - val observable = fileOrAttachment match { - case Left(file) => observableSrv.create(inputObservable.toObservable, observableType, file, inputObservable.tags, Nil) - case Right(attachment) => + object createAttachment extends Poly1 { + implicit val fromFile: Case.Aux[FFile, Try[RichObservable]] = at[FFile] { file => + observableSrv.create(inputObservable.toObservable, observableType, file, inputObservable.tags, Nil) + } + 
implicit val fromAttachment: Case.Aux[InputAttachment, Try[RichObservable]] = at[InputAttachment] { attachment => for { attach <- attachmentSrv.duplicate(attachment.name, attachment.contentType, attachment.id) obs <- observableSrv.create(inputObservable.toObservable, observableType, attach, inputObservable.tags, Nil) } yield obs + } + + implicit val fromString: Case.Aux[String, Try[RichObservable]] = at[String] { data => + data.split(';') match { + case Array(filename, contentType, value) => + val data = Base64.getDecoder.decode(value) + attachmentSrv + .create(filename, contentType, data) + .flatMap(attachment => observableSrv.create(inputObservable.toObservable, observableType, attachment, inputObservable.tags, Nil)) + case Array(filename, contentType) => + attachmentSrv + .create(filename, contentType, Array.emptyByteArray) + .flatMap(attachment => observableSrv.create(inputObservable.toObservable, observableType, attachment, inputObservable.tags, Nil)) + case data => + Failure(InvalidFormatAttributeError("artifacts.data", "filename;contentType;base64value", Set.empty, FString(data.mkString(";")))) + } + } } - observable.flatMap(o => alertSrv.addObservable(alert, o).map(_ => o)) + + attachment + .fold(createAttachment) + .flatMap(o => alertSrv.addObservable(alert, o).map(_ => o)) } match { case Success(o) => Right(o.toJson) case _ => - val filename = fileOrAttachment.fold(_.filename, _.name) + object attachmentName extends Poly1 { + implicit val fromFile: Case.Aux[FFile, String] = at[FFile](_.filename) + implicit val fromAttachment: Case.Aux[InputAttachment, String] = at[InputAttachment](_.name) + implicit val fromString: Case.Aux[String, String] = at[String] { data => + if (data.contains(';')) data.takeWhile(_ != ';') else "no name" + } + } + val filename = attachment.fold(attachmentName) Left(Json.obj("object" -> Json.obj("data" -> s"file:$filename", "attachment" -> Json.obj("name" -> filename)))) } From 90bc1900658d4087806bbbe60377820a4072f5f2 Mon Sep 17 00:00:00 2001 From: To-om Date: Fri, 8 Jan 2021 12:24:55 +0100 Subject: [PATCH 70/93] #1732 Fix tests --- .../controllers/v0/ObservableCtrlTest.scala | 63 ++++++++++++++++--- 1 file changed, 54 insertions(+), 9 deletions(-) diff --git a/thehive/test/org/thp/thehive/controllers/v0/ObservableCtrlTest.scala b/thehive/test/org/thp/thehive/controllers/v0/ObservableCtrlTest.scala index 7b68d655f9..b4900adc36 100644 --- a/thehive/test/org/thp/thehive/controllers/v0/ObservableCtrlTest.scala +++ b/thehive/test/org/thp/thehive/controllers/v0/ObservableCtrlTest.scala @@ -139,9 +139,8 @@ class ObservableCtrlTest extends PlaySpecification with TestAppBuilder { resSearchObservables.flatMap(_.data) must contain(exactly("observable", "in", "array", "h.fr")) } - "be able to create and get 2 observables with string data and attachment" in testApp { app => + "be able to create and get 2 observables with string data" in testApp { app => WithFakeTemporaryFile { tempFile => - val hashes = Hasher(app.apply[Configuration].get[Seq[String]]("attachment.hash"): _*).fromPath(tempFile.path).map(_.toString) val files = Seq(FilePart("attachment", "myfile.txt", Some("text/plain"), tempFile)) val dataParts = Map("_json" -> Seq(""" { @@ -164,20 +163,66 @@ class ObservableCtrlTest extends PlaySpecification with TestAppBuilder { status(result) must equalTo(201).updateMessage(s => s"$s\n${contentAsString(result)}") val createdObservables = contentAsJson(result).as[Seq[OutputObservable]] - createdObservables must have size 3 + createdObservables must have size 2 
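Note on the fromString case above: it lets callers submit an attachment observable inline, as a data string of the form filename;contentType;base64value (the two-part form filename;contentType produces an empty attachment). A stand-alone Scala sketch of producing and parsing that format (not TheHive code; error handling simplified):

import java.nio.charset.StandardCharsets
import java.util.Base64

// Sketch only: build and parse the "filename;contentType;base64value" inline attachment format.
object InlineAttachmentFormat {
  def encode(filename: String, contentType: String, content: Array[Byte]): String =
    s"$filename;$contentType;${Base64.getEncoder.encodeToString(content)}"

  def decode(data: String): Either[String, (String, String, Array[Byte])] =
    data.split(';') match {
      case Array(name, contentType, value) => Right((name, contentType, Base64.getDecoder.decode(value)))
      case Array(name, contentType)        => Right((name, contentType, Array.emptyByteArray)) // empty attachment
      case _                               => Left(s"expected filename;contentType;base64value, got: $data")
    }

  def main(args: Array[String]): Unit = {
    val encoded = encode("hello.txt", "text/plain", "Hello world".getBytes(StandardCharsets.UTF_8))
    println(encoded) // hello.txt;text/plain;SGVsbG8gd29ybGQ=
    println(decode(encoded).map { case (name, _, bytes) => s"$name -> ${new String(bytes, StandardCharsets.UTF_8)}" })
  }
}

The updated test that follows submits exactly such a value ("hello.txt;text/plain;SGVsbG8gd29ybGQ=") alongside a multipart file and expects two attachment observables in return.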
createdObservables.map(_.dataType) must contain(be_==("ip")).forall createdObservables.flatMap(_.data) must contain(exactly("127.0.0.1", "127.0.0.2")) createdObservables.map(_.sighted) must contain(beFalse).forall createdObservables.map(_.message) must contain(beSome("localhost")).forall createdObservables.map(_.tags) must contain(be_==(Set("local", "host"))).forall val attachmentOption = createdObservables.flatMap(_.attachment).headOption - attachmentOption must beSome - val attachment = attachmentOption.get - attachment.name must beEqualTo("myfile.txt") - attachment.hashes must containTheSameElementsAs(hashes) - attachment.size must beEqualTo(tempFile.length()) - attachment.contentType must beEqualTo("text/plain") + attachmentOption must beNone + } + } + + "be able to create and get 2 observables with string data and attachment" in testApp { app => + WithFakeTemporaryFile { tempFile => + val hasher = Hasher(app.apply[Configuration].get[Seq[String]]("attachment.hash"): _*) + val hashes = hasher.fromPath(tempFile.path).map(_.toString) + val helloHashes = hasher.fromString("Hello world").map(_.toString) + val files = Seq(FilePart("attachment", "myfile.txt", Some("text/plain"), tempFile)) + val dataParts = Map("_json" -> Seq(""" + { + "dataType":"file", + "ioc":false, + "sighted":false, + "tlp":2, + "message":"localhost", + "tags":["local", "host"], + "data":["hello.txt;text/plain;SGVsbG8gd29ybGQ="] + } + """)) + val request = FakeRequest( + "POST", + s"/api/alert/testType;testSource;ref2/artifact", + Headers("user" -> "certuser@thehive.local"), + body = AnyContentAsMultipartFormData(MultipartFormData(dataParts, files, Nil)) + ) + val result = app[ObservableCtrl].createInAlert("testType;testSource;ref2")(request) + status(result) must equalTo(201).updateMessage(s => s"$s\n${contentAsString(result)}") + val createdObservables = contentAsJson(result).as[Seq[OutputObservable]] + createdObservables must have size 2 + createdObservables.map(_.dataType) must contain(be_==("file")).forall + createdObservables.flatMap(_.data) must beEmpty + createdObservables.map(_.sighted) must contain(beFalse).forall + createdObservables.map(_.message) must contain(beSome("localhost")).forall + createdObservables.map(_.tags) must contain(be_==(Set("local", "host"))).forall + val attachments = createdObservables.flatMap(_.attachment) + attachments must have size 2 + attachments must contain(beLike[OutputAttachment] { + case attachment => + attachment.name must beEqualTo("myfile.txt") + attachment.hashes must containTheSameElementsAs(hashes) + attachment.size must beEqualTo(tempFile.length()) + attachment.contentType must beEqualTo("text/plain") + }) + attachments must contain(beLike[OutputAttachment] { + case attachment => + attachment.name must beEqualTo("hello.txt") + attachment.hashes must containTheSameElementsAs(helloHashes) + attachment.size must beEqualTo(11) + attachment.contentType must beEqualTo("text/plain") + }) createdObservables.foreach(obs => obs must equalTo(getObservable(obs._id, app[ObservableCtrl]))) ok } From 4283406d136c02e19a7b4b0263c365e1518c1b23 Mon Sep 17 00:00:00 2001 From: Nabil Adouani Date: Mon, 11 Jan 2021 12:01:52 +0100 Subject: [PATCH 71/93] #1733 Add an alert-duration compnent to display the time took to import or merge and alert into a case. 
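Note on this component: it displays, on the alert list, how long it took to import or merge an alert into a case (or how long ago the alert was created). The backend patches earlier in this series provide the raw material in milliseconds: importDate is exposed as epoch milliseconds in extraData, and the computed.handlingDuration* properties derive per-unit values by plain integer division. A short Scala sketch of that arithmetic (not TheHive code):

// Sketch only: the unit conversions behind computed.handlingDurationIn* (input is milliseconds;
// the real traversals fall back to 0 when the alert or case has no end point yet).
object DurationUnits {
  def seconds(ms: Long): Long = ms / 1000L
  def minutes(ms: Long): Long = ms / 60000L
  def hours(ms: Long): Long   = ms / 3600000L
  def days(ms: Long): Long    = ms / 86400000L

  def main(args: Array[String]): Unit = {
    val handlingDuration = 93600000L // e.g. 26 hours between alert creation and import into a case
    println(s"${seconds(handlingDuration)} s, ${minutes(handlingDuration)} min, ${hours(handlingDuration)} h, ${days(handlingDuration)} d")
    // prints: 93600 s, 1560 min, 26 h, 1 d
  }
}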
--- frontend/app/index.html | 1 + frontend/app/scripts/directives/alert-duration.js | 15 +++++++++++++++ frontend/app/scripts/services/api/AlertingSrv.js | 3 ++- frontend/app/views/directives/alert-duration.html | 8 ++++++++ frontend/app/views/partials/alert/list.html | 3 +++ 5 files changed, 29 insertions(+), 1 deletion(-) create mode 100644 frontend/app/scripts/directives/alert-duration.js create mode 100644 frontend/app/views/directives/alert-duration.html diff --git a/frontend/app/index.html b/frontend/app/index.html index d419c6d6d0..4bc5afcf49 100644 --- a/frontend/app/index.html +++ b/frontend/app/index.html @@ -210,6 +210,7 @@ + diff --git a/frontend/app/scripts/directives/alert-duration.js b/frontend/app/scripts/directives/alert-duration.js new file mode 100644 index 0000000000..fbc90e06d5 --- /dev/null +++ b/frontend/app/scripts/directives/alert-duration.js @@ -0,0 +1,15 @@ +(function() { + 'use strict'; + angular.module('theHiveDirectives').directive('alertDuration', function() { + return { + restrict: 'E', + scope: { + start: '=', + end: '=', + icon: '@', + indicator: '=' + }, + templateUrl: 'views/directives/alert-duration.html' + }; + }); +})(); diff --git a/frontend/app/scripts/services/api/AlertingSrv.js b/frontend/app/scripts/services/api/AlertingSrv.js index 0d20782547..4d53efc81e 100644 --- a/frontend/app/scripts/services/api/AlertingSrv.js +++ b/frontend/app/scripts/services/api/AlertingSrv.js @@ -139,7 +139,8 @@ onUpdate: callback || undefined, operations: [ {'_name': 'listAlert'} - ] + ], + extraData: ['importDate'] }); }, diff --git a/frontend/app/views/directives/alert-duration.html b/frontend/app/views/directives/alert-duration.html new file mode 100644 index 0000000000..5a876f3de6 --- /dev/null +++ b/frontend/app/views/directives/alert-duration.html @@ -0,0 +1,8 @@ + + + {{indicator ? 'During ' : ''}}{{start | duration:end}} + + + {{start | duration}} {{indicator ? 'ago' : ''}} + + diff --git a/frontend/app/views/partials/alert/list.html b/frontend/app/views/partials/alert/list.html index b87e43732c..7ccef1b92e 100644 --- a/frontend/app/views/partials/alert/list.html +++ b/frontend/app/views/partials/alert/list.html @@ -136,6 +136,9 @@

    List of alerts ({{$vm.list.total || 0}} of {{$vm.alertList {{::event.observableCount || 0}} {{event.date | shortDate}} +
    + +
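A minimal usage sketch of the new alert-duration directive, derived from its scope definition above (start, end and indicator are expression bindings, icon is a literal string). The event.date and event.extraData.importDate expressions and the icon name are illustrative assumptions, not the exact bindings used in list.html:

    <!-- indicator toggles the "During ..." / "... ago" wording in the directive template -->
    <alert-duration start="event.date"
                    end="event.extraData.importDate"
                    icon="hourglass-half"
                    indicator="true">
    </alert-duration>

The extraData: ['importDate'] added to the list query in AlertingSrv.js is what would make an import date available on each listed alert for such a binding.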
    From 64d4147d75469a638cb3b966133dd620e3efce65 Mon Sep 17 00:00:00 2001 From: To-om Date: Mon, 11 Jan 2021 11:28:48 +0100 Subject: [PATCH 72/93] #1738 Optimize alertAlreadyImportes query --- ScalliGraph | 2 +- thehive/app/org/thp/thehive/services/AlertSrv.scala | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/ScalliGraph b/ScalliGraph index acc3bea235..33fcd753fa 160000 --- a/ScalliGraph +++ b/ScalliGraph @@ -1 +1 @@ -Subproject commit acc3bea235989e57f8b43fd0780f6905437cc9e2 +Subproject commit 33fcd753fa102062ab54411fef169c847f1501db diff --git a/thehive/app/org/thp/thehive/services/AlertSrv.scala b/thehive/app/org/thp/thehive/services/AlertSrv.scala index 082fa64ee6..f18809bd4f 100644 --- a/thehive/app/org/thp/thehive/services/AlertSrv.scala +++ b/thehive/app/org/thp/thehive/services/AlertSrv.scala @@ -72,8 +72,8 @@ class AlertSrv @Inject() ( graph: Graph, authContext: AuthContext ): Try[RichAlert] = { - val alertAlreadyExist = organisationSrv.get(organisation).alerts.getBySourceId(alert.`type`, alert.source, alert.sourceRef).getCount - if (alertAlreadyExist > 0) + val alertAlreadyExist = startTraversal.getBySourceId(alert.`type`, alert.source, alert.sourceRef).organisation.current.exists + if (alertAlreadyExist) Failure(CreateError(s"Alert ${alert.`type`}:${alert.source}:${alert.sourceRef} already exist in organisation ${organisation.name}")) else for { From 0b2c5241daa57aafd6cb5f4aab70ae5e4d7fa4ef Mon Sep 17 00:00:00 2001 From: To-om Date: Mon, 11 Jan 2021 15:17:29 +0100 Subject: [PATCH 73/93] #1738 Prevent multiple merge of an alert --- .../org/thp/thehive/services/AlertSrv.scala | 50 +++++++++++-------- 1 file changed, 28 insertions(+), 22 deletions(-) diff --git a/thehive/app/org/thp/thehive/services/AlertSrv.scala b/thehive/app/org/thp/thehive/services/AlertSrv.scala index f18809bd4f..fc6a60b5a2 100644 --- a/thehive/app/org/thp/thehive/services/AlertSrv.scala +++ b/thehive/app/org/thp/thehive/services/AlertSrv.scala @@ -7,7 +7,7 @@ import org.thp.scalligraph.query.PropertyUpdater import org.thp.scalligraph.services._ import org.thp.scalligraph.traversal.TraversalOps._ import org.thp.scalligraph.traversal.{Converter, IdentityConverter, StepLabel, Traversal} -import org.thp.scalligraph.{CreateError, EntityId, EntityIdOrName, RichOptionTry, RichSeq} +import org.thp.scalligraph.{BadRequestError, CreateError, EntityId, EntityIdOrName, RichOptionTry, RichSeq} import org.thp.thehive.controllers.v1.Conversion._ import org.thp.thehive.dto.v1.InputCustomFieldValue import org.thp.thehive.models._ @@ -280,28 +280,31 @@ class AlertSrv @Inject() ( } yield updatedCase def mergeInCase(alert: Alert with Entity, `case`: Case with Entity)(implicit graph: Graph, authContext: AuthContext): Try[Case with Entity] = - auditSrv - .mergeAudits { - // No audit for markAsRead and observables - // Audits for customFields, description and tags - val description = `case`.description + s"\n \n#### Merged with alert #${alert.sourceRef} ${alert.title}\n\n${alert.description.trim}" - for { - _ <- markAsRead(alert._id) - _ <- importObservables(alert, `case`) - _ <- importCustomFields(alert, `case`) - _ <- caseSrv.addTags(`case`, get(alert).tags.toSeq.map(_.toString).toSet) - _ <- alertCaseSrv.create(AlertCase(), alert, `case`) - c <- caseSrv.get(`case`).update(_.description, description).getOrFail("Case") - details <- Success( - Json.obj( - "customFields" -> get(alert).richCustomFields.toSeq.map(_.toOutput.toJson), - "description" -> c.description, - "tags" -> 
caseSrv.get(`case`).tags.toSeq.map(_.toString) + if (get(alert).isImported) + Failure(BadRequestError("Alert is already imported")) + else + auditSrv + .mergeAudits { + // No audit for markAsRead and observables + // Audits for customFields, description and tags + val description = `case`.description + s"\n \n#### Merged with alert #${alert.sourceRef} ${alert.title}\n\n${alert.description.trim}" + for { + _ <- markAsRead(alert._id) + _ <- importObservables(alert, `case`) + _ <- importCustomFields(alert, `case`) + _ <- caseSrv.addTags(`case`, get(alert).tags.toSeq.map(_.toString).toSet) + _ <- alertCaseSrv.create(AlertCase(), alert, `case`) + c <- caseSrv.get(`case`).update(_.description, description).getOrFail("Case") + details <- Success( + Json.obj( + "customFields" -> get(alert).richCustomFields.toSeq.map(_.toOutput.toJson), + "description" -> c.description, + "tags" -> caseSrv.get(`case`).tags.toSeq.map(_.toString) + ) ) - ) - } yield details - }(details => auditSrv.alertToCase.merge(alert, `case`, Some(details))) - .flatMap(_ => caseSrv.getOrFail(`case`._id)) + } yield details + }(details => auditSrv.alertToCase.merge(alert, `case`, Some(details))) + .flatMap(_ => caseSrv.getOrFail(`case`._id)) def importObservables(alert: Alert with Entity, `case`: Case with Entity)(implicit graph: Graph, @@ -400,6 +403,9 @@ object AlertOps { def imported: Traversal[Boolean, Boolean, IdentityConverter[Boolean]] = traversal.choose(_.outE[AlertCase], onTrue = true, onFalse = false) + def isImported: Boolean = + traversal.outE[AlertCase].exists + def importDate: Traversal[Date, Date, Converter[Date, Date]] = traversal.outE[AlertCase].value(_._createdAt) From cc9fc52bd8bb87fe24be6ee044df6604d6f60062 Mon Sep 17 00:00:00 2001 From: To-om Date: Mon, 11 Jan 2021 15:18:20 +0100 Subject: [PATCH 74/93] #1738 Add integrity checks on alerts --- .../app/org/thp/thehive/TheHiveModule.scala | 1 + .../org/thp/thehive/services/AlertSrv.scala | 21 ++++++++++++++++++- thehive/conf/reference.conf | 4 ++++ 3 files changed, 25 insertions(+), 1 deletion(-) diff --git a/thehive/app/org/thp/thehive/TheHiveModule.scala b/thehive/app/org/thp/thehive/TheHiveModule.scala index 4c752f9926..5edddcdc86 100644 --- a/thehive/app/org/thp/thehive/TheHiveModule.scala +++ b/thehive/app/org/thp/thehive/TheHiveModule.scala @@ -101,6 +101,7 @@ class TheHiveModule(environment: Environment, configuration: Configuration) exte integrityCheckOpsBindings.addBinding.to[CaseTemplateIntegrityCheckOps] integrityCheckOpsBindings.addBinding.to[DataIntegrityCheckOps] integrityCheckOpsBindings.addBinding.to[CaseIntegrityCheckOps] + integrityCheckOpsBindings.addBinding.to[AlertIntegrityCheckOps] bind[ActorRef].annotatedWithName("integrity-check-actor").toProvider[IntegrityCheckActorProvider] bind[ActorRef].annotatedWithName("flow-actor").toProvider[FlowActorProvider] diff --git a/thehive/app/org/thp/thehive/services/AlertSrv.scala b/thehive/app/org/thp/thehive/services/AlertSrv.scala index fc6a60b5a2..41859c2590 100644 --- a/thehive/app/org/thp/thehive/services/AlertSrv.scala +++ b/thehive/app/org/thp/thehive/services/AlertSrv.scala @@ -1,5 +1,6 @@ package org.thp.thehive.services +import akka.actor.ActorRef import org.apache.tinkerpop.gremlin.structure.Graph import org.thp.scalligraph.auth.{AuthContext, Permission} import org.thp.scalligraph.models._ @@ -32,7 +33,8 @@ class AlertSrv @Inject() ( customFieldSrv: CustomFieldSrv, caseTemplateSrv: CaseTemplateSrv, observableSrv: ObservableSrv, - auditSrv: AuditSrv + auditSrv: AuditSrv, + 
@Named("integrity-check-actor") integrityCheckActor: ActorRef )(implicit @Named("with-thehive-schema") db: Database ) extends VertexSrv[Alert] { @@ -268,6 +270,7 @@ class AlertSrv @Inject() ( _ <- importObservables(alert.alert, createdCase.`case`) _ <- alertCaseSrv.create(AlertCase(), alert.alert, createdCase.`case`) _ <- markAsRead(alert._id) + _ = integrityCheckActor ! EntityAdded("Alert") } yield createdCase } }(richCase => auditSrv.`case`.create(richCase.`case`, richCase.toJson)) @@ -304,6 +307,7 @@ class AlertSrv @Inject() ( ) } yield details }(details => auditSrv.alertToCase.merge(alert, `case`, Some(details))) + .map(_ => integrityCheckActor ! EntityAdded("Alert")) .flatMap(_ => caseSrv.getOrFail(`case`._id)) def importObservables(alert: Alert with Entity, `case`: Case with Entity)(implicit @@ -591,3 +595,18 @@ object AlertOps { implicit class AlertCustomFieldsOpsDefs(traversal: Traversal.E[AlertCustomField]) extends CustomFieldValueOpsDefs(traversal) } + +class AlertIntegrityCheckOps @Inject() (@Named("with-thehive-schema") val db: Database, val service: AlertSrv) extends IntegrityCheckOps[Alert] { + override def check(): Unit = { + db.tryTransaction { implicit graph => + service + .startTraversal + .flatMap(_.outE[AlertCase].range(1, 100)) + .remove() + Success(()) + } + () + } + + override def resolve(entities: Seq[Alert with Entity])(implicit graph: Graph): Try[Unit] = Success(()) +} diff --git a/thehive/conf/reference.conf b/thehive/conf/reference.conf index 403e23198c..523fb7d7d1 100644 --- a/thehive/conf/reference.conf +++ b/thehive/conf/reference.conf @@ -163,6 +163,10 @@ integrityCheck { initialDelay: 1 minute interval: 10 minutes } + alert { + initialDelay: 5 minute + interval: 30 minutes + } } From 7f0d11605a8b30ac72972a854bf01ad197af07df Mon Sep 17 00:00:00 2001 From: Nabil Adouani Date: Tue, 12 Jan 2021 05:48:30 +0100 Subject: [PATCH 75/93] #1744 Alert observable rows are not clickable --- .../app/views/components/alert/observable-list.component.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/app/views/components/alert/observable-list.component.html b/frontend/app/views/components/alert/observable-list.component.html index ef2b49c25a..972b390937 100644 --- a/frontend/app/views/components/alert/observable-list.component.html +++ b/frontend/app/views/components/alert/observable-list.component.html @@ -32,7 +32,7 @@ - + From 13f6a23b2caac67fbfc62134721c211ed02b78cd Mon Sep 17 00:00:00 2001 From: To-om Date: Tue, 12 Jan 2021 08:44:55 +0100 Subject: [PATCH 76/93] Remove inconsistent test --- .../misp/services/MispImportSrvTest.scala | 80 +++++++++---------- 1 file changed, 40 insertions(+), 40 deletions(-) diff --git a/misp/connector/src/test/scala/org/thp/thehive/connector/misp/services/MispImportSrvTest.scala b/misp/connector/src/test/scala/org/thp/thehive/connector/misp/services/MispImportSrvTest.scala index 734d52dc47..97042b73c9 100644 --- a/misp/connector/src/test/scala/org/thp/thehive/connector/misp/services/MispImportSrvTest.scala +++ b/misp/connector/src/test/scala/org/thp/thehive/connector/misp/services/MispImportSrvTest.scala @@ -71,44 +71,44 @@ class MispImportSrvTest(implicit ec: ExecutionContext) extends PlaySpecification } } - "MISP service" should { - "import events" in testApp { app => - app[Database].roTransaction { implicit graph => - app[MispImportSrv].syncMispEvents(app[TheHiveMispClient]) - app[AlertSrv].startTraversal.getBySourceId("misp", "ORGNAME", "1").visible.getOrFail("Alert") - } must beSuccessfulTry( - Alert( - 
`type` = "misp", - source = "ORGNAME", - sourceRef = "1", - externalLink = Some("https://misp.test/events/1"), - title = "#1 test1 -> 1.2", - description = s"Imported from MISP Event #1, created at ${Event.simpleDateFormat.parse("2019-08-23")}", - severity = 3, - date = Event.simpleDateFormat.parse("2019-08-23"), - lastSyncDate = new Date(1566913355000L), - tlp = 2, - pap = 2, - read = false, - follow = true - ) - ).eventually(5, 100.milliseconds) - - val observables = app[Database] - .roTransaction { implicit graph => - app[OrganisationSrv] - .get(EntityName("admin")) - .alerts - .getBySourceId("misp", "ORGNAME", "1") - .observables - .richObservable - .toList - } - .map(o => (o.`type`.name, o.data.map(_.data), o.tlp, o.message, o.tags.map(_.toString).toSet)) -// println(observables.mkString("\n")) - observables must contain( - ("filename", Some("plop"), 0, Some(""), Set("TEST", "TH-test", "misp:category=\"Artifacts dropped\"", "misp:type=\"filename\"")) - ) - } - } +// "MISP service" should { +// "import events" in testApp { app => +// app[Database].roTransaction { implicit graph => +// app[MispImportSrv].syncMispEvents(app[TheHiveMispClient]) +// app[AlertSrv].startTraversal.getBySourceId("misp", "ORGNAME", "1").visible.getOrFail("Alert") +// } must beSuccessfulTry( +// Alert( +// `type` = "misp", +// source = "ORGNAME", +// sourceRef = "1", +// externalLink = Some("https://misp.test/events/1"), +// title = "#1 test1 -> 1.2", +// description = s"Imported from MISP Event #1, created at ${Event.simpleDateFormat.parse("2019-08-23")}", +// severity = 3, +// date = Event.simpleDateFormat.parse("2019-08-23"), +// lastSyncDate = new Date(1566913355000L), +// tlp = 2, +// pap = 2, +// read = false, +// follow = true +// ) +// ).eventually(5, 100.milliseconds) +// +// val observables = app[Database] +// .roTransaction { implicit graph => +// app[OrganisationSrv] +// .get(EntityName("admin")) +// .alerts +// .getBySourceId("misp", "ORGNAME", "1") +// .observables +// .richObservable +// .toList +// } +// .map(o => (o.`type`.name, o.data.map(_.data), o.tlp, o.message, o.tags.map(_.toString).toSet)) +//// println(observables.mkString("\n")) +// observables must contain( +// ("filename", Some("plop"), 0, Some(""), Set("TEST", "TH-test", "misp:category=\"Artifacts dropped\"", "misp:type=\"filename\"")) +// ) +// } +// } } From ee97f71f4c7f0f1e7c02bf69e62bf7a48278d5f0 Mon Sep 17 00:00:00 2001 From: To-om Date: Tue, 12 Jan 2021 08:45:19 +0100 Subject: [PATCH 77/93] Release 4.0.4 --- CHANGELOG.md | 22 ++++++++++++++++++---- build.sbt | 2 +- frontend/bower.json | 2 +- frontend/package.json | 2 +- 4 files changed, 21 insertions(+), 7 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 4cbab6d666..b5b000c62a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,21 +1,35 @@ # Change Log +## [4.0.4](https://github.com/TheHive-Project/TheHive/milestone/67) (2021-01-12) + +**Implemented enhancements:** + +- [Feature Request] Add alert observable API endpoints [\#1732](https://github.com/TheHive-Project/TheHive/issues/1732) +- [Feature Request] Add alert import date property [\#1733](https://github.com/TheHive-Project/TheHive/issues/1733) +- [Feature Request] Add handling duration properties to imported Alert type [\#1734](https://github.com/TheHive-Project/TheHive/issues/1734) + +**Fixed bugs:** + +- [Bug] TheHive doesn't start if cassandra is not ready 
[\#1725](https://github.com/TheHive-Project/TheHive/issues/1725) +- [Bug] Alert imported multiple times (bis) [\#1738](https://github.com/TheHive-Project/TheHive/issues/1738) +- [Bug] Cosmetic fix in alert observables list [\#1744](https://github.com/TheHive-Project/TheHive/issues/1744) + ## [4.0.3](https://github.com/TheHive-Project/TheHive/milestone/66) (2020-12-22) **Implemented enhancements:** - Providing output details for Responders [\#1293](https://github.com/TheHive-Project/TheHive/issues/1293) - [Enhancement] Change artifacts by observables on the onMouseOver tooltip of the eye icon of observable [\#1695](https://github.com/TheHive-Project/TheHive/issues/1695) -- [Bug] Enhance support of S3 for attachment storage [\#1705](https://github.com/TheHive-Project/TheHive/issues/1705) -- Update the headers of basic info sections [\#1710](https://github.com/TheHive-Project/TheHive/issues/1710) +- [Enhancement] Enhance support of S3 for attachment storage [\#1705](https://github.com/TheHive-Project/TheHive/issues/1705) +- [Enhancement] Update the headers of basic info sections [\#1710](https://github.com/TheHive-Project/TheHive/issues/1710) - [Enhancement] Add poll duration config for UI Stream [\#1720](https://github.com/TheHive-Project/TheHive/issues/1720) **Fixed bugs:** - [Bug] MISP filters are not correctly implemented [\#1685](https://github.com/TheHive-Project/TheHive/issues/1685) - [Bug] The query "getObservable" doesn't work for alert observables [\#1691](https://github.com/TheHive-Project/TheHive/issues/1691) -- Click analyzers mini-report does not load the full report [\#1694](https://github.com/TheHive-Project/TheHive/issues/1694) -- [TH4] Import file observable in gui generate error [\#1697](https://github.com/TheHive-Project/TheHive/issues/1697) +- [Bug] Click analyzers mini-report does not load the full report [\#1694](https://github.com/TheHive-Project/TheHive/issues/1694) +- [Bug] Import file observable in gui generate error [\#1697](https://github.com/TheHive-Project/TheHive/issues/1697) - [Bug] Cannot search for alerts per observables [\#1707](https://github.com/TheHive-Project/TheHive/issues/1707) - [Bug] Serialization problem in cluster mode [\#1708](https://github.com/TheHive-Project/TheHive/issues/1708) - [Bug] Issue with sorting [\#1716](https://github.com/TheHive-Project/TheHive/issues/1716) diff --git a/build.sbt b/build.sbt index d3fb4daeb0..b91c194ce2 100644 --- a/build.sbt +++ b/build.sbt @@ -2,7 +2,7 @@ import Dependencies._ import com.typesafe.sbt.packager.Keys.bashScriptDefines import org.thp.ghcl.Milestone -val thehiveVersion = "4.0.3-1" +val thehiveVersion = "4.0.4-1" val scala212 = "2.12.12" val scala213 = "2.13.1" val supportedScalaVersions = List(scala212, scala213) diff --git a/frontend/bower.json b/frontend/bower.json index ba58fd6409..6a6988cc69 100644 --- a/frontend/bower.json +++ b/frontend/bower.json @@ -1,6 +1,6 @@ { "name": "thehive", - "version": "4.0.3-1", + "version": "4.0.4-1", "license": "AGPL-3.0", "dependencies": { "jquery": "^3.4.1", diff --git a/frontend/package.json b/frontend/package.json index 963e781297..05da8f241d 100644 --- 
a/frontend/package.json +++ b/frontend/package.json @@ -1,6 +1,6 @@ { "name": "thehive", - "version": "4.0.3-1", + "version": "4.0.4-1", "license": "AGPL-3.0", "repository": { "type": "git", From d33fc9747e89fc19696d5d6b9c8226146e2eb943 Mon Sep 17 00:00:00 2001 From: To-om Date: Thu, 14 Jan 2021 14:39:08 +0100 Subject: [PATCH 78/93] #1732 Update Router v1 --- thehive/app/org/thp/thehive/controllers/v1/Router.scala | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/thehive/app/org/thp/thehive/controllers/v1/Router.scala b/thehive/app/org/thp/thehive/controllers/v1/Router.scala index 314dd5ed5f..4b5d5a6bcc 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/Router.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/Router.scala @@ -52,7 +52,8 @@ class Router @Inject() ( // case POST(p"/case/_stats") => caseCtrl.stats() // case GET(p"/case/$caseId/links") => caseCtrl.linkedCases(caseId) - case POST(p"/case/$caseId/observable") => observableCtrl.create(caseId) + case POST(p"/case/$caseId/observable") => observableCtrl.createInCase(caseId) + case POST(p"/alert/$alertId/artifact") => observableCtrl.createInAlert(alertId) case GET(p"/observable/$observableId") => observableCtrl.get(observableId) case DELETE(p"/observable/$observableId") => observableCtrl.delete(observableId) case PATCH(p"/observable/_bulk") => observableCtrl.bulkUpdate From 44cd806e149e588ea24f1604e124c27f916764b6 Mon Sep 17 00:00:00 2001 From: Robin Riclet Date: Thu, 7 Jan 2021 16:05:37 +0100 Subject: [PATCH 79/93] WIP Technique creation ok --- .../{Organistion.scala => Organisation.scala} | 0 .../org/thp/thehive/dto/v1/Technique.scala | 102 ++++++++++++++++++ .../thehive/controllers/v1/Conversion.scala | 41 ++++++- .../thehive/controllers/v1/Properties.scala | 4 + .../thp/thehive/controllers/v1/Router.scala | 27 +++-- .../controllers/v1/TechniqueCtrl.scala | 87 +++++++++++++++ thehive/app/org/thp/thehive/models/Case.scala | 7 ++ .../org/thp/thehive/models/Permissions.scala | 2 + .../org/thp/thehive/models/Technique.scala | 40 +++++++ .../models/TheHiveSchemaDefinition.scala | 4 + .../thp/thehive/services/TechniqueSrv.scala | 58 ++++++++++ 11 files changed, 365 insertions(+), 7 deletions(-) rename dto/src/main/scala/org/thp/thehive/dto/v1/{Organistion.scala => Organisation.scala} (100%) create mode 100644 dto/src/main/scala/org/thp/thehive/dto/v1/Technique.scala create mode 100644 thehive/app/org/thp/thehive/controllers/v1/TechniqueCtrl.scala create mode 100644 thehive/app/org/thp/thehive/models/Technique.scala create mode 100644 thehive/app/org/thp/thehive/services/TechniqueSrv.scala diff --git a/dto/src/main/scala/org/thp/thehive/dto/v1/Organistion.scala b/dto/src/main/scala/org/thp/thehive/dto/v1/Organisation.scala similarity index 100% rename from dto/src/main/scala/org/thp/thehive/dto/v1/Organistion.scala rename to dto/src/main/scala/org/thp/thehive/dto/v1/Organisation.scala diff --git a/dto/src/main/scala/org/thp/thehive/dto/v1/Technique.scala b/dto/src/main/scala/org/thp/thehive/dto/v1/Technique.scala new file mode 100644 index 0000000000..936ecfa2c4 --- /dev/null +++ b/dto/src/main/scala/org/thp/thehive/dto/v1/Technique.scala @@ -0,0 +1,102 @@ +package org.thp.thehive.dto.v1 + +import play.api.libs.json.{Format, Json, Reads, Writes} + +import java.util.Date + +case class InputTechnique( + external_id: String, + name: String, + description: Option[String], + kill_chain_phases: Seq[InputKillChainPhase], + url: String, + `type`: String, + x_mitre_platforms: Seq[String], + x_mitre_data_sources: 
Seq[String], + x_mitre_version: Option[String] +) + +case class InputReference( + source_name: String, + external_id: String, + url: String +) + +case class InputKillChainPhase( + kill_chain_name: String, + phase_name: String +) + +object InputReference { + implicit val reads: Reads[InputReference] = Reads[InputReference] { json => + for { + source_name <- (json \ "source_name").validate[String] + external_id <- (json \ "external_id").validateOpt[String] + url <- (json \ "url").validateOpt[String] + } yield InputReference( + source_name, + external_id.getOrElse(""), + url.getOrElse("") + ) + } + + implicit val writes: Writes[InputReference] = Json.writes[InputReference] +} + +object InputKillChainPhase { + implicit val reads: Reads[InputKillChainPhase] = Reads[InputKillChainPhase] { json => + for { + kill_chain_name <- (json \ "kill_chain_name").validate[String] + phase_name <- (json \ "phase_name").validate[String] + } yield InputKillChainPhase( + kill_chain_name, + phase_name + ) + } + + implicit val writes: Writes[InputKillChainPhase] = Json.writes[InputKillChainPhase] +} + +object InputTechnique { + implicit val reads: Reads[InputTechnique] = Reads[InputTechnique] { json => + for { + references <- (json \ "external_references").validate[Seq[InputReference]] + mitreReference = references.find(_.source_name == "mitre-attack") + name <- (json \ "name").validate[String] + description <- (json \ "description").validateOpt[String] + kill_chain_phases <- (json \ "kill_chain_phases").validateOpt[Seq[InputKillChainPhase]] + techniqueType <- (json \ "type").validate[String] + x_mitre_platforms <- (json \ "x_mitre_platforms").validateOpt[Seq[String]] + x_mitre_data_sources <- (json \ "x_mitre_data_sources").validateOpt[Seq[String]] + x_mitre_version <- (json \ "x_mitre_version").validateOpt[String] + } yield InputTechnique( + mitreReference.map(_.external_id).getOrElse(""), + name, + description, + kill_chain_phases.getOrElse(Seq()), + mitreReference.map(_.url).getOrElse(""), + techniqueType, + x_mitre_platforms.getOrElse(Seq()), + x_mitre_data_sources.getOrElse(Seq()), + x_mitre_version + ) + } + + implicit val writes: Writes[InputTechnique] = Json.writes[InputTechnique] +} + +case class OutputTechnique( + _id: String, + _type: String, + _createdBy: String, + _updatedBy: Option[String] = None, + _createdAt: Date, + _updatedAt: Option[Date] = None, + name: String, + description: Option[String], + parent: Option[String] +) + +object OutputTechnique { + implicit val format: Format[OutputTechnique] = Json.format[OutputTechnique] +} diff --git a/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala b/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala index f1dd620ad6..9e71bbf9cd 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala @@ -201,7 +201,7 @@ object Conversion { .transform ) - implicit val organiastionRenderer: Renderer.Aux[Organisation with Entity, OutputOrganisation] = + implicit val organisationRenderer: Renderer.Aux[Organisation with Entity, OutputOrganisation] = Renderer.toJson[Organisation with Entity, OutputOrganisation](organisation => OutputOrganisation( organisation._id.toString, @@ -343,6 +343,7 @@ object Conversion { .withFieldComputed(_.tlp, _.tlp.getOrElse(2)) .transform } + implicit val observableOutput: Renderer.Aux[RichObservable, OutputObservable] = Renderer.toJson[RichObservable, OutputObservable](richObservable => richObservable .into[OutputObservable] @@ -457,4 +458,42 @@ 
object Conversion { .transform } + implicit class InputTechniqueOps(inputTechnique: InputTechnique) { + def toTechnique: Technique = + inputTechnique + .into[Technique] + .withFieldRenamed(_.external_id, _.techniqueId) + .withFieldComputed(_.tactics, _.kill_chain_phases.map(_.phase_name)) + .withFieldRenamed(_.`type`, _.techniqueType) + .withFieldRenamed(_.x_mitre_platforms, _.platforms) + .withFieldRenamed(_.x_mitre_data_sources, _.dataSources) + .withFieldRenamed(_.x_mitre_version, _.version) + .transform + } + + implicit val richTechniqueRenderer: Renderer.Aux[RichTechnique, OutputTechnique] = + Renderer.toJson[RichTechnique, OutputTechnique](technique => + technique + .into[OutputTechnique] + .withFieldComputed(_._id, _._id.toString) + .withFieldConst(_._type, "Technique") + .withFieldComputed(_.parent, _.parent.map(_.name)) + .transform + ) + + implicit val techniqueRenderer: Renderer.Aux[Technique with Entity, OutputTechnique] = + Renderer.toJson[Technique with Entity, OutputTechnique](technique => + OutputTechnique( + technique._id.toString, + "technique", + technique._createdBy, + technique._updatedBy, + technique._createdAt, + technique._updatedAt, + technique.name, + technique.description, + None + ) + ) + } diff --git a/thehive/app/org/thp/thehive/controllers/v1/Properties.scala b/thehive/app/org/thp/thehive/controllers/v1/Properties.scala index a83f09c829..2106c2ba07 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/Properties.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/Properties.scala @@ -460,4 +460,8 @@ class Properties @Inject() ( .property("attachment.contentType", UMapping.string.optional)(_.select(_.attachments.value(_.contentType)).readonly) .property("attachment.id", UMapping.string.optional)(_.select(_.attachments.value(_.attachmentId)).readonly) .build + + lazy val technique: PublicProperties = + PublicPropertyListBuilder[Technique].build + } diff --git a/thehive/app/org/thp/thehive/controllers/v1/Router.scala b/thehive/app/org/thp/thehive/controllers/v1/Router.scala index 4b5d5a6bcc..c7c340f506 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/Router.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/Router.scala @@ -27,9 +27,10 @@ class Router @Inject() ( taskCtrl: TaskCtrl, shareCtrl: ShareCtrl, userCtrl: UserCtrl, - statusCtrl: StatusCtrl + statusCtrl: StatusCtrl, // streamCtrl: StreamCtrl, - // tagCtrl: TagCtrl + // tagCtrl: TagCtrl, + techniqueCtrl: TechniqueCtrl ) extends SimpleRouter { override def routes: Routes = { @@ -126,10 +127,24 @@ class Router @Inject() ( // POST /alert/:alertId/merge/:caseId controllers.AlertCtrl.mergeWithCase(alertId, caseId) case GET(p"/audit") => auditCtrl.flow -// GET /flow controllers.AuditCtrl.flow(rootId: Option[String], count: Option[Int]) -// GET /audit controllers.AuditCtrl.find() -// POST /audit/_search controllers.AuditCtrl.find() -// POST /audit/_stats controllers.AuditCtrl.stats() + // GET /flow controllers.AuditCtrl.flow(rootId: Option[String], count: Option[Int]) + // GET /audit controllers.AuditCtrl.find() + // POST /audit/_search controllers.AuditCtrl.find() + // POST /audit/_stats controllers.AuditCtrl.stats() + + // MITRE + case POST(p"/technique/mitre/import") => techniqueCtrl.importMitre + // list procedures + // list patterns + // list tactics + // list techniques + // list subtechniques + // list by id + // link to pattern + // unlink + // fill procedure + // Query : pattern name, pattern id + // List pattern id associated to a Case case POST(p"/profile") => profileCtrl.create case 
GET(p"/profile/$profileId") => profileCtrl.get(profileId) diff --git a/thehive/app/org/thp/thehive/controllers/v1/TechniqueCtrl.scala b/thehive/app/org/thp/thehive/controllers/v1/TechniqueCtrl.scala new file mode 100644 index 0000000000..56484a9600 --- /dev/null +++ b/thehive/app/org/thp/thehive/controllers/v1/TechniqueCtrl.scala @@ -0,0 +1,87 @@ +package org.thp.thehive.controllers.v1 + +import org.apache.tinkerpop.gremlin.structure.Graph +import org.thp.scalligraph.auth.AuthContext +import org.thp.scalligraph.{BadRequestError, EntityIdOrName} +import org.thp.scalligraph.controllers.{Entrypoint, FFile, FieldsParser} +import org.thp.scalligraph.models.{Database, Entity} +import org.thp.scalligraph.query.{ParamQuery, PublicProperties, Query} +import org.thp.scalligraph.traversal.TraversalOps.TraversalOpsDefs +import org.thp.scalligraph.traversal.{IteratorOutput, Traversal} +import org.thp.thehive.controllers.v1.Conversion._ +import org.thp.thehive.dto.v1.InputTechnique +import org.thp.thehive.models.{Permissions, RichTechnique, Technique} +import org.thp.thehive.services.TechniqueOps._ +import org.thp.thehive.services.TechniqueSrv +import play.api.libs.json.{JsArray, Json} +import play.api.mvc.{Action, AnyContent, Results} + +import java.io.FileInputStream +import javax.inject.{Inject, Named, Singleton} +import scala.util.{Failure, Success, Try} + +@Singleton +class TechniqueCtrl @Inject() ( + entrypoint: Entrypoint, + properties: Properties, + techniqueSrv: TechniqueSrv, + @Named("with-thehive-schema") implicit val db: Database +) extends QueryableCtrl { + override val entityName: String = "technique" + override val publicProperties: PublicProperties = properties.technique + override val initialQuery: Query = Query.init[Traversal.V[Technique]]( + "listTechnique", + (graph, _) => + techniqueSrv + .startTraversal(graph) + ) + override val pageQuery: ParamQuery[OutputParam] = Query.withParam[OutputParam, Traversal.V[Technique], IteratorOutput]( + "page", + FieldsParser[OutputParam], + (range, techniqueSteps, _) => techniqueSteps.richPage(range.from, range.to, range.extraData.contains("total"))(_.richTechnique) + ) + override val outputQuery: Query = Query.output[RichTechnique, Traversal.V[Technique]](_.richTechnique) + override val getQuery: ParamQuery[EntityIdOrName] = Query.initWithParam[EntityIdOrName, Traversal.V[Technique]]( + "getTechnique", + FieldsParser[EntityIdOrName], + (idOrName, graph, _) => techniqueSrv.get(idOrName)(graph) + ) + + def importMitre: Action[AnyContent] = + entrypoint("import MITRE ATT&CK techniques") + .extract("file", FieldsParser.file.on("file")) + .authPermitted(Permissions.manageTechnique) { implicit request => + val file: FFile = request.body("file") + + for { + inputTechniques <- parseJsonFile(file) + richTechniques = + inputTechniques + .foldLeft[JsArray](JsArray.empty) { (array, inputTechnique) => + val res = db.tryTransaction { implicit graph => + createFromInput(inputTechnique) + } match { + case Failure(e) => + Json.obj("status" -> "Failure", "message" -> e.getMessage) + case Success(t) => + Json.obj("status" -> "Success", "mitreId" -> t.techniqueId, "techniqueName" -> t.name) + } + array :+ res + } + } yield Results.Created(richTechniques) + } + + private def parseJsonFile(file: FFile): Try[Seq[InputTechnique]] = + for { + stream <- Try(new FileInputStream(file.filepath.toString)) + json = Json.parse(stream) + } yield (json \ "objects").get.as[Seq[InputTechnique]] + + private def createFromInput(inputTechnique: InputTechnique)(implicit graph: Graph, 
authContext: AuthContext): Try[Technique with Entity] = + if (inputTechnique.external_id.isEmpty) + Failure(BadRequestError(s"A technique with no MITRE id cannot be imported")) + else if (techniqueSrv.startTraversal.alreadyImported(inputTechnique.external_id)) + Failure(BadRequestError(s"A technique with MITRE id '${inputTechnique.external_id}' already exists in this organisation")) + else + techniqueSrv.createEntity(inputTechnique.toTechnique) +} diff --git a/thehive/app/org/thp/thehive/models/Case.scala b/thehive/app/org/thp/thehive/models/Case.scala index 1990523baf..da04ceff3b 100644 --- a/thehive/app/org/thp/thehive/models/Case.scala +++ b/thehive/app/org/thp/thehive/models/Case.scala @@ -77,6 +77,13 @@ case class CaseUser() @BuildEdgeEntity[Case, CaseTemplate] case class CaseCaseTemplate() +@BuildEdgeEntity[Case, Technique] +case class CaseTechnique( + description: Option[String] = None + // date occurence + // date sighting +) + @BuildVertexEntity @DefineIndex(IndexType.unique, "number") //@DefineIndex(IndexType.fulltext, "title") diff --git a/thehive/app/org/thp/thehive/models/Permissions.scala b/thehive/app/org/thp/thehive/models/Permissions.scala index 14b45cf5fc..9b049a0745 100644 --- a/thehive/app/org/thp/thehive/models/Permissions.scala +++ b/thehive/app/org/thp/thehive/models/Permissions.scala @@ -20,6 +20,7 @@ object Permissions extends Perms { lazy val manageAnalyse: PermissionDesc = PermissionDesc("manageAnalyse", "Run Cortex analyzer", "organisation") lazy val managePage: PermissionDesc = PermissionDesc("managePage", "Manage pages", "organisation") lazy val manageObservableTemplate: PermissionDesc = PermissionDesc("manageObservableTemplate", "Manage observable types", "admin") + lazy val manageTechnique: PermissionDesc = PermissionDesc("manageTechnique", "Manage techniques", "admin") lazy val accessTheHiveFS: PermissionDesc = PermissionDesc("accessTheHiveFS", "Access to TheHiveFS", "organisation") lazy val list: Set[PermissionDesc] = @@ -41,6 +42,7 @@ object Permissions extends Perms { manageAnalyse, managePage, manageObservableTemplate, + manageTechnique, accessTheHiveFS ) diff --git a/thehive/app/org/thp/thehive/models/Technique.scala b/thehive/app/org/thp/thehive/models/Technique.scala new file mode 100644 index 0000000000..f651db248c --- /dev/null +++ b/thehive/app/org/thp/thehive/models/Technique.scala @@ -0,0 +1,40 @@ +package org.thp.thehive.models + +import org.thp.scalligraph.models.Entity +import org.thp.scalligraph.{BuildEdgeEntity, BuildVertexEntity, EntityId} + +import java.util.Date + +@BuildVertexEntity +case class Technique( + techniqueId: String, + name: String, + description: Option[String], + tactics: Seq[String], + url: String, + techniqueType: String, + platforms: Seq[String], + dataSources: Seq[String], + // TODO capec id + version: Option[String] +) + +@BuildEdgeEntity[Technique, Technique] +case class TechniqueTechnique() + +case class RichTechnique(technique: Technique with Entity, parent: Option[Technique with Entity]) { + def techniqueId: String = technique.techniqueId + def name: String = technique.name + def description: Option[String] = technique.description + def tactics: Seq[String] = technique.tactics + def url: String = technique.url + def techniqueType: String = technique.techniqueType + def platforms: Seq[String] = technique.platforms + def dataSources: Seq[String] = technique.dataSources + def version: Option[String] = technique.version + def _id: EntityId = technique._id + def _createdAt: Date = technique._createdAt + def _createdBy: 
String = technique._createdBy + def _updatedAt: Option[Date] = technique._updatedAt + def _updatedBy: Option[String] = technique._updatedBy +} diff --git a/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala b/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala index da0fe21586..4913d3617f 100644 --- a/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala +++ b/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala @@ -89,6 +89,10 @@ class TheHiveSchemaDefinition @Inject() extends Schema with UpdatableSchema { traversal.outE[ShareTask].raw.property("actionRequired", false).iterate() Success(()) } + .updateGraph("Add manageTechnique permission to admin profile", "Profile") { traversal => + traversal.unsafeHas("name", "admin").raw.property("permissions", "manageTechnique").iterate() + Success(()) + } val reflectionClasses = new Reflections( new ConfigurationBuilder() diff --git a/thehive/app/org/thp/thehive/services/TechniqueSrv.scala b/thehive/app/org/thp/thehive/services/TechniqueSrv.scala new file mode 100644 index 0000000000..d35aca6b9f --- /dev/null +++ b/thehive/app/org/thp/thehive/services/TechniqueSrv.scala @@ -0,0 +1,58 @@ +package org.thp.thehive.services + +import org.apache.tinkerpop.gremlin.structure.Graph +import org.thp.scalligraph.EntityIdOrName +import org.thp.scalligraph.auth.AuthContext +import org.thp.scalligraph.models.{Database, Entity} +import org.thp.scalligraph.services._ +import org.thp.scalligraph.traversal.TraversalOps._ +import org.thp.scalligraph.traversal.{Converter, Traversal} +import org.thp.thehive.models._ +import org.thp.thehive.services.TechniqueOps._ + +import java.util.{Map => JMap} +import javax.inject.{Inject, Named, Singleton} +import scala.util.{Success, Try} + +@Singleton +class TechniqueSrv @Inject() ()(implicit + @Named("with-thehive-schema") db: Database +) extends VertexSrv[Technique] { + val techniqueTechniqueSrv = new EdgeSrv[TechniqueTechnique, Technique, Technique] + + def parentExists(child: Technique with Entity, parent: Technique with Entity)(implicit graph: Graph): Boolean = + child._id == parent._id || get(child).parent.getEntity(parent).exists + + def setParent(child: Technique with Entity, parent: Technique with Entity)(implicit authContext: AuthContext, graph: Graph): Try[Unit] = + if (parentExists(child, parent)) Success(()) + else techniqueTechniqueSrv.create(TechniqueTechnique(), child, parent).map(_ => ()) + +} + +object TechniqueOps { + implicit class TechniqueOpsDefs(traversal: Traversal.V[Technique]) { + + def get(idOrName: EntityIdOrName): Traversal.V[Technique] = + idOrName.fold(traversal.getByIds(_), _ => traversal.limit(0)) + + def getByTechniqueId(techniqueId: String): Traversal.V[Technique] = traversal.has(_.techniqueId, techniqueId) + + def parent: Traversal.V[Technique] = + traversal.in[TechniqueTechnique].v[Technique] + + def alreadyImported(techniqueId: String): Boolean = + traversal.getByTechniqueId(techniqueId).exists + + def richTechnique: Traversal[RichTechnique, JMap[String, Any], Converter[RichTechnique, JMap[String, Any]]] = + traversal + .project( + _.by + .by(_.in[TechniqueTechnique].v[Technique].fold) + ) + .domainMap { + case (technique, parent) => + RichTechnique(technique, parent.headOption) + } + + } +} From cb6734dac2e203af5ba4b6e334bb85cf0020ad1a Mon Sep 17 00:00:00 2001 From: Robin Riclet Date: Fri, 8 Jan 2021 16:27:31 +0100 Subject: [PATCH 80/93] Mitre json import & list techniques ok --- .../org/thp/thehive/dto/v1/Technique.scala | 28 
+++++++++++++------ .../thehive/controllers/v1/Conversion.scala | 17 +---------- .../controllers/v1/TechniqueCtrl.scala | 16 +++++++++-- .../controllers/v1/TheHiveQueryExecutor.scala | 2 ++ .../org/thp/thehive/models/Technique.scala | 1 - 5 files changed, 36 insertions(+), 28 deletions(-) diff --git a/dto/src/main/scala/org/thp/thehive/dto/v1/Technique.scala b/dto/src/main/scala/org/thp/thehive/dto/v1/Technique.scala index 936ecfa2c4..2188685f60 100644 --- a/dto/src/main/scala/org/thp/thehive/dto/v1/Technique.scala +++ b/dto/src/main/scala/org/thp/thehive/dto/v1/Technique.scala @@ -13,6 +13,7 @@ case class InputTechnique( `type`: String, x_mitre_platforms: Seq[String], x_mitre_data_sources: Seq[String], + x_mitre_is_subtechnique: Option[Boolean], x_mitre_version: Option[String] ) @@ -62,13 +63,14 @@ object InputTechnique { for { references <- (json \ "external_references").validate[Seq[InputReference]] mitreReference = references.find(_.source_name == "mitre-attack") - name <- (json \ "name").validate[String] - description <- (json \ "description").validateOpt[String] - kill_chain_phases <- (json \ "kill_chain_phases").validateOpt[Seq[InputKillChainPhase]] - techniqueType <- (json \ "type").validate[String] - x_mitre_platforms <- (json \ "x_mitre_platforms").validateOpt[Seq[String]] - x_mitre_data_sources <- (json \ "x_mitre_data_sources").validateOpt[Seq[String]] - x_mitre_version <- (json \ "x_mitre_version").validateOpt[String] + name <- (json \ "name").validate[String] + description <- (json \ "description").validateOpt[String] + kill_chain_phases <- (json \ "kill_chain_phases").validateOpt[Seq[InputKillChainPhase]] + techniqueType <- (json \ "type").validate[String] + x_mitre_platforms <- (json \ "x_mitre_platforms").validateOpt[Seq[String]] + x_mitre_data_sources <- (json \ "x_mitre_data_sources").validateOpt[Seq[String]] + x_mitre_is_subtechnique <- (json \ "x_mitre_is_subtechnique").validateOpt[Boolean] + x_mitre_version <- (json \ "x_mitre_version").validateOpt[String] } yield InputTechnique( mitreReference.map(_.external_id).getOrElse(""), name, @@ -78,6 +80,7 @@ object InputTechnique { techniqueType, x_mitre_platforms.getOrElse(Seq()), x_mitre_data_sources.getOrElse(Seq()), + x_mitre_is_subtechnique, x_mitre_version ) } @@ -89,11 +92,18 @@ case class OutputTechnique( _id: String, _type: String, _createdBy: String, - _updatedBy: Option[String] = None, + _updatedBy: Option[String], _createdAt: Date, - _updatedAt: Option[Date] = None, + _updatedAt: Option[Date], + techniqueId: String, name: String, description: Option[String], + tactics: Seq[String], + url: String, + techniqueType: String, + platforms: Seq[String], + dataSources: Seq[String], + version: Option[String], parent: Option[String] ) diff --git a/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala b/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala index 9e71bbf9cd..242a5d0204 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala @@ -477,23 +477,8 @@ object Conversion { .into[OutputTechnique] .withFieldComputed(_._id, _._id.toString) .withFieldConst(_._type, "Technique") - .withFieldComputed(_.parent, _.parent.map(_.name)) + .withFieldComputed(_.parent, _.parent.map(_.techniqueId)) .transform ) - implicit val techniqueRenderer: Renderer.Aux[Technique with Entity, OutputTechnique] = - Renderer.toJson[Technique with Entity, OutputTechnique](technique => - OutputTechnique( - technique._id.toString, - "technique", - 
technique._createdBy, - technique._updatedBy, - technique._createdAt, - technique._updatedAt, - technique.name, - technique.description, - None - ) - ) - } diff --git a/thehive/app/org/thp/thehive/controllers/v1/TechniqueCtrl.scala b/thehive/app/org/thp/thehive/controllers/v1/TechniqueCtrl.scala index 56484a9600..e2fa1e85d6 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/TechniqueCtrl.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/TechniqueCtrl.scala @@ -2,12 +2,12 @@ package org.thp.thehive.controllers.v1 import org.apache.tinkerpop.gremlin.structure.Graph import org.thp.scalligraph.auth.AuthContext -import org.thp.scalligraph.{BadRequestError, EntityIdOrName} import org.thp.scalligraph.controllers.{Entrypoint, FFile, FieldsParser} import org.thp.scalligraph.models.{Database, Entity} import org.thp.scalligraph.query.{ParamQuery, PublicProperties, Query} import org.thp.scalligraph.traversal.TraversalOps.TraversalOpsDefs import org.thp.scalligraph.traversal.{IteratorOutput, Traversal} +import org.thp.scalligraph.{BadRequestError, EntityIdOrName} import org.thp.thehive.controllers.v1.Conversion._ import org.thp.thehive.dto.v1.InputTechnique import org.thp.thehive.models.{Permissions, RichTechnique, Technique} @@ -57,6 +57,7 @@ class TechniqueCtrl @Inject() ( inputTechniques <- parseJsonFile(file) richTechniques = inputTechniques + .sortBy(_.external_id.length) // sort to create sub-techniques after their parent .foldLeft[JsArray](JsArray.empty) { (array, inputTechnique) => val res = db.tryTransaction { implicit graph => createFromInput(inputTechnique) @@ -83,5 +84,16 @@ class TechniqueCtrl @Inject() ( else if (techniqueSrv.startTraversal.alreadyImported(inputTechnique.external_id)) Failure(BadRequestError(s"A technique with MITRE id '${inputTechnique.external_id}' already exists in this organisation")) else - techniqueSrv.createEntity(inputTechnique.toTechnique) + for { + technique <- techniqueSrv.createEntity(inputTechnique.toTechnique) + _ = if (inputTechnique.x_mitre_is_subtechnique.getOrElse(false)) linkTechnique(technique) + } yield technique + + private def linkTechnique(child: Technique with Entity)(implicit graph: Graph, authContext: AuthContext): Try[Unit] = { + val firstDot = child.techniqueId.indexOf(".") + val parentId = child.techniqueId.substring(0, firstDot) + for { + parent <- techniqueSrv.startTraversal.getByTechniqueId(parentId).getOrFail("Technique") + } yield techniqueSrv.setParent(child, parent) + } } diff --git a/thehive/app/org/thp/thehive/controllers/v1/TheHiveQueryExecutor.scala b/thehive/app/org/thp/thehive/controllers/v1/TheHiveQueryExecutor.scala index e1f64bf01c..96f4435a2c 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/TheHiveQueryExecutor.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/TheHiveQueryExecutor.scala @@ -32,6 +32,7 @@ class TheHiveQueryExecutor @Inject() ( profileCtrl: ProfileCtrl, shareCtrl: ShareCtrl, taskCtrl: TaskCtrl, + techniqueCtrl: TechniqueCtrl, userCtrl: UserCtrl, // dashboardCtrl: DashboardCtrl, properties: Properties, @@ -55,6 +56,7 @@ class TheHiveQueryExecutor @Inject() ( shareCtrl, // tagCtrl, taskCtrl, + techniqueCtrl, userCtrl ) diff --git a/thehive/app/org/thp/thehive/models/Technique.scala b/thehive/app/org/thp/thehive/models/Technique.scala index f651db248c..2f37ca0a8c 100644 --- a/thehive/app/org/thp/thehive/models/Technique.scala +++ b/thehive/app/org/thp/thehive/models/Technique.scala @@ -15,7 +15,6 @@ case class Technique( techniqueType: String, platforms: Seq[String], dataSources: Seq[String], 
- // TODO capec id version: Option[String] ) From 1e5d2469412e5c0ecf69e28055a3fb6cf836f0ad Mon Sep 17 00:00:00 2001 From: Robin Riclet Date: Fri, 8 Jan 2021 17:22:44 +0100 Subject: [PATCH 81/93] GET technique ok --- .../app/org/thp/thehive/controllers/v1/Router.scala | 3 ++- .../thp/thehive/controllers/v1/TechniqueCtrl.scala | 11 +++++++++++ 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/thehive/app/org/thp/thehive/controllers/v1/Router.scala b/thehive/app/org/thp/thehive/controllers/v1/Router.scala index c7c340f506..584dec24c4 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/Router.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/Router.scala @@ -133,7 +133,8 @@ class Router @Inject() ( // POST /audit/_stats controllers.AuditCtrl.stats() // MITRE - case POST(p"/technique/mitre/import") => techniqueCtrl.importMitre + case POST(p"/technique/import/mitre") => techniqueCtrl.importMitre + case GET(p"/technique/$techniqueId") => techniqueCtrl.get(techniqueId) // list procedures // list patterns // list tactics diff --git a/thehive/app/org/thp/thehive/controllers/v1/TechniqueCtrl.scala b/thehive/app/org/thp/thehive/controllers/v1/TechniqueCtrl.scala index e2fa1e85d6..fa85a9a302 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/TechniqueCtrl.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/TechniqueCtrl.scala @@ -72,6 +72,17 @@ class TechniqueCtrl @Inject() ( } yield Results.Created(richTechniques) } + def get(techniqueId: String): Action[AnyContent] = + entrypoint("get technique") + .authRoTransaction(db) { implicit request => implicit graph => + techniqueSrv + .startTraversal + .getByTechniqueId(techniqueId) + .richTechnique + .getOrFail("Technique") + .map(richTechnique => Results.Ok(richTechnique.toJson)) + } + private def parseJsonFile(file: FFile): Try[Seq[InputTechnique]] = for { stream <- Try(new FileInputStream(file.filepath.toString)) From b3551cefab9933b5461941d23fb446dbff70a8fa Mon Sep 17 00:00:00 2001 From: Robin Riclet Date: Wed, 13 Jan 2021 17:29:58 +0100 Subject: [PATCH 82/93] Renamed Technique to Pattern, added Procedure entity & unit tests --- .../dto/v1/{Technique.scala => Pattern.scala} | 20 ++-- .../org/thp/thehive/dto/v1/Procedure.scala | 45 +++++++ .../thehive/controllers/v1/Conversion.scala | 38 ++++-- .../thehive/controllers/v1/DescribeCtrl.scala | 4 + .../thehive/controllers/v1/PatternCtrl.scala | 111 ++++++++++++++++++ .../controllers/v1/ProcedureCtrl.scala | 55 +++++++++ .../thehive/controllers/v1/Properties.scala | 11 +- .../thp/thehive/controllers/v1/Router.scala | 19 ++- .../controllers/v1/TechniqueCtrl.scala | 110 ----------------- .../controllers/v1/TheHiveQueryExecutor.scala | 6 +- thehive/app/org/thp/thehive/models/Case.scala | 8 +- .../app/org/thp/thehive/models/Pattern.scala | 39 ++++++ .../org/thp/thehive/models/Permissions.scala | 6 +- .../org/thp/thehive/models/Procedure.scala | 27 +++++ .../org/thp/thehive/models/Technique.scala | 39 ------ .../models/TheHiveSchemaDefinition.scala | 8 +- .../org/thp/thehive/services/AuditSrv.scala | 35 +++--- .../org/thp/thehive/services/PatternSrv.scala | 60 ++++++++++ .../thp/thehive/services/ProcedureSrv.scala | 55 +++++++++ .../thp/thehive/services/TechniqueSrv.scala | 58 --------- .../org/thp/thehive/DatabaseBuilder.scala | 70 ++++++----- .../controllers/v1/PatternCtrlTest.scala | 56 +++++++++ .../controllers/v1/ProcedureCtrlTest.scala | 49 ++++++++ .../test/resources/data/CaseProcedure.json | 3 + thehive/test/resources/data/Pattern.json | 17 +++ 
thehive/test/resources/data/Procedure.json | 7 ++ .../test/resources/data/ProcedurePattern.json | 3 + 27 files changed, 658 insertions(+), 301 deletions(-) rename dto/src/main/scala/org/thp/thehive/dto/v1/{Technique.scala => Pattern.scala} (88%) create mode 100644 dto/src/main/scala/org/thp/thehive/dto/v1/Procedure.scala create mode 100644 thehive/app/org/thp/thehive/controllers/v1/PatternCtrl.scala create mode 100644 thehive/app/org/thp/thehive/controllers/v1/ProcedureCtrl.scala delete mode 100644 thehive/app/org/thp/thehive/controllers/v1/TechniqueCtrl.scala create mode 100644 thehive/app/org/thp/thehive/models/Pattern.scala create mode 100644 thehive/app/org/thp/thehive/models/Procedure.scala delete mode 100644 thehive/app/org/thp/thehive/models/Technique.scala create mode 100644 thehive/app/org/thp/thehive/services/PatternSrv.scala create mode 100644 thehive/app/org/thp/thehive/services/ProcedureSrv.scala delete mode 100644 thehive/app/org/thp/thehive/services/TechniqueSrv.scala create mode 100644 thehive/test/org/thp/thehive/controllers/v1/PatternCtrlTest.scala create mode 100644 thehive/test/org/thp/thehive/controllers/v1/ProcedureCtrlTest.scala create mode 100644 thehive/test/resources/data/CaseProcedure.json create mode 100644 thehive/test/resources/data/Pattern.json create mode 100644 thehive/test/resources/data/Procedure.json create mode 100644 thehive/test/resources/data/ProcedurePattern.json diff --git a/dto/src/main/scala/org/thp/thehive/dto/v1/Technique.scala b/dto/src/main/scala/org/thp/thehive/dto/v1/Pattern.scala similarity index 88% rename from dto/src/main/scala/org/thp/thehive/dto/v1/Technique.scala rename to dto/src/main/scala/org/thp/thehive/dto/v1/Pattern.scala index 2188685f60..38b2b3a88b 100644 --- a/dto/src/main/scala/org/thp/thehive/dto/v1/Technique.scala +++ b/dto/src/main/scala/org/thp/thehive/dto/v1/Pattern.scala @@ -4,7 +4,7 @@ import play.api.libs.json.{Format, Json, Reads, Writes} import java.util.Date -case class InputTechnique( +case class InputPattern( external_id: String, name: String, description: Option[String], @@ -58,8 +58,8 @@ object InputKillChainPhase { implicit val writes: Writes[InputKillChainPhase] = Json.writes[InputKillChainPhase] } -object InputTechnique { - implicit val reads: Reads[InputTechnique] = Reads[InputTechnique] { json => +object InputPattern { + implicit val reads: Reads[InputPattern] = Reads[InputPattern] { json => for { references <- (json \ "external_references").validate[Seq[InputReference]] mitreReference = references.find(_.source_name == "mitre-attack") @@ -71,7 +71,7 @@ object InputTechnique { x_mitre_data_sources <- (json \ "x_mitre_data_sources").validateOpt[Seq[String]] x_mitre_is_subtechnique <- (json \ "x_mitre_is_subtechnique").validateOpt[Boolean] x_mitre_version <- (json \ "x_mitre_version").validateOpt[String] - } yield InputTechnique( + } yield InputPattern( mitreReference.map(_.external_id).getOrElse(""), name, description, @@ -85,28 +85,28 @@ object InputTechnique { ) } - implicit val writes: Writes[InputTechnique] = Json.writes[InputTechnique] + implicit val writes: Writes[InputPattern] = Json.writes[InputPattern] } -case class OutputTechnique( +case class OutputPattern( _id: String, _type: String, _createdBy: String, _updatedBy: Option[String], _createdAt: Date, _updatedAt: Option[Date], - techniqueId: String, + patternId: String, name: String, description: Option[String], tactics: Seq[String], url: String, - techniqueType: String, + patternType: String, platforms: Seq[String], dataSources: Seq[String], 
version: Option[String], parent: Option[String] ) -object OutputTechnique { - implicit val format: Format[OutputTechnique] = Json.format[OutputTechnique] +object OutputPattern { + implicit val format: Format[OutputPattern] = Json.format[OutputPattern] } diff --git a/dto/src/main/scala/org/thp/thehive/dto/v1/Procedure.scala b/dto/src/main/scala/org/thp/thehive/dto/v1/Procedure.scala new file mode 100644 index 0000000000..fc57cd9cb7 --- /dev/null +++ b/dto/src/main/scala/org/thp/thehive/dto/v1/Procedure.scala @@ -0,0 +1,45 @@ +package org.thp.thehive.dto.v1 + +import play.api.libs.json.{Format, Json, Reads, Writes} + +import java.util.Date + +case class InputProcedure( + description: String, + occurence: Date, + caseId: String, + patternId: String +) + +object InputProcedure { + implicit val reads: Reads[InputProcedure] = Reads[InputProcedure] { json => + for { + description <- (json \ "description").validate[String] + occurence <- (json \ "occurence").validate[Date] + caseId <- (json \ "caseId").validate[String] + patternId <- (json \ "patternId").validate[String] + } yield InputProcedure( + description, + occurence, + caseId, + patternId + ) + } + + implicit val writes: Writes[InputProcedure] = Json.writes[InputProcedure] +} + +case class OutputProcedure( + _id: String, + _createdAt: Date, + _createdBy: String, + _updatedAt: Option[Date], + _updatedBy: Option[String], + description: String, + occurence: Date, + patternId: String +) + +object OutputProcedure { + implicit val format: Format[OutputProcedure] = Json.format[OutputProcedure] +} diff --git a/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala b/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala index 242a5d0204..9058aa3a30 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala @@ -458,26 +458,40 @@ object Conversion { .transform } - implicit class InputTechniqueOps(inputTechnique: InputTechnique) { - def toTechnique: Technique = - inputTechnique - .into[Technique] - .withFieldRenamed(_.external_id, _.techniqueId) + implicit class InputPatternOps(inputPattern: InputPattern) { + def toPattern: Pattern = + inputPattern + .into[Pattern] + .withFieldRenamed(_.external_id, _.patternId) .withFieldComputed(_.tactics, _.kill_chain_phases.map(_.phase_name)) - .withFieldRenamed(_.`type`, _.techniqueType) + .withFieldRenamed(_.`type`, _.patternType) .withFieldRenamed(_.x_mitre_platforms, _.platforms) .withFieldRenamed(_.x_mitre_data_sources, _.dataSources) .withFieldRenamed(_.x_mitre_version, _.version) .transform } - implicit val richTechniqueRenderer: Renderer.Aux[RichTechnique, OutputTechnique] = - Renderer.toJson[RichTechnique, OutputTechnique](technique => - technique - .into[OutputTechnique] + implicit val richPatternRenderer: Renderer.Aux[RichPattern, OutputPattern] = + Renderer.toJson[RichPattern, OutputPattern]( + _.into[OutputPattern] .withFieldComputed(_._id, _._id.toString) - .withFieldConst(_._type, "Technique") - .withFieldComputed(_.parent, _.parent.map(_.techniqueId)) + .withFieldConst(_._type, "Pattern") + .withFieldComputed(_.parent, _.parent.map(_.patternId)) + .transform + ) + + implicit class InputProcedureOps(inputProcedure: InputProcedure) { + def toProcedure: Procedure = + inputProcedure + .into[Procedure] + .transform + } + + implicit val richProcedureRenderer: Renderer.Aux[RichProcedure, OutputProcedure] = + Renderer.toJson[RichProcedure, OutputProcedure]( + _.into[OutputProcedure] + .withFieldComputed(_._id, 
_._id.toString) + .withFieldComputed(_.patternId, _.pattern.patternId) .transform ) diff --git a/thehive/app/org/thp/thehive/controllers/v1/DescribeCtrl.scala b/thehive/app/org/thp/thehive/controllers/v1/DescribeCtrl.scala index 6c1c6dc4f9..9438099e24 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/DescribeCtrl.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/DescribeCtrl.scala @@ -37,6 +37,8 @@ class DescribeCtrl @Inject() ( observableTypeCtrl: ObservableTypeCtrl, organisationCtrl: OrganisationCtrl, // pageCtrl: PageCtrl, + patternCtrl: PatternCtrl, + procedureCtrl: ProcedureCtrl, profileCtrl: ProfileCtrl, taskCtrl: TaskCtrl, userCtrl: UserCtrl, @@ -107,6 +109,8 @@ class DescribeCtrl @Inject() ( ), EntityDescription("organisation", "listOrganisation", organisationCtrl.publicProperties.list.flatMap(propertyToJson("organisation", _))), // EntityDescription("page", "listPage", pageCtrl.publicProperties.list.flatMap(propertyToJson("page", _))) + EntityDescription("pattern", "listPattern", patternCtrl.publicProperties.list.flatMap(propertyToJson("pattern", _))), + EntityDescription("procedure", "listProcedure", procedureCtrl.publicProperties.list.flatMap(propertyToJson("procedure", _))), EntityDescription("profile", "listProfile", profileCtrl.publicProperties.list.flatMap(propertyToJson("profile", _))), EntityDescription("task", "listTask", taskCtrl.publicProperties.list.flatMap(propertyToJson("case_task", _))), EntityDescription("user", "listUser", userCtrl.publicProperties.list.flatMap(propertyToJson("user", _))) diff --git a/thehive/app/org/thp/thehive/controllers/v1/PatternCtrl.scala b/thehive/app/org/thp/thehive/controllers/v1/PatternCtrl.scala new file mode 100644 index 0000000000..eb0859895a --- /dev/null +++ b/thehive/app/org/thp/thehive/controllers/v1/PatternCtrl.scala @@ -0,0 +1,111 @@ +package org.thp.thehive.controllers.v1 + +import org.apache.tinkerpop.gremlin.structure.Graph +import org.thp.scalligraph.auth.AuthContext +import org.thp.scalligraph.controllers.{Entrypoint, FFile, FieldsParser} +import org.thp.scalligraph.models.{Database, Entity} +import org.thp.scalligraph.query.{ParamQuery, PublicProperties, Query} +import org.thp.scalligraph.traversal.TraversalOps.TraversalOpsDefs +import org.thp.scalligraph.traversal.{IteratorOutput, Traversal} +import org.thp.scalligraph.{BadRequestError, EntityIdOrName} +import org.thp.thehive.controllers.v1.Conversion._ +import org.thp.thehive.dto.v1.InputPattern +import org.thp.thehive.models.{Pattern, Permissions, RichPattern} +import org.thp.thehive.services.PatternOps._ +import org.thp.thehive.services.PatternSrv +import play.api.libs.json.{JsArray, Json} +import play.api.mvc.{Action, AnyContent, Results} + +import java.io.FileInputStream +import javax.inject.{Inject, Named, Singleton} +import scala.util.{Failure, Success, Try} + +@Singleton +class PatternCtrl @Inject() ( + entrypoint: Entrypoint, + properties: Properties, + patternSrv: PatternSrv, + @Named("with-thehive-schema") implicit val db: Database +) extends QueryableCtrl { + override val entityName: String = "pattern" + override val publicProperties: PublicProperties = properties.pattern + override val initialQuery: Query = Query.init[Traversal.V[Pattern]]( + "listPattern", + (graph, _) => + patternSrv + .startTraversal(graph) + ) + override val pageQuery: ParamQuery[OutputParam] = Query.withParam[OutputParam, Traversal.V[Pattern], IteratorOutput]( + "page", + FieldsParser[OutputParam], + (range, patternSteps, _) => patternSteps.richPage(range.from, range.to, 
range.extraData.contains("total"))(_.richPattern) + ) + override val outputQuery: Query = Query.output[RichPattern, Traversal.V[Pattern]](_.richPattern) + override val getQuery: ParamQuery[EntityIdOrName] = Query.initWithParam[EntityIdOrName, Traversal.V[Pattern]]( + "getPattern", + FieldsParser[EntityIdOrName], + (idOrName, graph, _) => patternSrv.get(idOrName)(graph) + ) + + def importMitre: Action[AnyContent] = + entrypoint("import MITRE ATT&CK patterns") + .extract("file", FieldsParser.file.on("file")) + .authPermitted(Permissions.managePattern) { implicit request => + val file: FFile = request.body("file") + + for { + inputPatterns <- parseJsonFile(file) + richPatterns = + inputPatterns + .sortBy(_.external_id.length) // sort to create sub-patterns after their parent + .foldLeft[JsArray](JsArray.empty) { (array, inputPattern) => + val res = db.tryTransaction { implicit graph => + createFromInput(inputPattern) + } match { + case Failure(e) => + Json.obj("status" -> "Failure", "message" -> e.getMessage) + case Success(t) => + Json.obj("status" -> "Success", "mitreId" -> t.patternId, "patternName" -> t.name) + } + array :+ res + } + } yield Results.Created(richPatterns) + } + + def get(patternId: String): Action[AnyContent] = + entrypoint("get pattern") + .authRoTransaction(db) { implicit request => implicit graph => + patternSrv + .startTraversal + .getByPatternId(patternId) + .richPattern + .getOrFail("Pattern") + .map(richPattern => Results.Ok(richPattern.toJson)) + } + + private def parseJsonFile(file: FFile): Try[Seq[InputPattern]] = + for { + stream <- Try(new FileInputStream(file.filepath.toString)) + json = Json.parse(stream) + } yield (json \ "objects").get.as[Seq[InputPattern]] + + private def createFromInput(inputPattern: InputPattern)(implicit graph: Graph, authContext: AuthContext): Try[Pattern with Entity] = + if (inputPattern.external_id.isEmpty) + Failure(BadRequestError(s"A pattern with no MITRE id cannot be imported")) + else if (patternSrv.startTraversal.alreadyImported(inputPattern.external_id)) + Failure(BadRequestError(s"A pattern with MITRE id '${inputPattern.external_id}' already exists in this organisation")) + else + for { + pattern <- patternSrv.createEntity(inputPattern.toPattern) + _ = if (inputPattern.x_mitre_is_subtechnique.getOrElse(false)) linkPattern(pattern) + } yield pattern + + private def linkPattern(child: Pattern with Entity)(implicit graph: Graph, authContext: AuthContext): Try[Unit] = { + val firstDot = child.patternId.indexOf(".") + val parentId = child.patternId.substring(0, firstDot) + for { + parent <- patternSrv.startTraversal.getByPatternId(parentId).getOrFail("Pattern") + _ <- patternSrv.setParent(child, parent) + } yield () + } +} diff --git a/thehive/app/org/thp/thehive/controllers/v1/ProcedureCtrl.scala b/thehive/app/org/thp/thehive/controllers/v1/ProcedureCtrl.scala new file mode 100644 index 0000000000..02de6f898f --- /dev/null +++ b/thehive/app/org/thp/thehive/controllers/v1/ProcedureCtrl.scala @@ -0,0 +1,55 @@ +package org.thp.thehive.controllers.v1 + +import org.thp.scalligraph.EntityIdOrName +import org.thp.scalligraph.controllers.{Entrypoint, FieldsParser} +import org.thp.scalligraph.models.Database +import org.thp.scalligraph.query.{ParamQuery, PublicProperties, Query} +import org.thp.scalligraph.traversal.TraversalOps.TraversalOpsDefs +import org.thp.scalligraph.traversal.{IteratorOutput, Traversal} +import org.thp.thehive.controllers.v1.Conversion._ +import org.thp.thehive.dto.v1.InputProcedure +import 
org.thp.thehive.models.{Permissions, Procedure, RichProcedure} +import org.thp.thehive.services.ProcedureOps._ +import org.thp.thehive.services.ProcedureSrv +import play.api.mvc.{Action, AnyContent, Results} + +import javax.inject.{Inject, Named, Singleton} + +@Singleton +class ProcedureCtrl @Inject() ( + entrypoint: Entrypoint, + properties: Properties, + procedureSrv: ProcedureSrv, + @Named("with-thehive-schema") implicit val db: Database +) extends QueryableCtrl { + override val entityName: String = "procedure" + override val publicProperties: PublicProperties = properties.procedure + override val initialQuery: Query = Query.init[Traversal.V[Procedure]]( + "listProcedure", + (graph, _) => + procedureSrv + .startTraversal(graph) + ) + override val pageQuery: ParamQuery[OutputParam] = Query.withParam[OutputParam, Traversal.V[Procedure], IteratorOutput]( + "page", + FieldsParser[OutputParam], + (range, procedureSteps, _) => procedureSteps.richPage(range.from, range.to, range.extraData.contains("total"))(_.richProcedure) + ) + override val outputQuery: Query = Query.output[RichProcedure, Traversal.V[Procedure]](_.richProcedure) + override val getQuery: ParamQuery[EntityIdOrName] = Query.initWithParam[EntityIdOrName, Traversal.V[Procedure]]( + "getProcedure", + FieldsParser[EntityIdOrName], + (idOrName, graph, _) => procedureSrv.get(idOrName)(graph) + ) + + def create: Action[AnyContent] = + entrypoint("create procedure") + .extract("procedure", FieldsParser[InputProcedure]) + .authPermittedTransaction(db, Permissions.manageProcedure) { implicit request => implicit graph => + val inputProcedure: InputProcedure = request.body("procedure") + for { + richProcedure <- procedureSrv.create(inputProcedure.toProcedure, inputProcedure.caseId, inputProcedure.patternId) + } yield Results.Created(richProcedure.toJson) + } + +} diff --git a/thehive/app/org/thp/thehive/controllers/v1/Properties.scala b/thehive/app/org/thp/thehive/controllers/v1/Properties.scala index 2106c2ba07..803e9c2912 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/Properties.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/Properties.scala @@ -352,6 +352,13 @@ class Properties @Inject() ( .property("description", UMapping.string)(_.field.updatable) .build + // TODO add fields + lazy val pattern: PublicProperties = + PublicPropertyListBuilder[Pattern].build + + lazy val procedure: PublicProperties = + PublicPropertyListBuilder[Procedure].build + lazy val profile: PublicProperties = PublicPropertyListBuilder[Profile] .property("name", UMapping.string)(_.field.updatable) @@ -460,8 +467,4 @@ class Properties @Inject() ( .property("attachment.contentType", UMapping.string.optional)(_.select(_.attachments.value(_.contentType)).readonly) .property("attachment.id", UMapping.string.optional)(_.select(_.attachments.value(_.attachmentId)).readonly) .build - - lazy val technique: PublicProperties = - PublicPropertyListBuilder[Technique].build - } diff --git a/thehive/app/org/thp/thehive/controllers/v1/Router.scala b/thehive/app/org/thp/thehive/controllers/v1/Router.scala index 584dec24c4..c695d8a5b6 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/Router.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/Router.scala @@ -23,14 +23,15 @@ class Router @Inject() ( organisationCtrl: OrganisationCtrl, // pageCtrl: PageCtrl, // permissionCtrl: PermissionCtrl, + patternCtrl: PatternCtrl, + procedureCtrl: ProcedureCtrl, profileCtrl: ProfileCtrl, taskCtrl: TaskCtrl, shareCtrl: ShareCtrl, userCtrl: UserCtrl, - statusCtrl: 
StatusCtrl, + statusCtrl: StatusCtrl // streamCtrl: StreamCtrl, // tagCtrl: TagCtrl, - techniqueCtrl: TechniqueCtrl ) extends SimpleRouter { override def routes: Routes = { @@ -132,18 +133,12 @@ class Router @Inject() ( // POST /audit/_search controllers.AuditCtrl.find() // POST /audit/_stats controllers.AuditCtrl.stats() - // MITRE - case POST(p"/technique/import/mitre") => techniqueCtrl.importMitre - case GET(p"/technique/$techniqueId") => techniqueCtrl.get(techniqueId) - // list procedures - // list patterns - // list tactics - // list techniques - // list subtechniques - // list by id + case POST(p"/pattern/import/mitre") => patternCtrl.importMitre + case GET(p"/pattern/$patternId") => patternCtrl.get(patternId) + // list tactics // link to pattern // unlink - // fill procedure + case POST(p"/procedure") => procedureCtrl.create // Query : pattern name, pattern id // List pattern id associated to a Case diff --git a/thehive/app/org/thp/thehive/controllers/v1/TechniqueCtrl.scala b/thehive/app/org/thp/thehive/controllers/v1/TechniqueCtrl.scala deleted file mode 100644 index fa85a9a302..0000000000 --- a/thehive/app/org/thp/thehive/controllers/v1/TechniqueCtrl.scala +++ /dev/null @@ -1,110 +0,0 @@ -package org.thp.thehive.controllers.v1 - -import org.apache.tinkerpop.gremlin.structure.Graph -import org.thp.scalligraph.auth.AuthContext -import org.thp.scalligraph.controllers.{Entrypoint, FFile, FieldsParser} -import org.thp.scalligraph.models.{Database, Entity} -import org.thp.scalligraph.query.{ParamQuery, PublicProperties, Query} -import org.thp.scalligraph.traversal.TraversalOps.TraversalOpsDefs -import org.thp.scalligraph.traversal.{IteratorOutput, Traversal} -import org.thp.scalligraph.{BadRequestError, EntityIdOrName} -import org.thp.thehive.controllers.v1.Conversion._ -import org.thp.thehive.dto.v1.InputTechnique -import org.thp.thehive.models.{Permissions, RichTechnique, Technique} -import org.thp.thehive.services.TechniqueOps._ -import org.thp.thehive.services.TechniqueSrv -import play.api.libs.json.{JsArray, Json} -import play.api.mvc.{Action, AnyContent, Results} - -import java.io.FileInputStream -import javax.inject.{Inject, Named, Singleton} -import scala.util.{Failure, Success, Try} - -@Singleton -class TechniqueCtrl @Inject() ( - entrypoint: Entrypoint, - properties: Properties, - techniqueSrv: TechniqueSrv, - @Named("with-thehive-schema") implicit val db: Database -) extends QueryableCtrl { - override val entityName: String = "technique" - override val publicProperties: PublicProperties = properties.technique - override val initialQuery: Query = Query.init[Traversal.V[Technique]]( - "listTechnique", - (graph, _) => - techniqueSrv - .startTraversal(graph) - ) - override val pageQuery: ParamQuery[OutputParam] = Query.withParam[OutputParam, Traversal.V[Technique], IteratorOutput]( - "page", - FieldsParser[OutputParam], - (range, techniqueSteps, _) => techniqueSteps.richPage(range.from, range.to, range.extraData.contains("total"))(_.richTechnique) - ) - override val outputQuery: Query = Query.output[RichTechnique, Traversal.V[Technique]](_.richTechnique) - override val getQuery: ParamQuery[EntityIdOrName] = Query.initWithParam[EntityIdOrName, Traversal.V[Technique]]( - "getTechnique", - FieldsParser[EntityIdOrName], - (idOrName, graph, _) => techniqueSrv.get(idOrName)(graph) - ) - - def importMitre: Action[AnyContent] = - entrypoint("import MITRE ATT&CK techniques") - .extract("file", FieldsParser.file.on("file")) - .authPermitted(Permissions.manageTechnique) { implicit request 
=> - val file: FFile = request.body("file") - - for { - inputTechniques <- parseJsonFile(file) - richTechniques = - inputTechniques - .sortBy(_.external_id.length) // sort to create sub-techniques after their parent - .foldLeft[JsArray](JsArray.empty) { (array, inputTechnique) => - val res = db.tryTransaction { implicit graph => - createFromInput(inputTechnique) - } match { - case Failure(e) => - Json.obj("status" -> "Failure", "message" -> e.getMessage) - case Success(t) => - Json.obj("status" -> "Success", "mitreId" -> t.techniqueId, "techniqueName" -> t.name) - } - array :+ res - } - } yield Results.Created(richTechniques) - } - - def get(techniqueId: String): Action[AnyContent] = - entrypoint("get technique") - .authRoTransaction(db) { implicit request => implicit graph => - techniqueSrv - .startTraversal - .getByTechniqueId(techniqueId) - .richTechnique - .getOrFail("Technique") - .map(richTechnique => Results.Ok(richTechnique.toJson)) - } - - private def parseJsonFile(file: FFile): Try[Seq[InputTechnique]] = - for { - stream <- Try(new FileInputStream(file.filepath.toString)) - json = Json.parse(stream) - } yield (json \ "objects").get.as[Seq[InputTechnique]] - - private def createFromInput(inputTechnique: InputTechnique)(implicit graph: Graph, authContext: AuthContext): Try[Technique with Entity] = - if (inputTechnique.external_id.isEmpty) - Failure(BadRequestError(s"A technique with no MITRE id cannot be imported")) - else if (techniqueSrv.startTraversal.alreadyImported(inputTechnique.external_id)) - Failure(BadRequestError(s"A technique with MITRE id '${inputTechnique.external_id}' already exists in this organisation")) - else - for { - technique <- techniqueSrv.createEntity(inputTechnique.toTechnique) - _ = if (inputTechnique.x_mitre_is_subtechnique.getOrElse(false)) linkTechnique(technique) - } yield technique - - private def linkTechnique(child: Technique with Entity)(implicit graph: Graph, authContext: AuthContext): Try[Unit] = { - val firstDot = child.techniqueId.indexOf(".") - val parentId = child.techniqueId.substring(0, firstDot) - for { - parent <- techniqueSrv.startTraversal.getByTechniqueId(parentId).getOrFail("Technique") - } yield techniqueSrv.setParent(child, parent) - } -} diff --git a/thehive/app/org/thp/thehive/controllers/v1/TheHiveQueryExecutor.scala b/thehive/app/org/thp/thehive/controllers/v1/TheHiveQueryExecutor.scala index 96f4435a2c..123291655e 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/TheHiveQueryExecutor.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/TheHiveQueryExecutor.scala @@ -29,10 +29,11 @@ class TheHiveQueryExecutor @Inject() ( observableCtrl: ObservableCtrl, observableTypeCtrl: ObservableTypeCtrl, organisationCtrl: OrganisationCtrl, + patternCtrl: PatternCtrl, + procedureCtrl: ProcedureCtrl, profileCtrl: ProfileCtrl, shareCtrl: ShareCtrl, taskCtrl: TaskCtrl, - techniqueCtrl: TechniqueCtrl, userCtrl: UserCtrl, // dashboardCtrl: DashboardCtrl, properties: Properties, @@ -52,11 +53,12 @@ class TheHiveQueryExecutor @Inject() ( observableTypeCtrl, organisationCtrl, // pageCtrl, + patternCtrl, + procedureCtrl, profileCtrl, shareCtrl, // tagCtrl, taskCtrl, - techniqueCtrl, userCtrl ) diff --git a/thehive/app/org/thp/thehive/models/Case.scala b/thehive/app/org/thp/thehive/models/Case.scala index da04ceff3b..48ad522a2b 100644 --- a/thehive/app/org/thp/thehive/models/Case.scala +++ b/thehive/app/org/thp/thehive/models/Case.scala @@ -77,12 +77,8 @@ case class CaseUser() @BuildEdgeEntity[Case, CaseTemplate] case class CaseCaseTemplate() 
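For context on the model change just below: the CaseTechnique edge, which carried a description, gives way to a bare CaseProcedure edge, and the descriptive data moves onto the new Procedure vertex introduced later in this patch, which in turn points at its ATT&CK Pattern through a ProcedurePattern edge. Listing the pattern ids attached to a case is still left as a follow-up in Router.scala, but with these edges in place such a query could look roughly like the sketch below. This is illustrative only and not part of this patch series; it reuses the Traversal/Ops style of PatternOps and ProcedureOps, the helper names are made up, and the edge directions assume the Case -> Procedure and Procedure -> Pattern declarations added in this patch.

    import org.thp.scalligraph.traversal.Traversal
    import org.thp.scalligraph.traversal.TraversalOps._
    import org.thp.thehive.models.{Case, CaseProcedure, Pattern, Procedure, ProcedurePattern}

    // Hypothetical helper, not introduced by this patch: walk the new edges
    // Case -(CaseProcedure)-> Procedure -(ProcedurePattern)-> Pattern.
    object CasePatternSketch {
      implicit class CasePatternOpsSketch(traversal: Traversal.V[Case]) {
        // All procedures recorded on a case
        def procedures: Traversal.V[Procedure] =
          traversal.out[CaseProcedure].v[Procedure]
        // The ATT&CK patterns those procedures reference
        def patterns: Traversal.V[Pattern] =
          procedures.out[ProcedurePattern].v[Pattern]
      }
    }

Keeping the link as a dedicated Procedure vertex, rather than as properties on the edge, is what lets it carry its own description and occurence date and receive its own audit entries (see the procedure audit added to AuditSrv later in this patch).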
-@BuildEdgeEntity[Case, Technique] -case class CaseTechnique( - description: Option[String] = None - // date occurence - // date sighting -) +@BuildEdgeEntity[Case, Procedure] +case class CaseProcedure() @BuildVertexEntity @DefineIndex(IndexType.unique, "number") diff --git a/thehive/app/org/thp/thehive/models/Pattern.scala b/thehive/app/org/thp/thehive/models/Pattern.scala new file mode 100644 index 0000000000..5c090c5ea8 --- /dev/null +++ b/thehive/app/org/thp/thehive/models/Pattern.scala @@ -0,0 +1,39 @@ +package org.thp.thehive.models + +import org.thp.scalligraph.models.Entity +import org.thp.scalligraph.{BuildEdgeEntity, BuildVertexEntity, EntityId} + +import java.util.Date + +@BuildVertexEntity +case class Pattern( + patternId: String, + name: String, + description: Option[String], + tactics: Seq[String], + url: String, + patternType: String, + platforms: Seq[String], + dataSources: Seq[String], + version: Option[String] +) + +@BuildEdgeEntity[Pattern, Pattern] +case class PatternPattern() + +case class RichPattern(pattern: Pattern with Entity, parent: Option[Pattern with Entity]) { + def patternId: String = pattern.patternId + def name: String = pattern.name + def description: Option[String] = pattern.description + def tactics: Seq[String] = pattern.tactics + def url: String = pattern.url + def patternType: String = pattern.patternType + def platforms: Seq[String] = pattern.platforms + def dataSources: Seq[String] = pattern.dataSources + def version: Option[String] = pattern.version + def _id: EntityId = pattern._id + def _createdAt: Date = pattern._createdAt + def _createdBy: String = pattern._createdBy + def _updatedAt: Option[Date] = pattern._updatedAt + def _updatedBy: Option[String] = pattern._updatedBy +} diff --git a/thehive/app/org/thp/thehive/models/Permissions.scala b/thehive/app/org/thp/thehive/models/Permissions.scala index 9b049a0745..e889467b6c 100644 --- a/thehive/app/org/thp/thehive/models/Permissions.scala +++ b/thehive/app/org/thp/thehive/models/Permissions.scala @@ -20,7 +20,8 @@ object Permissions extends Perms { lazy val manageAnalyse: PermissionDesc = PermissionDesc("manageAnalyse", "Run Cortex analyzer", "organisation") lazy val managePage: PermissionDesc = PermissionDesc("managePage", "Manage pages", "organisation") lazy val manageObservableTemplate: PermissionDesc = PermissionDesc("manageObservableTemplate", "Manage observable types", "admin") - lazy val manageTechnique: PermissionDesc = PermissionDesc("manageTechnique", "Manage techniques", "admin") + lazy val managePattern: PermissionDesc = PermissionDesc("managePattern", "Manage patterns", "admin") + lazy val manageProcedure: PermissionDesc = PermissionDesc("manageProcedure", "Manage procedures", "admin") lazy val accessTheHiveFS: PermissionDesc = PermissionDesc("accessTheHiveFS", "Access to TheHiveFS", "organisation") lazy val list: Set[PermissionDesc] = @@ -42,7 +43,8 @@ object Permissions extends Perms { manageAnalyse, managePage, manageObservableTemplate, - manageTechnique, + managePattern, + manageProcedure, accessTheHiveFS ) diff --git a/thehive/app/org/thp/thehive/models/Procedure.scala b/thehive/app/org/thp/thehive/models/Procedure.scala new file mode 100644 index 0000000000..8d587f69d2 --- /dev/null +++ b/thehive/app/org/thp/thehive/models/Procedure.scala @@ -0,0 +1,27 @@ +package org.thp.thehive.models + +import org.thp.scalligraph.models.Entity +import org.thp.scalligraph.{BuildEdgeEntity, BuildVertexEntity, EntityId} + +import java.util.Date + +@BuildVertexEntity +case class Procedure( + 
description: String, + occurence: Date + // metadata +) + +@BuildEdgeEntity[Procedure, Pattern] +case class ProcedurePattern() + +case class RichProcedure(procedure: Procedure with Entity, pattern: Pattern with Entity) { + def description: String = procedure.description + def occurence: Date = procedure.occurence + def _id: EntityId = procedure._id + def _createdAt: Date = procedure._createdAt + def _createdBy: String = procedure._createdBy + def _updatedAt: Option[Date] = procedure._updatedAt + def _updatedBy: Option[String] = procedure._updatedBy + +} diff --git a/thehive/app/org/thp/thehive/models/Technique.scala b/thehive/app/org/thp/thehive/models/Technique.scala deleted file mode 100644 index 2f37ca0a8c..0000000000 --- a/thehive/app/org/thp/thehive/models/Technique.scala +++ /dev/null @@ -1,39 +0,0 @@ -package org.thp.thehive.models - -import org.thp.scalligraph.models.Entity -import org.thp.scalligraph.{BuildEdgeEntity, BuildVertexEntity, EntityId} - -import java.util.Date - -@BuildVertexEntity -case class Technique( - techniqueId: String, - name: String, - description: Option[String], - tactics: Seq[String], - url: String, - techniqueType: String, - platforms: Seq[String], - dataSources: Seq[String], - version: Option[String] -) - -@BuildEdgeEntity[Technique, Technique] -case class TechniqueTechnique() - -case class RichTechnique(technique: Technique with Entity, parent: Option[Technique with Entity]) { - def techniqueId: String = technique.techniqueId - def name: String = technique.name - def description: Option[String] = technique.description - def tactics: Seq[String] = technique.tactics - def url: String = technique.url - def techniqueType: String = technique.techniqueType - def platforms: Seq[String] = technique.platforms - def dataSources: Seq[String] = technique.dataSources - def version: Option[String] = technique.version - def _id: EntityId = technique._id - def _createdAt: Date = technique._createdAt - def _createdBy: String = technique._createdBy - def _updatedAt: Option[Date] = technique._updatedAt - def _updatedBy: Option[String] = technique._updatedBy -} diff --git a/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala b/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala index 4913d3617f..0cef21f50b 100644 --- a/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala +++ b/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala @@ -89,8 +89,12 @@ class TheHiveSchemaDefinition @Inject() extends Schema with UpdatableSchema { traversal.outE[ShareTask].raw.property("actionRequired", false).iterate() Success(()) } - .updateGraph("Add manageTechnique permission to admin profile", "Profile") { traversal => - traversal.unsafeHas("name", "admin").raw.property("permissions", "manageTechnique").iterate() + .updateGraph("Add managePattern permission to admin profile", "Profile") { traversal => + traversal.unsafeHas("name", "admin").raw.property("permissions", "managePattern").iterate() + Success(()) + } + .updateGraph("Add manageProcedure permission to admin profile", "Profile") { traversal => + traversal.unsafeHas("name", "admin").raw.property("permissions", "manageProcedure").iterate() Success(()) } diff --git a/thehive/app/org/thp/thehive/services/AuditSrv.scala b/thehive/app/org/thp/thehive/services/AuditSrv.scala index da4cd7981e..15faa169e6 100644 --- a/thehive/app/org/thp/thehive/services/AuditSrv.scala +++ b/thehive/app/org/thp/thehive/services/AuditSrv.scala @@ -32,25 +32,26 @@ class AuditSrv @Inject() ( )(implicit 
@Named("with-thehive-schema") db: Database) extends VertexSrv[Audit] { auditSrv => lazy val userSrv: UserSrv = userSrvProvider.get - val auditUserSrv = new EdgeSrv[AuditUser, Audit, User] + val alert = new SelfContextObjectAudit[Alert] + val alertToCase = new ObjectAudit[Alert, Case] val auditedSrv = new EdgeSrv[Audited, Audit, Product] val auditContextSrv = new EdgeSrv[AuditContext, Audit, Product] + val auditUserSrv = new EdgeSrv[AuditUser, Audit, User] val `case` = new SelfContextObjectAudit[Case] - val task = new SelfContextObjectAudit[Task] - val observable = new SelfContextObjectAudit[Observable] - val log = new ObjectAudit[Log, Task] val caseTemplate = new SelfContextObjectAudit[CaseTemplate] - val taskInTemplate = new ObjectAudit[Task, CaseTemplate] - val alert = new SelfContextObjectAudit[Alert] - val alertToCase = new ObjectAudit[Alert, Case] - val share = new ShareAudit - val observableInAlert = new ObjectAudit[Observable, Alert] - val user = new UserAudit + val customField = new SelfContextObjectAudit[CustomField] val dashboard = new SelfContextObjectAudit[Dashboard] + val log = new ObjectAudit[Log, Task] + val observable = new SelfContextObjectAudit[Observable] + val observableInAlert = new ObjectAudit[Observable, Alert] val organisation = new SelfContextObjectAudit[Organisation] - val profile = new SelfContextObjectAudit[Profile] - val customField = new SelfContextObjectAudit[CustomField] val page = new SelfContextObjectAudit[Page] + val procedure = new SelfContextObjectAudit[Procedure] + val profile = new SelfContextObjectAudit[Profile] + val share = new ShareAudit + val task = new SelfContextObjectAudit[Task] + val taskInTemplate = new ObjectAudit[Task, CaseTemplate] + val user = new UserAudit private val pendingAuditsLock = new Object private val transactionAuditIdsLock = new Object private val unauditedTransactionsLock = new Object @@ -173,7 +174,10 @@ class AuditSrv @Inject() ( def delete(entity: E with Entity, context: Option[C with Entity])(implicit graph: Graph, authContext: AuthContext): Try[Unit] = auditSrv.create(Audit(Audit.delete, entity, None), context, None) - def merge(entity: E with Entity, destination: C with Entity, details: Option[JsObject] = None)(implicit graph: Graph, authContext: AuthContext): Try[Unit] = + def merge(entity: E with Entity, destination: C with Entity, details: Option[JsObject] = None)(implicit + graph: Graph, + authContext: AuthContext + ): Try[Unit] = auditSrv.create(Audit(Audit.merge, destination, details.map(_.toString())), Some(destination), Some(destination)) } @@ -186,7 +190,10 @@ class AuditSrv @Inject() ( if (details == JsObject.empty) Success(()) else auditSrv.create(Audit(Audit.update, entity, Some(details.toString)), Some(entity), Some(entity)) - def delete(entity: E with Entity, context: Product with Entity, details: Option[JsObject] = None)(implicit graph: Graph, authContext: AuthContext): Try[Unit] = + def delete(entity: E with Entity, context: Product with Entity, details: Option[JsObject] = None)(implicit + graph: Graph, + authContext: AuthContext + ): Try[Unit] = auditSrv.create(Audit(Audit.delete, entity, details.map(_.toString())), Some(context), None) } diff --git a/thehive/app/org/thp/thehive/services/PatternSrv.scala b/thehive/app/org/thp/thehive/services/PatternSrv.scala new file mode 100644 index 0000000000..1c4e63d50a --- /dev/null +++ b/thehive/app/org/thp/thehive/services/PatternSrv.scala @@ -0,0 +1,60 @@ +package org.thp.thehive.services + +import org.apache.tinkerpop.gremlin.structure.Graph +import 
org.thp.scalligraph.EntityIdOrName +import org.thp.scalligraph.auth.AuthContext +import org.thp.scalligraph.models.{Database, Entity} +import org.thp.scalligraph.services._ +import org.thp.scalligraph.traversal.TraversalOps._ +import org.thp.scalligraph.traversal.{Converter, Traversal} +import org.thp.thehive.models._ +import org.thp.thehive.services.PatternOps._ + +import java.util.{Map => JMap} +import javax.inject.{Inject, Named, Singleton} +import scala.util.{Success, Try} + +@Singleton +class PatternSrv @Inject() ()(implicit + @Named("with-thehive-schema") db: Database +) extends VertexSrv[Pattern] { + val patternPatternSrv = new EdgeSrv[PatternPattern, Pattern, Pattern] + + def parentExists(child: Pattern with Entity, parent: Pattern with Entity)(implicit graph: Graph): Boolean = + child._id == parent._id || get(child).parent.getEntity(parent).exists + + def setParent(child: Pattern with Entity, parent: Pattern with Entity)(implicit authContext: AuthContext, graph: Graph): Try[Unit] = + if (parentExists(child, parent)) Success(()) + else patternPatternSrv.create(PatternPattern(), child, parent).map(_ => ()) + + override def getByName(name: String)(implicit graph: Graph): Traversal.V[Pattern] = + Try(startTraversal.getByPatternId(name)).getOrElse(startTraversal.limit(0)) + +} + +object PatternOps { + implicit class PatternOpsDefs(traversal: Traversal.V[Pattern]) { + def get(idOrName: EntityIdOrName): Traversal.V[Pattern] = + idOrName.fold(traversal.getByIds(_), _ => traversal.limit(0)) + + def getByPatternId(patternId: String): Traversal.V[Pattern] = traversal.has(_.patternId, patternId) + + def parent: Traversal.V[Pattern] = + traversal.in[PatternPattern].v[Pattern] + + def alreadyImported(patternId: String): Boolean = + traversal.getByPatternId(patternId).exists + + def richPattern: Traversal[RichPattern, JMap[String, Any], Converter[RichPattern, JMap[String, Any]]] = + traversal + .project( + _.by + .by(_.in[PatternPattern].v[Pattern].fold) + ) + .domainMap { + case (pattern, parent) => + RichPattern(pattern, parent.headOption) + } + + } +} diff --git a/thehive/app/org/thp/thehive/services/ProcedureSrv.scala b/thehive/app/org/thp/thehive/services/ProcedureSrv.scala new file mode 100644 index 0000000000..65be83e721 --- /dev/null +++ b/thehive/app/org/thp/thehive/services/ProcedureSrv.scala @@ -0,0 +1,55 @@ +package org.thp.thehive.services + +import org.apache.tinkerpop.gremlin.structure.Graph +import org.thp.scalligraph.EntityIdOrName +import org.thp.scalligraph.auth.AuthContext +import org.thp.scalligraph.models.Database +import org.thp.scalligraph.services._ +import org.thp.scalligraph.traversal.TraversalOps.TraversalOpsDefs +import org.thp.scalligraph.traversal.{Converter, StepLabel, Traversal} +import org.thp.thehive.controllers.v1.Conversion._ +import org.thp.thehive.models._ + +import java.util.{Map => JMap} +import javax.inject.{Inject, Named, Singleton} +import scala.util.Try + +@Singleton +class ProcedureSrv @Inject() ( + auditSrv: AuditSrv, + caseSrv: CaseSrv, + patternSrv: PatternSrv +)(implicit + @Named("with-thehive-schema") db: Database +) extends VertexSrv[Procedure] { + val caseProcedureSrv = new EdgeSrv[CaseProcedure, Case, Procedure] + val procedurePatternSrv = new EdgeSrv[ProcedurePattern, Procedure, Pattern] + + def create(p: Procedure, caseId: String, patternId: String)(implicit graph: Graph, authContext: AuthContext): Try[RichProcedure] = + for { + caze <- caseSrv.getOrFail(EntityIdOrName(caseId)) + pattern <- patternSrv.getOrFail(EntityIdOrName(patternId)) + 
procedure <- createEntity(p) + _ <- caseProcedureSrv.create(CaseProcedure(), caze, procedure) + _ <- procedurePatternSrv.create(ProcedurePattern(), procedure, pattern) + richProcedure = RichProcedure(procedure, pattern) + _ <- auditSrv.procedure.create(procedure, richProcedure.toJson) + } yield richProcedure + +} + +object ProcedureOps { + implicit class ProcedureOpsDefs(traversal: Traversal.V[Procedure]) { + def richProcedure: Traversal[RichProcedure, JMap[String, Any], Converter[RichProcedure, JMap[String, Any]]] = { + val procedure = StepLabel.v[Procedure] + val pattern = StepLabel.v[Pattern] + traversal + .as(procedure) + .in[ProcedurePattern] + .v[Pattern] + .as(pattern) + .select((procedure, pattern)) + .domainMap { case (procedure, pattern) => RichProcedure(procedure, pattern) } + } + } +} diff --git a/thehive/app/org/thp/thehive/services/TechniqueSrv.scala b/thehive/app/org/thp/thehive/services/TechniqueSrv.scala deleted file mode 100644 index d35aca6b9f..0000000000 --- a/thehive/app/org/thp/thehive/services/TechniqueSrv.scala +++ /dev/null @@ -1,58 +0,0 @@ -package org.thp.thehive.services - -import org.apache.tinkerpop.gremlin.structure.Graph -import org.thp.scalligraph.EntityIdOrName -import org.thp.scalligraph.auth.AuthContext -import org.thp.scalligraph.models.{Database, Entity} -import org.thp.scalligraph.services._ -import org.thp.scalligraph.traversal.TraversalOps._ -import org.thp.scalligraph.traversal.{Converter, Traversal} -import org.thp.thehive.models._ -import org.thp.thehive.services.TechniqueOps._ - -import java.util.{Map => JMap} -import javax.inject.{Inject, Named, Singleton} -import scala.util.{Success, Try} - -@Singleton -class TechniqueSrv @Inject() ()(implicit - @Named("with-thehive-schema") db: Database -) extends VertexSrv[Technique] { - val techniqueTechniqueSrv = new EdgeSrv[TechniqueTechnique, Technique, Technique] - - def parentExists(child: Technique with Entity, parent: Technique with Entity)(implicit graph: Graph): Boolean = - child._id == parent._id || get(child).parent.getEntity(parent).exists - - def setParent(child: Technique with Entity, parent: Technique with Entity)(implicit authContext: AuthContext, graph: Graph): Try[Unit] = - if (parentExists(child, parent)) Success(()) - else techniqueTechniqueSrv.create(TechniqueTechnique(), child, parent).map(_ => ()) - -} - -object TechniqueOps { - implicit class TechniqueOpsDefs(traversal: Traversal.V[Technique]) { - - def get(idOrName: EntityIdOrName): Traversal.V[Technique] = - idOrName.fold(traversal.getByIds(_), _ => traversal.limit(0)) - - def getByTechniqueId(techniqueId: String): Traversal.V[Technique] = traversal.has(_.techniqueId, techniqueId) - - def parent: Traversal.V[Technique] = - traversal.in[TechniqueTechnique].v[Technique] - - def alreadyImported(techniqueId: String): Boolean = - traversal.getByTechniqueId(techniqueId).exists - - def richTechnique: Traversal[RichTechnique, JMap[String, Any], Converter[RichTechnique, JMap[String, Any]]] = - traversal - .project( - _.by - .by(_.in[TechniqueTechnique].v[Technique].fold) - ) - .domainMap { - case (technique, parent) => - RichTechnique(technique, parent.headOption) - } - - } -} diff --git a/thehive/test/org/thp/thehive/DatabaseBuilder.scala b/thehive/test/org/thp/thehive/DatabaseBuilder.scala index 51767a822f..73c4f6ad59 100644 --- a/thehive/test/org/thp/thehive/DatabaseBuilder.scala +++ b/thehive/test/org/thp/thehive/DatabaseBuilder.scala @@ -22,27 +22,29 @@ import scala.util.{Failure, Success, Try} @Singleton class DatabaseBuilder 
@Inject() ( schema: Schema, - userSrv: UserSrv, - organisationSrv: OrganisationSrv, - profileSrv: ProfileSrv, + alertSrv: AlertSrv, + attachmentSrv: AttachmentSrv, caseSrv: CaseSrv, - customFieldSrv: CustomFieldSrv, caseTemplateSrv: CaseTemplateSrv, + customFieldSrv: CustomFieldSrv, + dashboardSrv: DashboardSrv, + dataSrv: DataSrv, impactStatusSrv: ImpactStatusSrv, - resolutionStatusSrv: ResolutionStatusSrv, - shareSrv: ShareSrv, - roleSrv: RoleSrv, - observableSrv: ObservableSrv, - observableTypeSrv: ObservableTypeSrv, - taskSrv: TaskSrv, - tagSrv: TagSrv, keyValueSrv: KeyValueSrv, - dataSrv: DataSrv, logSrv: LogSrv, - alertSrv: AlertSrv, - attachmentSrv: AttachmentSrv, - dashboardSrv: DashboardSrv, + observableSrv: ObservableSrv, + observableTypeSrv: ObservableTypeSrv, + organisationSrv: OrganisationSrv, pageSrv: PageSrv, + patternSrv: PatternSrv, + procedureSrv: ProcedureSrv, + profileSrv: ProfileSrv, + resolutionStatusSrv: ResolutionStatusSrv, + roleSrv: RoleSrv, + shareSrv: ShareSrv, + tagSrv: TagSrv, + taskSrv: TaskSrv, + userSrv: UserSrv, integrityChecks: Set[GenIntegrityCheckOps] ) { @@ -64,26 +66,28 @@ class DatabaseBuilder @Inject() ( db.tryTransaction { implicit graph => val idMap = createVertex(caseSrv, FieldsParser[Case]) ++ - createVertex(userSrv, FieldsParser[User]) ++ - createVertex(customFieldSrv, FieldsParser[CustomField]) ++ - createVertex(organisationSrv, FieldsParser[Organisation]) ++ + createVertex(alertSrv, FieldsParser[Alert]) ++ + createVertex(attachmentSrv, FieldsParser[Attachment]) ++ createVertex(caseTemplateSrv, FieldsParser[CaseTemplate]) ++ - createVertex(shareSrv, FieldsParser[Share]) ++ - createVertex(roleSrv, FieldsParser[Role]) ++ - createVertex(profileSrv, FieldsParser[Profile]) ++ - createVertex(observableSrv, FieldsParser[Observable]) ++ - createVertex(observableTypeSrv, FieldsParser[ObservableType]) ++ - createVertex(taskSrv, FieldsParser[Task]) ++ - createVertex(keyValueSrv, FieldsParser[KeyValue]) ++ + createVertex(customFieldSrv, FieldsParser[CustomField]) ++ + createVertex(dashboardSrv, FieldsParser[Dashboard]) ++ createVertex(dataSrv, FieldsParser[Data]) ++ + createVertex(impactStatusSrv, FieldsParser[ImpactStatus]) ++ + createVertex(keyValueSrv, FieldsParser[KeyValue]) ++ createVertex(logSrv, FieldsParser[Log]) ++ - createVertex(alertSrv, FieldsParser[Alert]) ++ + createVertex(observableSrv, FieldsParser[Observable]) ++ + createVertex(observableTypeSrv, FieldsParser[ObservableType]) ++ + createVertex(organisationSrv, FieldsParser[Organisation]) ++ + createVertex(pageSrv, FieldsParser[Page]) ++ + createVertex(patternSrv, FieldsParser[Pattern]) ++ + createVertex(procedureSrv, FieldsParser[Procedure]) ++ + createVertex(profileSrv, FieldsParser[Profile]) ++ createVertex(resolutionStatusSrv, FieldsParser[ResolutionStatus]) ++ - createVertex(impactStatusSrv, FieldsParser[ImpactStatus]) ++ - createVertex(attachmentSrv, FieldsParser[Attachment]) ++ + createVertex(roleSrv, FieldsParser[Role]) ++ + createVertex(shareSrv, FieldsParser[Share]) ++ createVertex(tagSrv, FieldsParser[Tag]) ++ - createVertex(pageSrv, FieldsParser[Page]) ++ - createVertex(dashboardSrv, FieldsParser[Dashboard]) + createVertex(taskSrv, FieldsParser[Task]) ++ + createVertex(userSrv, FieldsParser[User]) createEdge(organisationSrv.organisationOrganisationSrv, organisationSrv, organisationSrv, FieldsParser[OrganisationOrganisation], idMap) createEdge(organisationSrv.organisationShareSrv, organisationSrv, shareSrv, FieldsParser[OrganisationShare], idMap) @@ -131,6 +135,12 @@ class 
DatabaseBuilder @Inject() ( createEdge(dashboardSrv.dashboardUserSrv, dashboardSrv, userSrv, FieldsParser[DashboardUser], idMap) createEdge(dashboardSrv.organisationDashboardSrv, organisationSrv, dashboardSrv, FieldsParser[OrganisationDashboard], idMap) + + createEdge(patternSrv.patternPatternSrv, patternSrv, patternSrv, FieldsParser[PatternPattern], idMap) + + createEdge(procedureSrv.caseProcedureSrv, caseSrv, procedureSrv, FieldsParser[CaseProcedure], idMap) + createEdge(procedureSrv.procedurePatternSrv, procedureSrv, patternSrv, FieldsParser[ProcedurePattern], idMap) + Success(()) } } diff --git a/thehive/test/org/thp/thehive/controllers/v1/PatternCtrlTest.scala b/thehive/test/org/thp/thehive/controllers/v1/PatternCtrlTest.scala new file mode 100644 index 0000000000..50cefe09fb --- /dev/null +++ b/thehive/test/org/thp/thehive/controllers/v1/PatternCtrlTest.scala @@ -0,0 +1,56 @@ +package org.thp.thehive.controllers.v1 + +import io.scalaland.chimney.dsl._ + +import org.thp.thehive.TestAppBuilder +import org.thp.thehive.dto.v1.OutputPattern +import play.api.test.{FakeRequest, PlaySpecification} + +case class TestPattern( + patternId: String, + name: String, + description: Option[String], + tactics: Seq[String], + url: String, + patternType: String, + platforms: Seq[String], + dataSources: Seq[String], + version: Option[String] +) + +object TestPattern { + def apply(outputPattern: OutputPattern): TestPattern = + outputPattern.into[TestPattern].transform +} + +class PatternCtrlTest extends PlaySpecification with TestAppBuilder { + "pattern controller" should { + // TODO + /* + "import json patterns" in testApp { app => + + } + */ + + "get a existing pattern" in testApp { app => + val request = FakeRequest("GET", "/api/v1/pattern/T123") + .withHeaders("user" -> "certuser@thehive.local") + + val result = app[PatternCtrl].get("T123")(request) + status(result) must beEqualTo(200).updateMessage(s => s"$s\n${contentAsString(result)}") + val resultPattern = contentAsJson(result).as[OutputPattern] + + TestPattern(resultPattern) must_=== TestPattern( + "T123", + "testPattern1", + Some("The testPattern 1"), + Seq("testTactic1", "testTactic2"), + "http://test.pattern.url", + "unit-test", + Seq(), + Seq(), + Some("1.0") + ) + } + } +} diff --git a/thehive/test/org/thp/thehive/controllers/v1/ProcedureCtrlTest.scala b/thehive/test/org/thp/thehive/controllers/v1/ProcedureCtrlTest.scala new file mode 100644 index 0000000000..3456edf8a4 --- /dev/null +++ b/thehive/test/org/thp/thehive/controllers/v1/ProcedureCtrlTest.scala @@ -0,0 +1,49 @@ +package org.thp.thehive.controllers.v1 + +import io.scalaland.chimney.dsl.TransformerOps +import org.thp.thehive.TestAppBuilder +import org.thp.thehive.dto.v1.{InputProcedure, OutputProcedure} +import play.api.libs.json.Json +import play.api.test.{FakeRequest, PlaySpecification} + +import java.util.Date + +case class TestProcedure( + description: String, + occurence: Date, + patternId: String +) + +object TestProcedure { + def apply(outputProcedure: OutputProcedure): TestProcedure = + outputProcedure.into[TestProcedure].transform +} + +class ProcedureCtrlTest extends PlaySpecification with TestAppBuilder { + "procedure controller" should { + "create a valid procedure" in testApp { app => + val procedureDate = new Date() + val inputProcedure = InputProcedure( + "testProcedure2", + procedureDate, + "1", + "T123" + ) + + val request = FakeRequest("POST", "/api/v1/procedure") + .withJsonBody(Json.toJson(inputProcedure)) + .withHeaders("user" -> "admin@thehive.local") + + val 
result = app[ProcedureCtrl].create(request) + status(result) must beEqualTo(201).updateMessage(s => s"$s\n${contentAsString(result)}") + + val resultProcedure = contentAsJson(result).as[OutputProcedure] + + TestProcedure(resultProcedure) must_=== TestProcedure( + "testProcedure2", + procedureDate, + "T123" + ) + } + } +} diff --git a/thehive/test/resources/data/CaseProcedure.json b/thehive/test/resources/data/CaseProcedure.json new file mode 100644 index 0000000000..f37bf68e40 --- /dev/null +++ b/thehive/test/resources/data/CaseProcedure.json @@ -0,0 +1,3 @@ +[ + {"from": "case1", "to": "testProcedure1"} +] \ No newline at end of file diff --git a/thehive/test/resources/data/Pattern.json b/thehive/test/resources/data/Pattern.json new file mode 100644 index 0000000000..593dbd169a --- /dev/null +++ b/thehive/test/resources/data/Pattern.json @@ -0,0 +1,17 @@ +[ + { + "id": "testPattern1", + "patternId": "T123", + "name": "testPattern1", + "description": "The testPattern 1", + "tactics": [ + "testTactic1", + "testTactic2" + ], + "url": "http://test.pattern.url", + "patternType": "unit-test", + "platforms": [], + "dataSources": [], + "version": "1.0" + } +] \ No newline at end of file diff --git a/thehive/test/resources/data/Procedure.json b/thehive/test/resources/data/Procedure.json new file mode 100644 index 0000000000..abc17a14db --- /dev/null +++ b/thehive/test/resources/data/Procedure.json @@ -0,0 +1,7 @@ +[ + { + "id": "testProcedure1", + "description": "The testProcedure 1", + "occurence": 1531667370000 + } +] \ No newline at end of file diff --git a/thehive/test/resources/data/ProcedurePattern.json b/thehive/test/resources/data/ProcedurePattern.json new file mode 100644 index 0000000000..f8c6c9b5a0 --- /dev/null +++ b/thehive/test/resources/data/ProcedurePattern.json @@ -0,0 +1,3 @@ +[ + {"from": "testProcedure1", "to": "testPattern1"} +] \ No newline at end of file From e192f529535ba848b942398a09b9fc99cd9407e2 Mon Sep 17 00:00:00 2001 From: Robin Riclet Date: Thu, 14 Jan 2021 14:15:27 +0100 Subject: [PATCH 83/93] Added procedure delete & unit tests --- .../thehive/controllers/v0/Conversion.scala | 1 + .../controllers/v1/ProcedureCtrl.scala | 19 +++++++++++++++++++ .../thp/thehive/controllers/v1/Router.scala | 9 ++++----- .../thp/thehive/services/ProcedureSrv.scala | 9 ++++++++- .../controllers/v1/ProcedureCtrlTest.scala | 17 +++++++++++++++++ 5 files changed, 49 insertions(+), 6 deletions(-) diff --git a/thehive/app/org/thp/thehive/controllers/v0/Conversion.scala b/thehive/app/org/thp/thehive/controllers/v0/Conversion.scala index f972afd972..ff86a24839 100644 --- a/thehive/app/org/thp/thehive/controllers/v0/Conversion.scala +++ b/thehive/app/org/thp/thehive/controllers/v0/Conversion.scala @@ -344,6 +344,7 @@ object Conversion { implicit val reportTagWrites: Writes[ReportTag] = Writes[ReportTag] { tag => Json.obj("level" -> tag.level.toString, "namespace" -> tag.namespace, "predicate" -> tag.predicate, "value" -> tag.value) } + implicit val observableOutput: Renderer.Aux[RichObservable, OutputObservable] = Renderer.toJson[RichObservable, OutputObservable]( _.into[OutputObservable] .withFieldConst(_._type, "case_artifact") diff --git a/thehive/app/org/thp/thehive/controllers/v1/ProcedureCtrl.scala b/thehive/app/org/thp/thehive/controllers/v1/ProcedureCtrl.scala index 02de6f898f..ba10c6ce90 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/ProcedureCtrl.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/ProcedureCtrl.scala @@ -52,4 +52,23 @@ class ProcedureCtrl @Inject() ( } 
yield Results.Created(richProcedure.toJson) } + def get(procedureId: String): Action[AnyContent] = + entrypoint("get procedure") + .authRoTransaction(db) { _ => implicit graph => + procedureSrv + .get(EntityIdOrName(procedureId)) + .richProcedure + .getOrFail("Procedure") + .map(richProcedure => Results.Ok(richProcedure.toJson)) + } + + def delete(procedureId: String): Action[AnyContent] = + entrypoint("delete procedure") + .authPermittedTransaction(db, Permissions.manageProcedure) { implicit request => implicit graph => + procedureSrv + .getOrFail(EntityIdOrName(procedureId)) + .flatMap(procedureSrv.remove) + .map(_ => Results.NoContent) + } + } diff --git a/thehive/app/org/thp/thehive/controllers/v1/Router.scala b/thehive/app/org/thp/thehive/controllers/v1/Router.scala index c695d8a5b6..4a8c038a4e 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/Router.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/Router.scala @@ -135,11 +135,10 @@ class Router @Inject() ( case POST(p"/pattern/import/mitre") => patternCtrl.importMitre case GET(p"/pattern/$patternId") => patternCtrl.get(patternId) - // list tactics - // link to pattern - // unlink - case POST(p"/procedure") => procedureCtrl.create - // Query : pattern name, pattern id + + case POST(p"/procedure") => procedureCtrl.create + case GET(p"/procedure/$procedureId") => procedureCtrl.get(procedureId) + case DELETE(p"/procedure/$procedureId") => procedureCtrl.delete(procedureId) // List pattern id associated to a Case case POST(p"/profile") => profileCtrl.create diff --git a/thehive/app/org/thp/thehive/services/ProcedureSrv.scala b/thehive/app/org/thp/thehive/services/ProcedureSrv.scala index 65be83e721..32b574e040 100644 --- a/thehive/app/org/thp/thehive/services/ProcedureSrv.scala +++ b/thehive/app/org/thp/thehive/services/ProcedureSrv.scala @@ -3,7 +3,7 @@ package org.thp.thehive.services import org.apache.tinkerpop.gremlin.structure.Graph import org.thp.scalligraph.EntityIdOrName import org.thp.scalligraph.auth.AuthContext -import org.thp.scalligraph.models.Database +import org.thp.scalligraph.models.{Database, Entity} import org.thp.scalligraph.services._ import org.thp.scalligraph.traversal.TraversalOps.TraversalOpsDefs import org.thp.scalligraph.traversal.{Converter, StepLabel, Traversal} @@ -18,6 +18,7 @@ import scala.util.Try class ProcedureSrv @Inject() ( auditSrv: AuditSrv, caseSrv: CaseSrv, + organisationSrv: OrganisationSrv, patternSrv: PatternSrv )(implicit @Named("with-thehive-schema") db: Database @@ -36,6 +37,12 @@ class ProcedureSrv @Inject() ( _ <- auditSrv.procedure.create(procedure, richProcedure.toJson) } yield richProcedure + def remove(procedure: Procedure with Entity)(implicit graph: Graph, authContext: AuthContext): Try[Unit] = + for { + organisation <- organisationSrv.getOrFail(authContext.organisation) + _ <- auditSrv.procedure.delete(procedure, organisation) + } yield get(procedure).remove() + } object ProcedureOps { diff --git a/thehive/test/org/thp/thehive/controllers/v1/ProcedureCtrlTest.scala b/thehive/test/org/thp/thehive/controllers/v1/ProcedureCtrlTest.scala index 3456edf8a4..f8c88b4ce4 100644 --- a/thehive/test/org/thp/thehive/controllers/v1/ProcedureCtrlTest.scala +++ b/thehive/test/org/thp/thehive/controllers/v1/ProcedureCtrlTest.scala @@ -45,5 +45,22 @@ class ProcedureCtrlTest extends PlaySpecification with TestAppBuilder { "T123" ) } + + "delete a procedure" in testApp { app => + val request1 = FakeRequest("GET", "/api/v1/procedure/testProcedure1") + .withHeaders("user" -> 
"certuser@thehive.local") + val result1 = app[ProcedureCtrl].get("testProcedure1")(request1) + status(result1) must beEqualTo(200).updateMessage(s => s"$s\n${contentAsString(result1)}") + + val request2 = FakeRequest("DELETE", "/api/v1/procedure/testProcedure1") + .withHeaders("user" -> "admin@thehive.local") + val result2 = app[ProcedureCtrl].delete("testProcedure1")(request2) + status(result2) must beEqualTo(204).updateMessage(s => s"$s\n${contentAsString(result2)}") + + val request3 = FakeRequest("GET", "/api/v1/procedure/testProcedure1") + .withHeaders("user" -> "certuser@thehive.local") + val result3 = app[ProcedureCtrl].get("testProcedure1")(request3) + status(result3) must beEqualTo(404).updateMessage(s => s"$s\n${contentAsString(result3)}") + } } } From bb213f2de99d03dd18153841d9e54109f3604692 Mon Sep 17 00:00:00 2001 From: Robin Riclet Date: Thu, 14 Jan 2021 17:25:37 +0100 Subject: [PATCH 84/93] Fixed procedure delete --- .../thehive/controllers/v1/PatternCtrl.scala | 3 +-- .../thp/thehive/controllers/v1/Router.scala | 2 ++ .../thp/thehive/services/ProcedureSrv.scala | 3 +++ .../controllers/v1/ProcedureCtrlTest.scala | 27 +++++++++++++------ 4 files changed, 25 insertions(+), 10 deletions(-) diff --git a/thehive/app/org/thp/thehive/controllers/v1/PatternCtrl.scala b/thehive/app/org/thp/thehive/controllers/v1/PatternCtrl.scala index eb0859895a..e8f5b6f802 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/PatternCtrl.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/PatternCtrl.scala @@ -76,8 +76,7 @@ class PatternCtrl @Inject() ( entrypoint("get pattern") .authRoTransaction(db) { implicit request => implicit graph => patternSrv - .startTraversal - .getByPatternId(patternId) + .get(EntityIdOrName(patternId)) .richPattern .getOrFail("Pattern") .map(richPattern => Results.Ok(richPattern.toJson)) diff --git a/thehive/app/org/thp/thehive/controllers/v1/Router.scala b/thehive/app/org/thp/thehive/controllers/v1/Router.scala index 4a8c038a4e..ee9e3736fc 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/Router.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/Router.scala @@ -140,6 +140,8 @@ class Router @Inject() ( case GET(p"/procedure/$procedureId") => procedureCtrl.get(procedureId) case DELETE(p"/procedure/$procedureId") => procedureCtrl.delete(procedureId) // List pattern id associated to a Case + // Query filter by pattern name + // Query filter bu pattern Id case POST(p"/profile") => profileCtrl.create case GET(p"/profile/$profileId") => profileCtrl.get(profileId) diff --git a/thehive/app/org/thp/thehive/services/ProcedureSrv.scala b/thehive/app/org/thp/thehive/services/ProcedureSrv.scala index 32b574e040..2015b75bca 100644 --- a/thehive/app/org/thp/thehive/services/ProcedureSrv.scala +++ b/thehive/app/org/thp/thehive/services/ProcedureSrv.scala @@ -37,6 +37,9 @@ class ProcedureSrv @Inject() ( _ <- auditSrv.procedure.create(procedure, richProcedure.toJson) } yield richProcedure + override def get(idOrName: EntityIdOrName)(implicit graph: Graph): Traversal.V[Procedure] = + idOrName.fold(getByIds(_), _ => startTraversal.limit(0)) + def remove(procedure: Procedure with Entity)(implicit graph: Graph, authContext: AuthContext): Try[Unit] = for { organisation <- organisationSrv.getOrFail(authContext.organisation) diff --git a/thehive/test/org/thp/thehive/controllers/v1/ProcedureCtrlTest.scala b/thehive/test/org/thp/thehive/controllers/v1/ProcedureCtrlTest.scala index f8c88b4ce4..5ab405d449 100644 --- 
a/thehive/test/org/thp/thehive/controllers/v1/ProcedureCtrlTest.scala +++ b/thehive/test/org/thp/thehive/controllers/v1/ProcedureCtrlTest.scala @@ -47,19 +47,30 @@ class ProcedureCtrlTest extends PlaySpecification with TestAppBuilder { } "delete a procedure" in testApp { app => - val request1 = FakeRequest("GET", "/api/v1/procedure/testProcedure1") - .withHeaders("user" -> "certuser@thehive.local") - val result1 = app[ProcedureCtrl].get("testProcedure1")(request1) - status(result1) must beEqualTo(200).updateMessage(s => s"$s\n${contentAsString(result1)}") + val request1 = FakeRequest("POST", "/api/v1/procedure/testProcedure2") + .withJsonBody( + Json.toJson( + InputProcedure( + "testProcedure2", + new Date(), + "1", + "T123" + ) + ) + ) + .withHeaders("user" -> "admin@thehive.local") + val result1 = app[ProcedureCtrl].create(request1) + val procedureId = contentAsJson(result1).as[OutputProcedure]._id + status(result1) must beEqualTo(201).updateMessage(s => s"$s\n${contentAsString(result1)}") - val request2 = FakeRequest("DELETE", "/api/v1/procedure/testProcedure1") + val request2 = FakeRequest("DELETE", "/api/v1/procedure/testProcedure2") .withHeaders("user" -> "admin@thehive.local") - val result2 = app[ProcedureCtrl].delete("testProcedure1")(request2) + val result2 = app[ProcedureCtrl].delete(procedureId)(request2) status(result2) must beEqualTo(204).updateMessage(s => s"$s\n${contentAsString(result2)}") - val request3 = FakeRequest("GET", "/api/v1/procedure/testProcedure1") + val request3 = FakeRequest("GET", "/api/v1/procedure/testProcedure2") .withHeaders("user" -> "certuser@thehive.local") - val result3 = app[ProcedureCtrl].get("testProcedure1")(request3) + val result3 = app[ProcedureCtrl].get(procedureId)(request3) status(result3) must beEqualTo(404).updateMessage(s => s"$s\n${contentAsString(result3)}") } } From 645ba9cb5142384931df5142ef99ddcc06606559 Mon Sep 17 00:00:00 2001 From: Robin Riclet Date: Fri, 15 Jan 2021 14:32:29 +0100 Subject: [PATCH 85/93] Added pattern delete, query properties & unit tests --- .../thehive/controllers/v1/PatternCtrl.scala | 9 +++++++ .../thehive/controllers/v1/Properties.scala | 26 +++++++++++++------ .../thp/thehive/controllers/v1/Router.scala | 1 + .../org/thp/thehive/services/AuditSrv.scala | 1 + .../org/thp/thehive/services/PatternSrv.scala | 11 +++++++- .../controllers/v1/PatternCtrlTest.scala | 18 +++++++++++++ 6 files changed, 57 insertions(+), 9 deletions(-) diff --git a/thehive/app/org/thp/thehive/controllers/v1/PatternCtrl.scala b/thehive/app/org/thp/thehive/controllers/v1/PatternCtrl.scala index e8f5b6f802..6dd894e93c 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/PatternCtrl.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/PatternCtrl.scala @@ -82,6 +82,15 @@ class PatternCtrl @Inject() ( .map(richPattern => Results.Ok(richPattern.toJson)) } + def delete(patternId: String): Action[AnyContent] = + entrypoint("delete pattern") + .authPermittedTransaction(db, Permissions.managePattern) { implicit request => implicit graph => + patternSrv + .getOrFail(EntityIdOrName(patternId)) + .flatMap(patternSrv.remove) + .map(_ => Results.NoContent) + } + private def parseJsonFile(file: FFile): Try[Seq[InputPattern]] = for { stream <- Try(new FileInputStream(file.filepath.toString)) diff --git a/thehive/app/org/thp/thehive/controllers/v1/Properties.scala b/thehive/app/org/thp/thehive/controllers/v1/Properties.scala index 803e9c2912..f6783981be 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/Properties.scala +++ 
b/thehive/app/org/thp/thehive/controllers/v1/Properties.scala @@ -1,9 +1,5 @@ package org.thp.thehive.controllers.v1 -import java.lang.{Long => JLong} -import java.util.Date - -import javax.inject.{Inject, Named, Singleton} import org.thp.scalligraph.controllers.{FPathElem, FPathEmpty} import org.thp.scalligraph.models.{Database, UMapping} import org.thp.scalligraph.query.{PublicProperties, PublicPropertyListBuilder} @@ -19,13 +15,15 @@ import org.thp.thehive.services.CustomFieldOps._ import org.thp.thehive.services.LogOps._ import org.thp.thehive.services.ObservableOps._ import org.thp.thehive.services.OrganisationOps._ +import org.thp.thehive.services.ShareOps._ import org.thp.thehive.services.TagOps._ import org.thp.thehive.services.TaskOps._ -import org.thp.thehive.services.ShareOps._ import org.thp.thehive.services.UserOps._ import org.thp.thehive.services._ import play.api.libs.json.{JsObject, JsValue, Json} +import java.util.Date +import javax.inject.{Inject, Named, Singleton} import scala.util.Failure @Singleton @@ -352,12 +350,24 @@ class Properties @Inject() ( .property("description", UMapping.string)(_.field.updatable) .build - // TODO add fields lazy val pattern: PublicProperties = - PublicPropertyListBuilder[Pattern].build + PublicPropertyListBuilder[Pattern] + .property("patternId", UMapping.string)(_.field.readonly) + .property("name", UMapping.string)(_.field.readonly) + .property("description", UMapping.string.optional)(_.field.updatable) + .property("tactics", UMapping.string.sequence)(_.field.readonly) + .property("url", UMapping.string)(_.field.updatable) + .property("patternType", UMapping.string)(_.field.readonly) + .property("platforms", UMapping.string.sequence)(_.field.readonly) + .property("dataSources", UMapping.string.sequence)(_.field.readonly) + .property("version", UMapping.string.optional)(_.field.readonly) + .build lazy val procedure: PublicProperties = - PublicPropertyListBuilder[Procedure].build + PublicPropertyListBuilder[Procedure] + .property("description", UMapping.string)(_.field.updatable) + .property("occurence", UMapping.date)(_.field.readonly) + .build lazy val profile: PublicProperties = PublicPropertyListBuilder[Profile] diff --git a/thehive/app/org/thp/thehive/controllers/v1/Router.scala b/thehive/app/org/thp/thehive/controllers/v1/Router.scala index ee9e3736fc..91cf7fd582 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/Router.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/Router.scala @@ -135,6 +135,7 @@ class Router @Inject() ( case POST(p"/pattern/import/mitre") => patternCtrl.importMitre case GET(p"/pattern/$patternId") => patternCtrl.get(patternId) + case DELETE(p"/pattern/$patternId") => patternCtrl.delete(patternId) case POST(p"/procedure") => procedureCtrl.create case GET(p"/procedure/$procedureId") => procedureCtrl.get(procedureId) diff --git a/thehive/app/org/thp/thehive/services/AuditSrv.scala b/thehive/app/org/thp/thehive/services/AuditSrv.scala index 15faa169e6..c020a2969d 100644 --- a/thehive/app/org/thp/thehive/services/AuditSrv.scala +++ b/thehive/app/org/thp/thehive/services/AuditSrv.scala @@ -46,6 +46,7 @@ class AuditSrv @Inject() ( val observableInAlert = new ObjectAudit[Observable, Alert] val organisation = new SelfContextObjectAudit[Organisation] val page = new SelfContextObjectAudit[Page] + val pattern = new SelfContextObjectAudit[Pattern] val procedure = new SelfContextObjectAudit[Procedure] val profile = new SelfContextObjectAudit[Profile] val share = new ShareAudit diff --git 
a/thehive/app/org/thp/thehive/services/PatternSrv.scala b/thehive/app/org/thp/thehive/services/PatternSrv.scala index 1c4e63d50a..7c47c3c936 100644 --- a/thehive/app/org/thp/thehive/services/PatternSrv.scala +++ b/thehive/app/org/thp/thehive/services/PatternSrv.scala @@ -15,7 +15,10 @@ import javax.inject.{Inject, Named, Singleton} import scala.util.{Success, Try} @Singleton -class PatternSrv @Inject() ()(implicit +class PatternSrv @Inject() ( + auditSrv: AuditSrv, + organisationSrv: OrganisationSrv +)(implicit @Named("with-thehive-schema") db: Database ) extends VertexSrv[Pattern] { val patternPatternSrv = new EdgeSrv[PatternPattern, Pattern, Pattern] @@ -30,6 +33,12 @@ class PatternSrv @Inject() ()(implicit override def getByName(name: String)(implicit graph: Graph): Traversal.V[Pattern] = Try(startTraversal.getByPatternId(name)).getOrElse(startTraversal.limit(0)) + def remove(pattern: Pattern with Entity)(implicit graph: Graph, authContext: AuthContext): Try[Unit] = + for { + organisation <- organisationSrv.getOrFail(authContext.organisation) + _ <- auditSrv.pattern.delete(pattern, organisation) + } yield get(pattern).remove() + } object PatternOps { diff --git a/thehive/test/org/thp/thehive/controllers/v1/PatternCtrlTest.scala b/thehive/test/org/thp/thehive/controllers/v1/PatternCtrlTest.scala index 50cefe09fb..6548c104f6 100644 --- a/thehive/test/org/thp/thehive/controllers/v1/PatternCtrlTest.scala +++ b/thehive/test/org/thp/thehive/controllers/v1/PatternCtrlTest.scala @@ -52,5 +52,23 @@ class PatternCtrlTest extends PlaySpecification with TestAppBuilder { Some("1.0") ) } + + "delete a pattern" in testApp { app => + val request1 = FakeRequest("GET", "/api/v1/pattern/testPattern1") + .withHeaders("user" -> "certuser@thehive.local") + val result1 = app[PatternCtrl].get("T123")(request1) + status(result1) must beEqualTo(200).updateMessage(s => s"$s\n${contentAsString(result1)}") + + val request2 = FakeRequest("DELETE", "/api/v1/pattern/testPattern1") + .withHeaders("user" -> "admin@thehive.local") + val result2 = app[PatternCtrl].delete("T123")(request2) + status(result2) must beEqualTo(204).updateMessage(s => s"$s\n${contentAsString(result2)}") + + val request3 = FakeRequest("GET", "/api/v1/pattern/testPattern1") + .withHeaders("user" -> "certuser@thehive.local") + val result3 = app[PatternCtrl].get("T123")(request3) + status(result3) must beEqualTo(404).updateMessage(s => s"$s\n${contentAsString(result3)}") + } + } } From 69e09843e9e06532dac11c2e337e38536c48e623 Mon Sep 17 00:00:00 2001 From: Robin Riclet Date: Fri, 15 Jan 2021 15:22:45 +0100 Subject: [PATCH 86/93] Added pattern import unit test --- .../controllers/v1/PatternCtrlTest.scala | 23 +- thehive/test/resources/patterns.json | 447 ++++++++++++++++++ 2 files changed, 466 insertions(+), 4 deletions(-) create mode 100644 thehive/test/resources/patterns.json diff --git a/thehive/test/org/thp/thehive/controllers/v1/PatternCtrlTest.scala b/thehive/test/org/thp/thehive/controllers/v1/PatternCtrlTest.scala index 6548c104f6..bdf7ae6429 100644 --- a/thehive/test/org/thp/thehive/controllers/v1/PatternCtrlTest.scala +++ b/thehive/test/org/thp/thehive/controllers/v1/PatternCtrlTest.scala @@ -1,9 +1,12 @@ package org.thp.thehive.controllers.v1 import io.scalaland.chimney.dsl._ - +import org.thp.scalligraph.controllers.FakeTemporaryFile import org.thp.thehive.TestAppBuilder import org.thp.thehive.dto.v1.OutputPattern +import play.api.libs.json.JsArray +import play.api.mvc.MultipartFormData.FilePart +import 
play.api.mvc.{AnyContentAsMultipartFormData, MultipartFormData} import play.api.test.{FakeRequest, PlaySpecification} case class TestPattern( @@ -25,12 +28,24 @@ object TestPattern { class PatternCtrlTest extends PlaySpecification with TestAppBuilder { "pattern controller" should { - // TODO - /* "import json patterns" in testApp { app => + val request = FakeRequest("POST", "/api/v1/pattern/import/mitre") + .withHeaders("user" -> "admin@thehive.local") + .withBody( + AnyContentAsMultipartFormData( + MultipartFormData( + dataParts = Map.empty, + files = Seq(FilePart("file", "patterns.json", Option("application/json"), FakeTemporaryFile.fromResource("/patterns.json"))), + badParts = Seq() + ) + ) + ) + + val result = app[PatternCtrl].importMitre(request) + status(result) must beEqualTo(201).updateMessage(s => s"$s\n${contentAsString(result)}") + contentAsJson(result).as[JsArray].value.size must beEqualTo(8) } - */ "get a existing pattern" in testApp { app => val request = FakeRequest("GET", "/api/v1/pattern/T123") diff --git a/thehive/test/resources/patterns.json b/thehive/test/resources/patterns.json new file mode 100644 index 0000000000..3338d13fb7 --- /dev/null +++ b/thehive/test/resources/patterns.json @@ -0,0 +1,447 @@ +{ + "type": "bundle", + "id": "bundle--ad5f3bce-004b-417e-899d-392f8591ab55", + "spec_version": "2.0", + "objects": [ + { + "id": "attack-pattern--01df3350-ce05-4bdf-bdf8-0a919a66d4a8", + "name": ".bash_profile and .bashrc", + "external_references": [ + { + "source_name": "mitre-attack", + "external_id": "T1156", + "url": "https://attack.mitre.org/techniques/T1156" + }, + { + "url": "https://researchcenter.paloaltonetworks.com/2017/04/unit42-new-iotlinux-malware-targets-dvrs-forms-botnet/", + "description": "Claud Xiao, Cong Zheng, Yanhui Jia. (2017, April 6). New IoT/Linux Malware Targets DVRs, Forms Botnet. Retrieved February 19, 2018.", + "source_name": "amnesia malware" + } + ], + "revoked": true, + "type": "attack-pattern", + "modified": "2020-01-24T14:14:05.452Z", + "created": "2017-12-14T16:46:06.044Z" + }, + { + "external_references": [ + { + "source_name": "mitre-attack", + "external_id": "T1546.004", + "url": "https://attack.mitre.org/techniques/T1546/004" + }, + { + "url": "https://researchcenter.paloaltonetworks.com/2017/04/unit42-new-iotlinux-malware-targets-dvrs-forms-botnet/", + "description": "Claud Xiao, Cong Zheng, Yanhui Jia. (2017, April 6). New IoT/Linux Malware Targets DVRs, Forms Botnet. Retrieved February 19, 2018.", + "source_name": "amnesia malware" + } + ], + "object_marking_refs": [ + "marking-definition--fa42a846-8d90-4e51-bc29-71d5b4802168" + ], + "created_by_ref": "identity--c78cb6e5-0c4b-4611-8297-d1b8b55e40b5", + "name": ".bash_profile and .bashrc", + "description": "Adversaries may establish persistence by executing malicious content triggered by a user’s shell. ~/.bash_profile and ~/.bashrc are shell scripts that contain shell commands. These files are executed in a user's context when a new shell opens or when a user logs in so that their environment is set correctly.\n\n~/.bash_profile is executed for login shells and ~/.bashrc is executed for interactive non-login shells. This means that when a user logs in (via username and password) to the console (either locally or remotely via something like SSH), the ~/.bash_profile script is executed before the initial command prompt is returned to the user. After that, every time a new shell is opened, the ~/.bashrc script is executed. 
This allows users more fine-grained control over when they want certain commands executed. These shell scripts are meant to be written to by the local user to configure their own environment.\n\nThe macOS Terminal.app is a little different in that it runs a login shell by default each time a new terminal window is opened, thus calling ~/.bash_profile each time instead of ~/.bashrc.\n\nAdversaries may abuse these shell scripts by inserting arbitrary shell commands that may be used to execute other binaries to gain persistence. Every time the user logs in or opens a new shell, the modified ~/.bash_profile and/or ~/.bashrc scripts will be executed.(Citation: amnesia malware)", + "id": "attack-pattern--b63a34e8-0a61-4c97-a23b-bf8a2ed812e2", + "type": "attack-pattern", + "kill_chain_phases": [ + { + "kill_chain_name": "mitre-attack", + "phase_name": "privilege-escalation" + }, + { + "kill_chain_name": "mitre-attack", + "phase_name": "persistence" + } + ], + "modified": "2020-03-24T16:28:04.990Z", + "created": "2020-01-24T14:13:45.936Z", + "x_mitre_version": "1.0", + "x_mitre_is_subtechnique": true, + "x_mitre_permissions_required": [ + "User", + "Administrator" + ], + "x_mitre_detection": "While users may customize their ~/.bashrc and ~/.bash_profile files , there are only certain types of commands that typically appear in these files. Monitor for abnormal commands such as execution of unknown programs, opening network sockets, or reaching out across the network when user profiles are loaded during the login process.", + "x_mitre_data_sources": [ + "Process use of network", + "Process command-line parameters", + "Process monitoring", + "File monitoring" + ], + "x_mitre_platforms": [ + "Linux", + "macOS" + ] + }, + { + "external_references": [ + { + "url": "https://attack.mitre.org/techniques/T1003/008", + "external_id": "T1003.008", + "source_name": "mitre-attack" + }, + { + "description": "The Linux Documentation Project. (n.d.). Linux Password and Shadow File Formats. Retrieved February 19, 2020.", + "url": "https://www.tldp.org/LDP/lame/LAME/linux-admin-made-easy/shadow-file-formats.html", + "source_name": "Linux Password and Shadow File Formats" + }, + { + "description": "Vivek Gite. (2014, September 17). Linux Password Cracking: Explain unshadow and john Commands (John the Ripper Tool). Retrieved February 19, 2020.", + "url": "https://www.cyberciti.biz/faq/unix-linux-password-cracking-john-the-ripper/", + "source_name": "nixCraft - John the Ripper" + } + ], + "object_marking_refs": [ + "marking-definition--fa42a846-8d90-4e51-bc29-71d5b4802168" + ], + "created_by_ref": "identity--c78cb6e5-0c4b-4611-8297-d1b8b55e40b5", + "name": "/etc/passwd and /etc/shadow", + "description": "Adversaries may attempt to dump the contents of /etc/passwd and /etc/shadow to enable offline password cracking. Most modern Linux operating systems use a combination of /etc/passwd and /etc/shadow to store user account information including password hashes in /etc/shadow. 
By default, /etc/shadow is only readable by the root user.(Citation: Linux Password and Shadow File Formats)\n\nThe Linux utility, unshadow, can be used to combine the two files in a format suited for password cracking utilities such as John the Ripper:(Citation: nixCraft - John the Ripper) # /usr/bin/unshadow /etc/passwd /etc/shadow > /tmp/crack.password.db\n", + "id": "attack-pattern--d0b4fcdb-d67d-4ed2-99ce-788b12f8c0f4", + "type": "attack-pattern", + "kill_chain_phases": [ + { + "kill_chain_name": "mitre-attack", + "phase_name": "credential-access" + } + ], + "modified": "2020-03-20T15:56:55.022Z", + "created": "2020-02-11T18:46:56.263Z", + "x_mitre_detection": "The AuditD monitoring tool, which ships stock in many Linux distributions, can be used to watch for hostile processes attempting to access /etc/passwd and /etc/shadow, alerting on the pid, process name, and arguments of such programs.", + "x_mitre_permissions_required": [ + "root" + ], + "x_mitre_version": "1.0", + "x_mitre_is_subtechnique": true, + "x_mitre_platforms": [ + "Linux" + ] + }, + { + "external_references": [ + { + "source_name": "mitre-attack", + "external_id": "T1557.002", + "url": "https://attack.mitre.org/techniques/T1557/002" + }, + { + "source_name": "RFC826 ARP", + "url": "https://tools.ietf.org/html/rfc826", + "description": "Plummer, D. (1982, November). An Ethernet Address Resolution Protocol. Retrieved October 15, 2020." + }, + { + "source_name": "Sans ARP Spoofing Aug 2003", + "url": "https://pen-testing.sans.org/resources/papers/gcih/real-world-arp-spoofing-105411", + "description": "Siles, R. (2003, August). Real World ARP Spoofing. Retrieved October 15, 2020." + }, + { + "source_name": "Cylance Cleaver", + "description": "Cylance. (2014, December). Operation Cleaver. Retrieved September 14, 2017.", + "url": "https://www.cylance.com/content/dam/cylance/pages/operation-cleaver/Cylance_Operation_Cleaver_Report.pdf" + } + ], + "object_marking_refs": [ + "marking-definition--fa42a846-8d90-4e51-bc29-71d5b4802168" + ], + "created_by_ref": "identity--c78cb6e5-0c4b-4611-8297-d1b8b55e40b5", + "name": "ARP Cache Poisoning", + "description": "Adversaries may poison Address Resolution Protocol (ARP) caches to position themselves between the communication of two or more networked devices. This activity may be used to enable follow-on behaviors such as [Network Sniffing](https://attack.mitre.org/techniques/T1040) or [Transmitted Data Manipulation](https://attack.mitre.org/techniques/T1565/002).\n\nThe ARP protocol is used to resolve IPv4 addresses to link layer addresses, such as a media access control (MAC) address.(Citation: RFC826 ARP) Devices in a local network segment communicate with each other by using link layer addresses. If a networked device does not have the link layer address of a particular networked device, it may send out a broadcast ARP request to the local network to translate the IP address to a MAC address. The device with the associated IP address directly replies with its MAC address. The networked device that made the ARP request will then use as well as store that information in its ARP cache.\n\nAn adversary may passively wait for an ARP request to poison the ARP cache of the requesting device. The adversary may reply with their MAC address, thus deceiving the victim by making them believe that they are communicating with the intended networked device. For the adversary to poison the ARP cache, their reply must be faster than the one made by the legitimate IP address owner. 
Adversaries may also send a gratuitous ARP reply that maliciously announces the ownership of a particular IP address to all the devices in the local network segment.\n\nThe ARP protocol is stateless and does not require authentication. Therefore, devices may wrongly add or update the MAC address of the IP address in their ARP cache.(Citation: Sans ARP Spoofing Aug 2003)(Citation: Cylance Cleaver)\n\nAdversaries may use ARP cache poisoning as a means to man-in-the-middle (MiTM) network traffic. This activity may be used to collect and/or relay data such as credentials, especially those sent over an insecure, unencrypted protocol.(Citation: Sans ARP Spoofing Aug 2003)\n", + "id": "attack-pattern--cabe189c-a0e3-4965-a473-dcff00f17213", + "type": "attack-pattern", + "kill_chain_phases": [ + { + "kill_chain_name": "mitre-attack", + "phase_name": "credential-access" + }, + { + "kill_chain_name": "mitre-attack", + "phase_name": "collection" + } + ], + "modified": "2020-10-16T15:22:11.604Z", + "created": "2020-10-15T12:05:58.755Z", + "x_mitre_version": "1.0", + "x_mitre_is_subtechnique": true, + "x_mitre_permissions_required": [ + "User" + ], + "x_mitre_detection": "Monitor network traffic for unusual ARP traffic, gratuitous ARP replies may be suspicious. \n\nConsider collecting changes to ARP caches across endpoints for signs of ARP poisoning. For example, if multiple IP addresses map to a single MAC address, this could be an indicator that the ARP cache has been poisoned.", + "x_mitre_data_sources": [ + "Packet capture", + "Netflow/Enclave netflow" + ], + "x_mitre_contributors": [ + "Jon Sternstein, Stern Security" + ], + "x_mitre_platforms": [ + "Linux", + "Windows", + "macOS" + ] + }, + { + "external_references": [ + { + "source_name": "mitre-attack", + "external_id": "T1558.004", + "url": "https://attack.mitre.org/techniques/T1558/004" + }, + { + "source_name": "Harmj0y Roasting AS-REPs Jan 2017", + "url": "http://www.harmj0y.net/blog/activedirectory/roasting-as-reps/", + "description": "HarmJ0y. (2017, January 17). Roasting AS-REPs. Retrieved August 24, 2020." + }, + { + "source_name": "Microsoft Kerberos Preauth 2014", + "url": "https://social.technet.microsoft.com/wiki/contents/articles/23559.kerberos-pre-authentication-why-it-should-not-be-disabled.aspx", + "description": "Sanyal, M.. (2014, March 18). Kerberos Pre-Authentication: Why It Should Not Be Disabled. Retrieved August 25, 2020." + }, + { + "source_name": "Stealthbits Cracking AS-REP Roasting Jun 2019", + "url": "https://blog.stealthbits.com/cracking-active-directory-passwords-with-as-rep-roasting/", + "description": "Jeff Warren. (2019, June 27). Cracking Active Directory Passwords with AS-REP Roasting. Retrieved August 24, 2020." + }, + { + "description": "Medin, T. (2014, November). Attacking Kerberos - Kicking the Guard Dog of Hades. Retrieved March 22, 2018.", + "source_name": "SANS Attacking Kerberos Nov 2014", + "url": "https://redsiege.com/kerberoast-slides" + }, + { + "url": "https://adsecurity.org/?p=2293", + "description": "Metcalf, S. (2015, December 31). Cracking Kerberos TGS Tickets Using Kerberoast – Exploiting Kerberos to Compromise the Active Directory Domain. Retrieved March 22, 2018.", + "source_name": "AdSecurity Cracking Kerberos Dec 2015" + }, + { + "url": "https://blogs.technet.microsoft.com/motiba/2018/02/23/detecting-kerberoasting-activity-using-azure-security-center/", + "description": "Bani, M. (2018, February 23). Detecting Kerberoasting activity using Azure Security Center. 
Retrieved March 23, 2018.", + "source_name": "Microsoft Detecting Kerberoasting Feb 2018" + }, + { + "source_name": "Microsoft 4768 TGT 2017", + "url": "https://docs.microsoft.com/en-us/windows/security/threat-protection/auditing/event-4768", + "description": "Microsoft. (2017, April 19). 4768(S, F): A Kerberos authentication ticket (TGT) was requested. Retrieved August 24, 2020." + } + ], + "object_marking_refs": [ + "marking-definition--fa42a846-8d90-4e51-bc29-71d5b4802168" + ], + "created_by_ref": "identity--c78cb6e5-0c4b-4611-8297-d1b8b55e40b5", + "name": "AS-REP Roasting", + "description": "Adversaries may reveal credentials of accounts that have disabled Kerberos preauthentication by [Password Cracking](https://attack.mitre.org/techniques/T1110/002) Kerberos messages.(Citation: Harmj0y Roasting AS-REPs Jan 2017) \n\nPreauthentication offers protection against offline [Password Cracking](https://attack.mitre.org/techniques/T1110/002). When enabled, a user requesting access to a resource initiates communication with the Domain Controller (DC) by sending an Authentication Server Request (AS-REQ) message with a timestamp that is encrypted with the hash of their password. If and only if the DC is able to successfully decrypt the timestamp with the hash of the user’s password, it will then send an Authentication Server Response (AS-REP) message that contains the Ticket Granting Ticket (TGT) to the user. Part of the AS-REP message is signed with the user’s password.(Citation: Microsoft Kerberos Preauth 2014)\n\nFor each account found without preauthentication, an adversary may send an AS-REQ message without the encrypted timestamp and receive an AS-REP message with TGT data which may be encrypted with an insecure algorithm such as RC4. The recovered encrypted data may be vulnerable to offline [Password Cracking](https://attack.mitre.org/techniques/T1110/002) attacks similarly to [Kerberoasting](https://attack.mitre.org/techniques/T1558/003) and expose plaintext credentials. (Citation: Harmj0y Roasting AS-REPs Jan 2017)(Citation: Stealthbits Cracking AS-REP Roasting Jun 2019) \n\nAn account registered to a domain, with or without special privileges, can be abused to list all domain accounts that have preauthentication disabled by utilizing Windows tools like [PowerShell](https://attack.mitre.org/techniques/T1059/001) with an LDAP filter. Alternatively, the adversary may send an AS-REQ message for each user. If the DC responds without errors, the account does not require preauthentication and the AS-REP message will already contain the encrypted data. 
(Citation: Harmj0y Roasting AS-REPs Jan 2017)(Citation: Stealthbits Cracking AS-REP Roasting Jun 2019)\n\nCracked hashes may enable [Persistence](https://attack.mitre.org/tactics/TA0003), [Privilege Escalation](https://attack.mitre.org/tactics/TA0004), and [Lateral Movement](https://attack.mitre.org/tactics/TA0008) via access to [Valid Accounts](https://attack.mitre.org/techniques/T1078).(Citation: SANS Attacking Kerberos Nov 2014)", + "id": "attack-pattern--3986e7fd-a8e9-4ecb-bfc6-55920855912b", + "type": "attack-pattern", + "kill_chain_phases": [ + { + "kill_chain_name": "mitre-attack", + "phase_name": "credential-access" + } + ], + "modified": "2020-10-20T19:30:11.783Z", + "created": "2020-08-24T13:43:00.028Z", + "x_mitre_version": "1.0", + "x_mitre_is_subtechnique": true, + "x_mitre_system_requirements": [ + "Valid domain account" + ], + "x_mitre_permissions_required": [ + "User" + ], + "x_mitre_detection": "Enable Audit Kerberos Service Ticket Operations to log Kerberos TGS service ticket requests. Particularly investigate irregular patterns of activity (ex: accounts making numerous requests, Event ID 4768 and 4769, within a small time frame, especially if they also request RC4 encryption [Type 0x17], pre-authentication not required [Type: 0x0]).(Citation: AdSecurity Cracking Kerberos Dec 2015)(Citation: Microsoft Detecting Kerberoasting Feb 2018)(Citation: Microsoft 4768 TGT 2017)", + "x_mitre_data_sources": [ + "Windows event logs", + "Authentication logs" + ], + "x_mitre_contributors": [ + "James Dunn, @jamdunnDFW, EY", + "Swapnil Kumbhar", + "Jacques Pluviose, @Jacqueswildy_IT", + "Dan Nutting, @KerberToast" + ], + "x_mitre_platforms": [ + "Windows" + ] + }, + { + "external_references": [ + { + "source_name": "mitre-attack", + "external_id": "T1548", + "url": "https://attack.mitre.org/techniques/T1548" + } + ], + "object_marking_refs": [ + "marking-definition--fa42a846-8d90-4e51-bc29-71d5b4802168" + ], + "created_by_ref": "identity--c78cb6e5-0c4b-4611-8297-d1b8b55e40b5", + "name": "Abuse Elevation Control Mechanism", + "description": "Adversaries may circumvent mechanisms designed to control elevate privileges to gain higher-level permissions. Most modern systems contain native elevation control mechanisms that are intended to limit privileges that a user can perform on a machine. Authorization has to be granted to specific users in order to perform tasks that can be considered of higher risk. An adversary can perform several methods to take advantage of built-in control mechanisms in order to escalate privileges on a system.", + "id": "attack-pattern--67720091-eee3-4d2d-ae16-8264567f6f5b", + "type": "attack-pattern", + "kill_chain_phases": [ + { + "kill_chain_name": "mitre-attack", + "phase_name": "privilege-escalation" + }, + { + "kill_chain_name": "mitre-attack", + "phase_name": "defense-evasion" + } + ], + "modified": "2020-07-22T21:36:52.825Z", + "created": "2020-01-30T13:58:14.373Z", + "x_mitre_data_sources": [ + "Windows Registry", + "File monitoring", + "Process command-line parameters", + "API monitoring", + "Process monitoring" + ], + "x_mitre_permissions_required": [ + "Administrator", + "User" + ], + "x_mitre_detection": "Monitor the file system for files that have the setuid or setgid bits set. 
Also look for any process API calls for behavior that may be indicative of [Process Injection](https://attack.mitre.org/techniques/T1055) and unusual loaded DLLs through [DLL Search Order Hijacking](https://attack.mitre.org/techniques/T1574/001), which indicate attempts to gain access to higher privileged processes. On Linux, auditd can alert every time a user's actual ID and effective ID are different (this is what happens when you sudo).\n\nConsider monitoring for /usr/libexec/security_authtrampoline executions which may indicate that AuthorizationExecuteWithPrivileges is being executed. MacOS system logs may also indicate when AuthorizationExecuteWithPrivileges is being called. Monitoring OS API callbacks for the execution can also be a way to detect this behavior but requires specialized security tooling.\n\nOn Linux, auditd can alert every time a user's actual ID and effective ID are different (this is what happens when you sudo). This technique is abusing normal functionality in macOS and Linux systems, but sudo has the ability to log all input and output based on the LOG_INPUT and LOG_OUTPUT directives in the /etc/sudoers file.\n\nThere are many ways to perform UAC bypasses when a user is in the local administrator group on a system, so it may be difficult to target detection on all variations. Efforts should likely be placed on mitigation and collecting enough information on process launches and actions that could be performed before and after a UAC bypass is performed. Some UAC bypass methods rely on modifying specific, user-accessible Registry settings. Analysts should monitor Registry settings for unauthorized changes.", + "x_mitre_version": "1.0", + "x_mitre_is_subtechnique": false, + "x_mitre_platforms": [ + "Linux", + "macOS", + "Windows" + ] + }, + { + "object_marking_refs": [ + "marking-definition--fa42a846-8d90-4e51-bc29-71d5b4802168" + ], + "external_references": [ + { + "source_name": "mitre-attack", + "external_id": "T1134", + "url": "https://attack.mitre.org/techniques/T1134" + }, + { + "external_id": "CAPEC-633", + "source_name": "capec", + "url": "https://capec.mitre.org/data/definitions/633.html" + }, + { + "url": "https://pentestlab.blog/2017/04/03/token-manipulation/", + "description": "netbiosX. (2017, April 3). Token Manipulation. Retrieved April 21, 2017.", + "source_name": "Pentestlab Token Manipulation" + }, + { + "url": "https://technet.microsoft.com/en-us/windows-server-docs/identity/ad-ds/manage/component-updates/command-line-process-auditing", + "description": "Mathers, B. (2017, March 7). Command line process auditing. Retrieved April 21, 2017.", + "source_name": "Microsoft Command-line Logging" + }, + { + "url": "https://msdn.microsoft.com/en-us/library/windows/desktop/aa378184(v=vs.85).aspx", + "description": "Microsoft TechNet. (n.d.). Retrieved April 25, 2017.", + "source_name": "Microsoft LogonUser" + }, + { + "url": "https://msdn.microsoft.com/en-us/library/windows/desktop/aa446617(v=vs.85).aspx", + "description": "Microsoft TechNet. (n.d.). Retrieved April 25, 2017.", + "source_name": "Microsoft DuplicateTokenEx" + }, + { + "url": "https://msdn.microsoft.com/en-us/library/windows/desktop/aa378612(v=vs.85).aspx", + "description": "Microsoft TechNet. (n.d.). Retrieved April 25, 2017.", + "source_name": "Microsoft ImpersonateLoggedOnUser" + }, + { + "url": "https://www.blackhat.com/docs/eu-17/materials/eu-17-Atkinson-A-Process-Is-No-One-Hunting-For-Token-Manipulation.pdf", + "description": "Atkinson, J., Winchester, R. (2017, December 7). 
A Process is No One: Hunting for Token Manipulation. Retrieved December 21, 2017.", + "source_name": "BlackHat Atkinson Winchester Token Manipulation" + } + ], + "description": "Adversaries may modify access tokens to operate under a different user or system security context to perform actions and bypass access controls. Windows uses access tokens to determine the ownership of a running process. A user can manipulate access tokens to make a running process appear as though it is the child of a different process or belongs to someone other than the user that started the process. When this occurs, the process also takes on the security context associated with the new token.\n\nAn adversary can use built-in Windows API functions to copy access tokens from existing processes; this is known as token stealing. These token can then be applied to an existing process (i.e. [Token Impersonation/Theft](https://attack.mitre.org/techniques/T1134/001)) or used to spawn a new process (i.e. [Create Process with Token](https://attack.mitre.org/techniques/T1134/002)). An adversary must already be in a privileged user context (i.e. administrator) to steal a token. However, adversaries commonly use token stealing to elevate their security context from the administrator level to the SYSTEM level. An adversary can then use a token to authenticate to a remote system as the account for that token if the account has appropriate permissions on the remote system.(Citation: Pentestlab Token Manipulation)\n\nAny standard user can use the runas command, and the Windows API functions, to create impersonation tokens; it does not require access to an administrator account. There are also other mechanisms, such as Active Directory fields, that can be used to modify access tokens.", + "name": "Access Token Manipulation", + "created_by_ref": "identity--c78cb6e5-0c4b-4611-8297-d1b8b55e40b5", + "id": "attack-pattern--dcaa092b-7de9-4a21-977f-7fcb77e89c48", + "type": "attack-pattern", + "kill_chain_phases": [ + { + "kill_chain_name": "mitre-attack", + "phase_name": "defense-evasion" + }, + { + "kill_chain_name": "mitre-attack", + "phase_name": "privilege-escalation" + } + ], + "modified": "2020-04-16T19:37:02.355Z", + "created": "2017-12-14T16:46:06.044Z", + "x_mitre_defense_bypassed": [ + "Windows User Account Control", + "System access controls", + "File system access controls", + "Heuristic Detection", + "Host forensic analysis" + ], + "x_mitre_is_subtechnique": false, + "x_mitre_version": "2.0", + "x_mitre_contributors": [ + "Tom Ueltschi @c_APT_ure", + "Travis Smith, Tripwire", + "Robby Winchester, @robwinchester3", + "Jared Atkinson, @jaredcatkinson" + ], + "x_mitre_data_sources": [ + "Authentication logs", + "Windows event logs", + "API monitoring", + "Access tokens", + "Process monitoring", + "Process command-line parameters" + ], + "x_mitre_detection": "If an adversary is using a standard command-line shell, analysts can detect token manipulation by auditing command-line activity. Specifically, analysts should look for use of the runas command. Detailed command-line logging is not enabled by default in Windows.(Citation: Microsoft Command-line Logging)\n\nIf an adversary is using a payload that calls the Windows token APIs directly, analysts can detect token manipulation only through careful analysis of user network activity, examination of running processes, and correlation with other endpoint and network behavior. 
\n\nThere are many Windows API calls a payload can take advantage of to manipulate access tokens (e.g., LogonUser (Citation: Microsoft LogonUser), DuplicateTokenEx(Citation: Microsoft DuplicateTokenEx), and ImpersonateLoggedOnUser(Citation: Microsoft ImpersonateLoggedOnUser)). Please see the referenced Windows API pages for more information.\n\nQuery systems for process and thread token information and look for inconsistencies such as user owns processes impersonating the local SYSTEM account.(Citation: BlackHat Atkinson Winchester Token Manipulation)\n\nLook for inconsistencies between the various fields that store PPID information, such as the EventHeader ProcessId from data collected via Event Tracing for Windows (ETW), Creator Process ID/Name from Windows event logs, and the ProcessID and ParentProcessID (which are also produced from ETW and other utilities such as Task Manager and Process Explorer). The ETW provided EventHeader ProcessId identifies the actual parent process.", + "x_mitre_permissions_required": [ + "User", + "Administrator" + ], + "x_mitre_effective_permissions": [ + "SYSTEM" + ], + "x_mitre_platforms": [ + "Windows" + ] + }, + { + "external_references": [ + { + "source_name": "mitre-attack", + "external_id": "T1015", + "url": "https://attack.mitre.org/techniques/T1015" + }, + { + "external_id": "CAPEC-558", + "source_name": "capec", + "url": "https://capec.mitre.org/data/definitions/558.html" + }, + { + "url": "https://www.fireeye.com/blog/threat-research/2012/08/hikit-rootkit-advanced-persistent-attack-techniques-part-1.html", + "description": "Glyer, C., Kazanciyan, R. (2012, August 20). The “Hikit” Rootkit: Advanced and Persistent Attack Techniques (Part 1). Retrieved June 6, 2016.", + "source_name": "FireEye Hikit Rootkit" + }, + { + "url": "https://www.slideshare.net/DennisMaldonado5/sticky-keys-to-the-kingdom", + "description": "Maldonado, D., McGuffin, T. (2016, August 6). Sticky Keys to the Kingdom. Retrieved July 5, 2017.", + "source_name": "DEFCON2016 Sticky Keys" + }, + { + "url": "http://blog.crowdstrike.com/registry-analysis-with-crowdresponse/", + "description": "Tilbury, C. (2014, August 28). Registry Analysis with CrowdResponse. 
Retrieved November 12, 2014.", + "source_name": "Tilbury 2014" + } + ], + "name": "Accessibility Features", + "id": "attack-pattern--9b99b83a-1aac-4e29-b975-b374950551a3", + "revoked": true, + "type": "attack-pattern", + "modified": "2020-05-13T20:37:30.008Z", + "created": "2017-05-31T21:30:26.946Z" + } +] +} \ No newline at end of file From a94258a4f6f509ac9a178aac7c37614494a49901 Mon Sep 17 00:00:00 2001 From: Robin Riclet Date: Mon, 18 Jan 2021 11:59:43 +0100 Subject: [PATCH 87/93] Added get patterns linked to case --- .../scala/org/thp/thehive/dto/v1/Pattern.scala | 2 +- .../thp/thehive/controllers/v1/Conversion.scala | 2 +- .../thp/thehive/controllers/v1/PatternCtrl.scala | 8 ++++++++ .../thp/thehive/controllers/v1/Properties.scala | 2 +- .../org/thp/thehive/controllers/v1/Router.scala | 1 + thehive/app/org/thp/thehive/models/Pattern.scala | 4 ++-- .../app/org/thp/thehive/services/CaseSrv.scala | 2 ++ .../app/org/thp/thehive/services/PatternSrv.scala | 14 +++++++++++++- .../org/thp/thehive/services/ProcedureSrv.scala | 4 ++++ .../thehive/controllers/v1/PatternCtrlTest.scala | 14 ++++++++++++-- .../controllers/v1/ProcedureCtrlTest.scala | 12 ++++++------ thehive/test/resources/data/CaseProcedure.json | 3 ++- thehive/test/resources/data/Pattern.json | 15 +++++++++++++++ thehive/test/resources/data/Procedure.json | 5 +++++ thehive/test/resources/data/ProcedurePattern.json | 3 ++- 15 files changed, 75 insertions(+), 16 deletions(-) diff --git a/dto/src/main/scala/org/thp/thehive/dto/v1/Pattern.scala b/dto/src/main/scala/org/thp/thehive/dto/v1/Pattern.scala index 38b2b3a88b..c1220dade9 100644 --- a/dto/src/main/scala/org/thp/thehive/dto/v1/Pattern.scala +++ b/dto/src/main/scala/org/thp/thehive/dto/v1/Pattern.scala @@ -98,7 +98,7 @@ case class OutputPattern( patternId: String, name: String, description: Option[String], - tactics: Seq[String], + tactics: Set[String], url: String, patternType: String, platforms: Seq[String], diff --git a/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala b/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala index 9058aa3a30..fceb63e625 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala @@ -463,7 +463,7 @@ object Conversion { inputPattern .into[Pattern] .withFieldRenamed(_.external_id, _.patternId) - .withFieldComputed(_.tactics, _.kill_chain_phases.map(_.phase_name)) + .withFieldComputed(_.tactics, _.kill_chain_phases.map(_.phase_name).toSet) .withFieldRenamed(_.`type`, _.patternType) .withFieldRenamed(_.x_mitre_platforms, _.platforms) .withFieldRenamed(_.x_mitre_data_sources, _.dataSources) diff --git a/thehive/app/org/thp/thehive/controllers/v1/PatternCtrl.scala b/thehive/app/org/thp/thehive/controllers/v1/PatternCtrl.scala index 6dd894e93c..2e07e25645 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/PatternCtrl.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/PatternCtrl.scala @@ -82,6 +82,14 @@ class PatternCtrl @Inject() ( .map(richPattern => Results.Ok(richPattern.toJson)) } + def getCasePatterns(caseId: String): Action[AnyContent] = + entrypoint("get case patterns") + .authRoTransaction(db) { implicit request => implicit graph => + for { + patternIds <- patternSrv.getCasePatterns(caseId) + } yield Results.Ok(patternIds.toJson) + } + def delete(patternId: String): Action[AnyContent] = entrypoint("delete pattern") .authPermittedTransaction(db, Permissions.managePattern) { implicit request => implicit graph => diff --git 
a/thehive/app/org/thp/thehive/controllers/v1/Properties.scala b/thehive/app/org/thp/thehive/controllers/v1/Properties.scala index f6783981be..0103e69de6 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/Properties.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/Properties.scala @@ -355,7 +355,7 @@ class Properties @Inject() ( .property("patternId", UMapping.string)(_.field.readonly) .property("name", UMapping.string)(_.field.readonly) .property("description", UMapping.string.optional)(_.field.updatable) - .property("tactics", UMapping.string.sequence)(_.field.readonly) + .property("tactics", UMapping.string.set)(_.field.readonly) .property("url", UMapping.string)(_.field.updatable) .property("patternType", UMapping.string)(_.field.readonly) .property("platforms", UMapping.string.sequence)(_.field.readonly) diff --git a/thehive/app/org/thp/thehive/controllers/v1/Router.scala b/thehive/app/org/thp/thehive/controllers/v1/Router.scala index 91cf7fd582..d9f786bca5 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/Router.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/Router.scala @@ -135,6 +135,7 @@ class Router @Inject() ( case POST(p"/pattern/import/mitre") => patternCtrl.importMitre case GET(p"/pattern/$patternId") => patternCtrl.get(patternId) + case GET(p"/pattern/case/$caseId") => patternCtrl.getCasePatterns(caseId) case DELETE(p"/pattern/$patternId") => patternCtrl.delete(patternId) case POST(p"/procedure") => procedureCtrl.create diff --git a/thehive/app/org/thp/thehive/models/Pattern.scala b/thehive/app/org/thp/thehive/models/Pattern.scala index 5c090c5ea8..3713901a05 100644 --- a/thehive/app/org/thp/thehive/models/Pattern.scala +++ b/thehive/app/org/thp/thehive/models/Pattern.scala @@ -10,7 +10,7 @@ case class Pattern( patternId: String, name: String, description: Option[String], - tactics: Seq[String], + tactics: Set[String], url: String, patternType: String, platforms: Seq[String], @@ -25,7 +25,7 @@ case class RichPattern(pattern: Pattern with Entity, parent: Option[Pattern with def patternId: String = pattern.patternId def name: String = pattern.name def description: Option[String] = pattern.description - def tactics: Seq[String] = pattern.tactics + def tactics: Set[String] = pattern.tactics def url: String = pattern.url def patternType: String = pattern.patternType def platforms: Seq[String] = pattern.platforms diff --git a/thehive/app/org/thp/thehive/services/CaseSrv.scala b/thehive/app/org/thp/thehive/services/CaseSrv.scala index d29415c808..b0556e5495 100644 --- a/thehive/app/org/thp/thehive/services/CaseSrv.scala +++ b/thehive/app/org/thp/thehive/services/CaseSrv.scala @@ -555,6 +555,8 @@ object CaseOps { def alert: Traversal.V[Alert] = traversal.in[AlertCase].v[Alert] + def procedure: Traversal.V[Procedure] = traversal.out[CaseProcedure].v[Procedure] + def isActionRequired(implicit authContext: AuthContext): Traversal[Boolean, Boolean, Converter.Identity[Boolean]] = traversal.choose(_.share(authContext).outE[ShareTask].has(_.actionRequired, true), true, false) diff --git a/thehive/app/org/thp/thehive/services/PatternSrv.scala b/thehive/app/org/thp/thehive/services/PatternSrv.scala index 7c47c3c936..f2ac79b937 100644 --- a/thehive/app/org/thp/thehive/services/PatternSrv.scala +++ b/thehive/app/org/thp/thehive/services/PatternSrv.scala @@ -8,7 +8,9 @@ import org.thp.scalligraph.services._ import org.thp.scalligraph.traversal.TraversalOps._ import org.thp.scalligraph.traversal.{Converter, Traversal} import org.thp.thehive.models._ +import 
org.thp.thehive.services.CaseOps._ import org.thp.thehive.services.PatternOps._ +import org.thp.thehive.services.ProcedureOps._ import java.util.{Map => JMap} import javax.inject.{Inject, Named, Singleton} @@ -17,6 +19,7 @@ import scala.util.{Success, Try} @Singleton class PatternSrv @Inject() ( auditSrv: AuditSrv, + caseSrv: CaseSrv, organisationSrv: OrganisationSrv )(implicit @Named("with-thehive-schema") db: Database @@ -28,11 +31,17 @@ class PatternSrv @Inject() ( def setParent(child: Pattern with Entity, parent: Pattern with Entity)(implicit authContext: AuthContext, graph: Graph): Try[Unit] = if (parentExists(child, parent)) Success(()) - else patternPatternSrv.create(PatternPattern(), child, parent).map(_ => ()) + else patternPatternSrv.create(PatternPattern(), parent, child).map(_ => ()) override def getByName(name: String)(implicit graph: Graph): Traversal.V[Pattern] = Try(startTraversal.getByPatternId(name)).getOrElse(startTraversal.limit(0)) + def getCasePatterns(caseId: String)(implicit graph: Graph): Try[Seq[String]] = + for { + caze <- caseSrv.get(EntityIdOrName(caseId)).getOrFail("Case") + patterns = caseSrv.get(caze).procedure.pattern.richPattern.toSeq + } yield patterns.map(_.patternId) + def remove(pattern: Pattern with Entity)(implicit graph: Graph, authContext: AuthContext): Try[Unit] = for { organisation <- organisationSrv.getOrFail(authContext.organisation) @@ -51,6 +60,9 @@ object PatternOps { def parent: Traversal.V[Pattern] = traversal.in[PatternPattern].v[Pattern] + def procedure: Traversal.V[Procedure] = + traversal.in[ProcedurePattern].v[Procedure] + def alreadyImported(patternId: String): Boolean = traversal.getByPatternId(patternId).exists diff --git a/thehive/app/org/thp/thehive/services/ProcedureSrv.scala b/thehive/app/org/thp/thehive/services/ProcedureSrv.scala index 2015b75bca..08245e5bc8 100644 --- a/thehive/app/org/thp/thehive/services/ProcedureSrv.scala +++ b/thehive/app/org/thp/thehive/services/ProcedureSrv.scala @@ -50,6 +50,10 @@ class ProcedureSrv @Inject() ( object ProcedureOps { implicit class ProcedureOpsDefs(traversal: Traversal.V[Procedure]) { + + def pattern: Traversal.V[Pattern] = + traversal.out[ProcedurePattern].v[Pattern] + def richProcedure: Traversal[RichProcedure, JMap[String, Any], Converter[RichProcedure, JMap[String, Any]]] = { val procedure = StepLabel.v[Procedure] val pattern = StepLabel.v[Pattern] diff --git a/thehive/test/org/thp/thehive/controllers/v1/PatternCtrlTest.scala b/thehive/test/org/thp/thehive/controllers/v1/PatternCtrlTest.scala index bdf7ae6429..61d689a0d0 100644 --- a/thehive/test/org/thp/thehive/controllers/v1/PatternCtrlTest.scala +++ b/thehive/test/org/thp/thehive/controllers/v1/PatternCtrlTest.scala @@ -13,7 +13,7 @@ case class TestPattern( patternId: String, name: String, description: Option[String], - tactics: Seq[String], + tactics: Set[String], url: String, patternType: String, platforms: Seq[String], @@ -59,7 +59,7 @@ class PatternCtrlTest extends PlaySpecification with TestAppBuilder { "T123", "testPattern1", Some("The testPattern 1"), - Seq("testTactic1", "testTactic2"), + Set("testTactic1", "testTactic2"), "http://test.pattern.url", "unit-test", Seq(), @@ -68,6 +68,16 @@ class PatternCtrlTest extends PlaySpecification with TestAppBuilder { ) } + "get patterns linked to case" in testApp { app => + val request = FakeRequest("GET", "/api/v1/pattern/case/1") + .withHeaders("user" -> "certuser@thehive.local") + + val result = app[PatternCtrl].getCasePatterns("1")(request) + status(result) must 
beEqualTo(200).updateMessage(s => s"$s\n${contentAsString(result)}") + + contentAsJson(result).as[JsArray].value.size must beEqualTo(2) + } + "delete a pattern" in testApp { app => val request1 = FakeRequest("GET", "/api/v1/pattern/testPattern1") .withHeaders("user" -> "certuser@thehive.local") diff --git a/thehive/test/org/thp/thehive/controllers/v1/ProcedureCtrlTest.scala b/thehive/test/org/thp/thehive/controllers/v1/ProcedureCtrlTest.scala index 5ab405d449..b12ea199d3 100644 --- a/thehive/test/org/thp/thehive/controllers/v1/ProcedureCtrlTest.scala +++ b/thehive/test/org/thp/thehive/controllers/v1/ProcedureCtrlTest.scala @@ -24,7 +24,7 @@ class ProcedureCtrlTest extends PlaySpecification with TestAppBuilder { "create a valid procedure" in testApp { app => val procedureDate = new Date() val inputProcedure = InputProcedure( - "testProcedure2", + "testProcedure3", procedureDate, "1", "T123" @@ -40,18 +40,18 @@ class ProcedureCtrlTest extends PlaySpecification with TestAppBuilder { val resultProcedure = contentAsJson(result).as[OutputProcedure] TestProcedure(resultProcedure) must_=== TestProcedure( - "testProcedure2", + "testProcedure3", procedureDate, "T123" ) } "delete a procedure" in testApp { app => - val request1 = FakeRequest("POST", "/api/v1/procedure/testProcedure2") + val request1 = FakeRequest("POST", "/api/v1/procedure/testProcedure3") .withJsonBody( Json.toJson( InputProcedure( - "testProcedure2", + "testProcedure3", new Date(), "1", "T123" @@ -63,12 +63,12 @@ class ProcedureCtrlTest extends PlaySpecification with TestAppBuilder { val procedureId = contentAsJson(result1).as[OutputProcedure]._id status(result1) must beEqualTo(201).updateMessage(s => s"$s\n${contentAsString(result1)}") - val request2 = FakeRequest("DELETE", "/api/v1/procedure/testProcedure2") + val request2 = FakeRequest("DELETE", "/api/v1/procedure/testProcedure3") .withHeaders("user" -> "admin@thehive.local") val result2 = app[ProcedureCtrl].delete(procedureId)(request2) status(result2) must beEqualTo(204).updateMessage(s => s"$s\n${contentAsString(result2)}") - val request3 = FakeRequest("GET", "/api/v1/procedure/testProcedure2") + val request3 = FakeRequest("GET", "/api/v1/procedure/testProcedure3") .withHeaders("user" -> "certuser@thehive.local") val result3 = app[ProcedureCtrl].get(procedureId)(request3) status(result3) must beEqualTo(404).updateMessage(s => s"$s\n${contentAsString(result3)}") diff --git a/thehive/test/resources/data/CaseProcedure.json b/thehive/test/resources/data/CaseProcedure.json index f37bf68e40..b39e53162c 100644 --- a/thehive/test/resources/data/CaseProcedure.json +++ b/thehive/test/resources/data/CaseProcedure.json @@ -1,3 +1,4 @@ [ - {"from": "case1", "to": "testProcedure1"} + {"from": "case1", "to": "testProcedure1"}, + {"from": "case1", "to": "testProcedure2"} ] \ No newline at end of file diff --git a/thehive/test/resources/data/Pattern.json b/thehive/test/resources/data/Pattern.json index 593dbd169a..f1e0166ca6 100644 --- a/thehive/test/resources/data/Pattern.json +++ b/thehive/test/resources/data/Pattern.json @@ -13,5 +13,20 @@ "platforms": [], "dataSources": [], "version": "1.0" + }, + { + "id": "testPattern2", + "patternId": "T234", + "name": "testPattern2", + "description": "The testPattern 2", + "tactics": [ + "testTactic2", + "testTactic3" + ], + "url": "http://test.pattern2.url", + "patternType": "unit-test", + "platforms": [], + "dataSources": [], + "version": "1.1" } ] \ No newline at end of file diff --git a/thehive/test/resources/data/Procedure.json 
b/thehive/test/resources/data/Procedure.json index abc17a14db..d908b1b1ad 100644 --- a/thehive/test/resources/data/Procedure.json +++ b/thehive/test/resources/data/Procedure.json @@ -3,5 +3,10 @@ "id": "testProcedure1", "description": "The testProcedure 1", "occurence": 1531667370000 + }, + { + "id": "testProcedure2", + "description": "The testProcedure 2", + "occurence": 1531667370000 } ] \ No newline at end of file diff --git a/thehive/test/resources/data/ProcedurePattern.json b/thehive/test/resources/data/ProcedurePattern.json index f8c6c9b5a0..19541f6f82 100644 --- a/thehive/test/resources/data/ProcedurePattern.json +++ b/thehive/test/resources/data/ProcedurePattern.json @@ -1,3 +1,4 @@ [ - {"from": "testProcedure1", "to": "testPattern1"} + {"from": "testProcedure1", "to": "testPattern1"}, + {"from": "testProcedure2", "to": "testPattern2"} ] \ No newline at end of file From d771951f42ed89e580c0a04d836fcad5b3e8592e Mon Sep 17 00:00:00 2001 From: Robin Riclet Date: Mon, 18 Jan 2021 14:10:35 +0100 Subject: [PATCH 88/93] Fixed procedure properties --- thehive/app/org/thp/thehive/controllers/v1/Properties.scala | 2 +- thehive/app/org/thp/thehive/controllers/v1/Router.scala | 3 --- 2 files changed, 1 insertion(+), 4 deletions(-) diff --git a/thehive/app/org/thp/thehive/controllers/v1/Properties.scala b/thehive/app/org/thp/thehive/controllers/v1/Properties.scala index 0103e69de6..c47a1c7b08 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/Properties.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/Properties.scala @@ -366,7 +366,7 @@ class Properties @Inject() ( lazy val procedure: PublicProperties = PublicPropertyListBuilder[Procedure] .property("description", UMapping.string)(_.field.updatable) - .property("description", UMapping.string)(_.field.readonly) + .property("occurence", UMapping.date)(_.field.readonly) .build lazy val profile: PublicProperties = diff --git a/thehive/app/org/thp/thehive/controllers/v1/Router.scala b/thehive/app/org/thp/thehive/controllers/v1/Router.scala index d9f786bca5..dc74300232 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/Router.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/Router.scala @@ -141,9 +141,6 @@ class Router @Inject() ( case POST(p"/procedure") => procedureCtrl.create case GET(p"/procedure/$procedureId") => procedureCtrl.get(procedureId) case DELETE(p"/procedure/$procedureId") => procedureCtrl.delete(procedureId) - // List pattern id associated to a Case - // Query filter by pattern name - // Query filter bu pattern Id case POST(p"/profile") => profileCtrl.create case GET(p"/profile/$profileId") => profileCtrl.get(profileId) From c8450b5e17b40d25842a23dbecefb1b255b48f09 Mon Sep 17 00:00:00 2001 From: Robin Riclet Date: Tue, 19 Jan 2021 16:04:46 +0100 Subject: [PATCH 89/93] Code review changes --- .../org/thp/thehive/dto/v1/Pattern.scala | 25 ++++----- .../thehive/controllers/v1/PatternCtrl.scala | 11 ++-- .../thp/thehive/controllers/v1/Router.scala | 8 +-- .../org/thp/thehive/models/Permissions.scala | 54 +++++++++---------- .../models/TheHiveSchemaDefinition.scala | 8 ++- .../org/thp/thehive/services/PatternSrv.scala | 7 +-- .../controllers/v1/PatternCtrlTest.scala | 2 +- .../controllers/v1/ProcedureCtrlTest.scala | 6 +-- .../thehive/controllers/v1/UserCtrlTest.scala | 1 + 9 files changed, 59 insertions(+), 63 deletions(-) diff --git a/dto/src/main/scala/org/thp/thehive/dto/v1/Pattern.scala b/dto/src/main/scala/org/thp/thehive/dto/v1/Pattern.scala index c1220dade9..306aa8bea9 100644 --- 
a/dto/src/main/scala/org/thp/thehive/dto/v1/Pattern.scala +++ b/dto/src/main/scala/org/thp/thehive/dto/v1/Pattern.scala @@ -19,7 +19,7 @@ case class InputPattern( case class InputReference( source_name: String, - external_id: String, + external_id: Option[String], url: String ) @@ -33,11 +33,11 @@ object InputReference { for { source_name <- (json \ "source_name").validate[String] external_id <- (json \ "external_id").validateOpt[String] - url <- (json \ "url").validateOpt[String] + url <- (json \ "url").validate[String] } yield InputReference( source_name, - external_id.getOrElse(""), - url.getOrElse("") + external_id, + url ) } @@ -45,15 +45,7 @@ object InputReference { } object InputKillChainPhase { - implicit val reads: Reads[InputKillChainPhase] = Reads[InputKillChainPhase] { json => - for { - kill_chain_name <- (json \ "kill_chain_name").validate[String] - phase_name <- (json \ "phase_name").validate[String] - } yield InputKillChainPhase( - kill_chain_name, - phase_name - ) - } + implicit val reads: Reads[InputKillChainPhase] = Json.reads[InputKillChainPhase] implicit val writes: Writes[InputKillChainPhase] = Json.writes[InputKillChainPhase] } @@ -62,7 +54,7 @@ object InputPattern { implicit val reads: Reads[InputPattern] = Reads[InputPattern] { json => for { references <- (json \ "external_references").validate[Seq[InputReference]] - mitreReference = references.find(_.source_name == "mitre-attack") + mitreReference = references.find(ref => isSourceNameValid(ref.source_name)) name <- (json \ "name").validate[String] description <- (json \ "description").validateOpt[String] kill_chain_phases <- (json \ "kill_chain_phases").validateOpt[Seq[InputKillChainPhase]] @@ -72,7 +64,7 @@ object InputPattern { x_mitre_is_subtechnique <- (json \ "x_mitre_is_subtechnique").validateOpt[Boolean] x_mitre_version <- (json \ "x_mitre_version").validateOpt[String] } yield InputPattern( - mitreReference.map(_.external_id).getOrElse(""), + mitreReference.flatMap(_.external_id).getOrElse(""), name, description, kill_chain_phases.getOrElse(Seq()), @@ -85,6 +77,9 @@ object InputPattern { ) } + private def isSourceNameValid(reference: String): Boolean = + reference == "mitre-attack" + implicit val writes: Writes[InputPattern] = Json.writes[InputPattern] } diff --git a/thehive/app/org/thp/thehive/controllers/v1/PatternCtrl.scala b/thehive/app/org/thp/thehive/controllers/v1/PatternCtrl.scala index 2e07e25645..b06c03f848 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/PatternCtrl.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/PatternCtrl.scala @@ -101,9 +101,8 @@ class PatternCtrl @Inject() ( private def parseJsonFile(file: FFile): Try[Seq[InputPattern]] = for { - stream <- Try(new FileInputStream(file.filepath.toString)) - json = Json.parse(stream) - } yield (json \ "objects").get.as[Seq[InputPattern]] + json <- Try(Json.parse(new FileInputStream(file.filepath.toString))) + } yield (json \ "objects").as[Seq[InputPattern]] private def createFromInput(inputPattern: InputPattern)(implicit graph: Graph, authContext: AuthContext): Try[Pattern with Entity] = if (inputPattern.external_id.isEmpty) @@ -118,10 +117,10 @@ class PatternCtrl @Inject() ( private def linkPattern(child: Pattern with Entity)(implicit graph: Graph, authContext: AuthContext): Try[Unit] = { val firstDot = child.patternId.indexOf(".") - val parentId = child.patternId.substring(0, firstDot) for { - parent <- patternSrv.startTraversal.getByPatternId(parentId).getOrFail("Pattern") - _ <- patternSrv.setParent(child, parent) + parentId 
<- Try(child.patternId.substring(0, firstDot)) + parent <- patternSrv.startTraversal.getByPatternId(parentId).getOrFail("Pattern") + _ <- patternSrv.setParent(child, parent) } yield () } } diff --git a/thehive/app/org/thp/thehive/controllers/v1/Router.scala b/thehive/app/org/thp/thehive/controllers/v1/Router.scala index dc74300232..168535502d 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/Router.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/Router.scala @@ -133,10 +133,10 @@ class Router @Inject() ( // POST /audit/_search controllers.AuditCtrl.find() // POST /audit/_stats controllers.AuditCtrl.stats() - case POST(p"/pattern/import/mitre") => patternCtrl.importMitre - case GET(p"/pattern/$patternId") => patternCtrl.get(patternId) - case GET(p"/pattern/case/$caseId") => patternCtrl.getCasePatterns(caseId) - case DELETE(p"/pattern/$patternId") => patternCtrl.delete(patternId) + case POST(p"/pattern/import/attack") => patternCtrl.importMitre + case GET(p"/pattern/$patternId") => patternCtrl.get(patternId) + case GET(p"/pattern/case/$caseId") => patternCtrl.getCasePatterns(caseId) + case DELETE(p"/pattern/$patternId") => patternCtrl.delete(patternId) case POST(p"/procedure") => procedureCtrl.create case GET(p"/procedure/$procedureId") => procedureCtrl.get(procedureId) diff --git a/thehive/app/org/thp/thehive/models/Permissions.scala b/thehive/app/org/thp/thehive/models/Permissions.scala index e889467b6c..14ef1b1a0b 100644 --- a/thehive/app/org/thp/thehive/models/Permissions.scala +++ b/thehive/app/org/thp/thehive/models/Permissions.scala @@ -3,49 +3,49 @@ package org.thp.thehive.models import org.thp.scalligraph.auth.{Permission, PermissionDesc, Permissions => Perms} object Permissions extends Perms { - lazy val manageCase: PermissionDesc = PermissionDesc("manageCase", "Manage cases", "organisation") - lazy val manageObservable: PermissionDesc = PermissionDesc("manageObservable", "Manage observables", "organisation") + lazy val accessTheHiveFS: PermissionDesc = PermissionDesc("accessTheHiveFS", "Access to TheHiveFS", "organisation") + lazy val manageAction: PermissionDesc = PermissionDesc("manageAction", "Run Cortex responders ", "organisation") lazy val manageAlert: PermissionDesc = PermissionDesc("manageAlert", "Manage alerts", "organisation") - lazy val manageUser: PermissionDesc = PermissionDesc("manageUser", "Manage users", "organisation", "admin") - lazy val manageOrganisation: PermissionDesc = PermissionDesc("manageOrganisation", "Manage organisations", "admin") - lazy val manageCaseTemplate: PermissionDesc = PermissionDesc("manageCaseTemplate", "Manage case templates", "organisation") + lazy val manageAnalyse: PermissionDesc = PermissionDesc("manageAnalyse", "Run Cortex analyzer", "organisation") lazy val manageAnalyzerTemplate: PermissionDesc = PermissionDesc("manageAnalyzerTemplate", "Manage analyzer templates", "admin") - lazy val manageTask: PermissionDesc = PermissionDesc("manageTask", "Manage tasks", "organisation") - lazy val manageAction: PermissionDesc = PermissionDesc("manageAction", "Run Cortex responders ", "organisation") + lazy val manageCase: PermissionDesc = PermissionDesc("manageCase", "Manage cases", "organisation") + lazy val manageCaseTemplate: PermissionDesc = PermissionDesc("manageCaseTemplate", "Manage case templates", "organisation") lazy val manageConfig: PermissionDesc = PermissionDesc("manageConfig", "Manage configurations", "organisation", "admin") - lazy val manageProfile: PermissionDesc = PermissionDesc("manageProfile", "Manage user 
profiles", "admin") - lazy val manageTag: PermissionDesc = PermissionDesc("manageTag", "Manage tags", "admin") lazy val manageCustomField: PermissionDesc = PermissionDesc("manageCustomField", "Manage custom fields", "admin") - lazy val manageShare: PermissionDesc = PermissionDesc("manageShare", "Manage shares", "organisation") - lazy val manageAnalyse: PermissionDesc = PermissionDesc("manageAnalyse", "Run Cortex analyzer", "organisation") - lazy val managePage: PermissionDesc = PermissionDesc("managePage", "Manage pages", "organisation") + lazy val manageObservable: PermissionDesc = PermissionDesc("manageObservable", "Manage observables", "organisation") lazy val manageObservableTemplate: PermissionDesc = PermissionDesc("manageObservableTemplate", "Manage observable types", "admin") + lazy val manageOrganisation: PermissionDesc = PermissionDesc("manageOrganisation", "Manage organisations", "admin") + lazy val managePage: PermissionDesc = PermissionDesc("managePage", "Manage pages", "organisation") lazy val managePattern: PermissionDesc = PermissionDesc("managePattern", "Manage patterns", "admin") - lazy val manageProcedure: PermissionDesc = PermissionDesc("manageProcedure", "Manage procedures", "admin") - lazy val accessTheHiveFS: PermissionDesc = PermissionDesc("accessTheHiveFS", "Access to TheHiveFS", "organisation") + lazy val manageProcedure: PermissionDesc = PermissionDesc("manageProcedure", "Manage procedures", "organisation") + lazy val manageProfile: PermissionDesc = PermissionDesc("manageProfile", "Manage user profiles", "admin") + lazy val manageShare: PermissionDesc = PermissionDesc("manageShare", "Manage shares", "organisation") + lazy val manageTag: PermissionDesc = PermissionDesc("manageTag", "Manage tags", "admin") + lazy val manageTask: PermissionDesc = PermissionDesc("manageTask", "Manage tasks", "organisation") + lazy val manageUser: PermissionDesc = PermissionDesc("manageUser", "Manage users", "organisation", "admin") lazy val list: Set[PermissionDesc] = Set( - manageCase, - manageObservable, + accessTheHiveFS, + manageAction, manageAlert, - manageUser, - manageOrganisation, - manageCaseTemplate, + manageAnalyse, manageAnalyzerTemplate, - manageTask, - manageAction, + manageCase, + manageCaseTemplate, manageConfig, - manageProfile, - manageTag, manageCustomField, - manageShare, - manageAnalyse, - managePage, + manageObservable, manageObservableTemplate, + manageOrganisation, + managePage, managePattern, manageProcedure, - accessTheHiveFS + manageProfile, + manageShare, + manageTag, + manageTask, + manageUser ) // These permissions are available only if the user is in admin organisation, they are removed for other organisations diff --git a/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala b/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala index 0cef21f50b..d67dee2950 100644 --- a/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala +++ b/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala @@ -93,8 +93,12 @@ class TheHiveSchemaDefinition @Inject() extends Schema with UpdatableSchema { traversal.unsafeHas("name", "admin").raw.property("permissions", "managePattern").iterate() Success(()) } - .updateGraph("Add manageProcedure permission to admin profile", "Profile") { traversal => - traversal.unsafeHas("name", "admin").raw.property("permissions", "manageProcedure").iterate() + .updateGraph("Add manageProcedure permission to org-admin and analyst profiles", "Profile") { traversal => + traversal + .unsafeHas("name", 
+        .unsafeHas("name", P.within("org-admin", "analyst"))
+        .raw
+        .property("permissions", "manageProcedure")
+        .iterate()
       Success(())
     }

diff --git a/thehive/app/org/thp/thehive/services/PatternSrv.scala b/thehive/app/org/thp/thehive/services/PatternSrv.scala
index f2ac79b937..efb827937c 100644
--- a/thehive/app/org/thp/thehive/services/PatternSrv.scala
+++ b/thehive/app/org/thp/thehive/services/PatternSrv.scala
@@ -26,11 +26,11 @@ class PatternSrv @Inject() (
 ) extends VertexSrv[Pattern] {
   val patternPatternSrv = new EdgeSrv[PatternPattern, Pattern, Pattern]

-  def parentExists(child: Pattern with Entity, parent: Pattern with Entity)(implicit graph: Graph): Boolean =
+  def cannotBeParent(child: Pattern with Entity, parent: Pattern with Entity)(implicit graph: Graph): Boolean =
     child._id == parent._id || get(child).parent.getEntity(parent).exists

   def setParent(child: Pattern with Entity, parent: Pattern with Entity)(implicit authContext: AuthContext, graph: Graph): Try[Unit] =
-    if (parentExists(child, parent)) Success(())
+    if (cannotBeParent(child, parent)) Success(())
     else patternPatternSrv.create(PatternPattern(), parent, child).map(_ => ())

   override def getByName(name: String)(implicit graph: Graph): Traversal.V[Pattern] =
@@ -52,9 +52,6 @@ class PatternSrv @Inject() (

 object PatternOps {
   implicit class PatternOpsDefs(traversal: Traversal.V[Pattern]) {
-    def get(idOrName: EntityIdOrName): Traversal.V[Pattern] =
-      idOrName.fold(traversal.getByIds(_), _ => traversal.limit(0))
-
     def getByPatternId(patternId: String): Traversal.V[Pattern] = traversal.has(_.patternId, patternId)

     def parent: Traversal.V[Pattern] =
diff --git a/thehive/test/org/thp/thehive/controllers/v1/PatternCtrlTest.scala b/thehive/test/org/thp/thehive/controllers/v1/PatternCtrlTest.scala
index 61d689a0d0..3df975824e 100644
--- a/thehive/test/org/thp/thehive/controllers/v1/PatternCtrlTest.scala
+++ b/thehive/test/org/thp/thehive/controllers/v1/PatternCtrlTest.scala
@@ -29,7 +29,7 @@ object TestPattern {
 class PatternCtrlTest extends PlaySpecification with TestAppBuilder {
   "pattern controller" should {
     "import json patterns" in testApp { app =>
-      val request = FakeRequest("POST", "/api/v1/pattern/import/mitre")
+      val request = FakeRequest("POST", "/api/v1/pattern/import/attack")
         .withHeaders("user" -> "admin@thehive.local")
         .withBody(
           AnyContentAsMultipartFormData(
diff --git a/thehive/test/org/thp/thehive/controllers/v1/ProcedureCtrlTest.scala b/thehive/test/org/thp/thehive/controllers/v1/ProcedureCtrlTest.scala
index b12ea199d3..0b7d6fdf89 100644
--- a/thehive/test/org/thp/thehive/controllers/v1/ProcedureCtrlTest.scala
+++ b/thehive/test/org/thp/thehive/controllers/v1/ProcedureCtrlTest.scala
@@ -32,7 +32,7 @@ class ProcedureCtrlTest extends PlaySpecification with TestAppBuilder {

       val request = FakeRequest("POST", "/api/v1/procedure")
         .withJsonBody(Json.toJson(inputProcedure))
-        .withHeaders("user" -> "admin@thehive.local")
+        .withHeaders("user" -> "certadmin@thehive.local")

       val result = app[ProcedureCtrl].create(request)
       status(result) must beEqualTo(201).updateMessage(s => s"$s\n${contentAsString(result)}")
@@ -58,13 +58,13 @@ class ProcedureCtrlTest extends PlaySpecification with TestAppBuilder {
             )
           )
         )
-        .withHeaders("user" -> "admin@thehive.local")
+        .withHeaders("user" -> "certadmin@thehive.local")

       val result1 = app[ProcedureCtrl].create(request1)
       val procedureId = contentAsJson(result1).as[OutputProcedure]._id
       status(result1) must beEqualTo(201).updateMessage(s => s"$s\n${contentAsString(result1)}")

       val request2 = FakeRequest("DELETE", "/api/v1/procedure/testProcedure3")
-        .withHeaders("user" -> "admin@thehive.local")
+        .withHeaders("user" -> "certadmin@thehive.local")
       val result2 = app[ProcedureCtrl].delete(procedureId)(request2)
       status(result2) must beEqualTo(204).updateMessage(s => s"$s\n${contentAsString(result2)}")
diff --git a/thehive/test/org/thp/thehive/controllers/v1/UserCtrlTest.scala b/thehive/test/org/thp/thehive/controllers/v1/UserCtrlTest.scala
index 8a5773b794..fc5bd6cc82 100644
--- a/thehive/test/org/thp/thehive/controllers/v1/UserCtrlTest.scala
+++ b/thehive/test/org/thp/thehive/controllers/v1/UserCtrlTest.scala
@@ -107,6 +107,7 @@ class UserCtrlTest extends PlaySpecification with TestAppBuilder {
             Permissions.manageCase,
             Permissions.manageUser,
             Permissions.managePage,
+            Permissions.manageProcedure,
             Permissions.manageObservable,
             Permissions.manageAlert,
             Permissions.manageAction,

From aacef353389fa47b9d3aa24b5eea71fb7dae350c Mon Sep 17 00:00:00 2001
From: Robin Riclet
Date: Wed, 20 Jan 2021 15:26:08 +0100
Subject: [PATCH 90/93] Review changes

---
 .../thp/thehive/controllers/v1/PatternCtrl.scala | 14 +++++++++-----
 1 file changed, 9 insertions(+), 5 deletions(-)

diff --git a/thehive/app/org/thp/thehive/controllers/v1/PatternCtrl.scala b/thehive/app/org/thp/thehive/controllers/v1/PatternCtrl.scala
index b06c03f848..3c77c80d38 100644
--- a/thehive/app/org/thp/thehive/controllers/v1/PatternCtrl.scala
+++ b/thehive/app/org/thp/thehive/controllers/v1/PatternCtrl.scala
@@ -117,10 +117,14 @@ class PatternCtrl @Inject() (

   private def linkPattern(child: Pattern with Entity)(implicit graph: Graph, authContext: AuthContext): Try[Unit] = {
     val firstDot = child.patternId.indexOf(".")
-    for {
-      parentId <- Try(child.patternId.substring(0, firstDot))
-      parent   <- patternSrv.startTraversal.getByPatternId(parentId).getOrFail("Pattern")
-      _        <- patternSrv.setParent(child, parent)
-    } yield ()
+    if (firstDot == -1)
+      Failure(BadRequestError(s"Invalid sub-pattern patternId ${child.patternId} (must contain a dot)"))
+    else {
+      val parentId = child.patternId.substring(0, firstDot)
+      for {
+        parent <- patternSrv.startTraversal.getByPatternId(parentId).getOrFail("Pattern")
+        _      <- patternSrv.setParent(child, parent)
+      } yield ()
+    }
   }
 }

From dd794e5013fd7d4ce58f67ac4a7679eef27b8e38 Mon Sep 17 00:00:00 2001
From: Robin Riclet
Date: Wed, 20 Jan 2021 17:53:50 +0100
Subject: [PATCH 91/93] Fixed audit for Procedure

---
 thehive/app/org/thp/thehive/services/AuditSrv.scala  |  2 +-
 .../app/org/thp/thehive/services/ProcedureSrv.scala  | 10 +++++++---
 2 files changed, 8 insertions(+), 4 deletions(-)

diff --git a/thehive/app/org/thp/thehive/services/AuditSrv.scala b/thehive/app/org/thp/thehive/services/AuditSrv.scala
index c020a2969d..8863e34e3a 100644
--- a/thehive/app/org/thp/thehive/services/AuditSrv.scala
+++ b/thehive/app/org/thp/thehive/services/AuditSrv.scala
@@ -47,7 +47,7 @@ class AuditSrv @Inject() (
   val organisation = new SelfContextObjectAudit[Organisation]
   val page = new SelfContextObjectAudit[Page]
   val pattern = new SelfContextObjectAudit[Pattern]
-  val procedure = new SelfContextObjectAudit[Procedure]
+  val procedure = new ObjectAudit[Procedure, Case]
   val profile = new SelfContextObjectAudit[Profile]
   val share = new ShareAudit
   val task = new SelfContextObjectAudit[Task]
diff --git a/thehive/app/org/thp/thehive/services/ProcedureSrv.scala b/thehive/app/org/thp/thehive/services/ProcedureSrv.scala
index 08245e5bc8..72b408cbb8 100644
--- a/thehive/app/org/thp/thehive/services/ProcedureSrv.scala
+++ b/thehive/app/org/thp/thehive/services/ProcedureSrv.scala
@@ -8,6 +8,7 @@ import org.thp.scalligraph.services._
 import org.thp.scalligraph.traversal.TraversalOps.TraversalOpsDefs
 import org.thp.scalligraph.traversal.{Converter, StepLabel, Traversal}
 import org.thp.thehive.controllers.v1.Conversion._
+import org.thp.thehive.services.ProcedureOps._
 import org.thp.thehive.models._

 import java.util.{Map => JMap}
@@ -34,7 +35,7 @@ class ProcedureSrv @Inject() (
       _ <- caseProcedureSrv.create(CaseProcedure(), caze, procedure)
       _ <- procedurePatternSrv.create(ProcedurePattern(), procedure, pattern)
       richProcedure = RichProcedure(procedure, pattern)
-      _ <- auditSrv.procedure.create(procedure, richProcedure.toJson)
+      _ <- auditSrv.procedure.create(procedure, caze, richProcedure.toJson)
     } yield richProcedure

   override def get(idOrName: EntityIdOrName)(implicit graph: Graph): Traversal.V[Procedure] =
@@ -42,8 +43,8 @@ class ProcedureSrv @Inject() (

   def remove(procedure: Procedure with Entity)(implicit graph: Graph, authContext: AuthContext): Try[Unit] =
     for {
-      organisation <- organisationSrv.getOrFail(authContext.organisation)
-      _            <- auditSrv.procedure.delete(procedure, organisation)
+      caze <- get(procedure).caze.getOrFail("Case")
+      _    <- auditSrv.procedure.delete(procedure, Some(caze))
     } yield get(procedure).remove()

 }
@@ -54,6 +55,9 @@ object ProcedureOps {
     def pattern: Traversal.V[Pattern] =
       traversal.out[ProcedurePattern].v[Pattern]

+    def caze: Traversal.V[Case] =
+      traversal.in[CaseProcedure].v[Case]
+
     def richProcedure: Traversal[RichProcedure, JMap[String, Any], Converter[RichProcedure, JMap[String, Any]]] = {
       val procedure = StepLabel.v[Procedure]
       val pattern = StepLabel.v[Pattern]

From c80e40563305d38d18e3885d2fcda51da9705d60 Mon Sep 17 00:00:00 2001
From: To-om
Date: Thu, 21 Jan 2021 14:25:00 +0100
Subject: [PATCH 92/93] Rename pattern version to revision to prevent property conflict with taxonomy.version

---
 .../app/org/thp/thehive/controllers/v1/Conversion.scala | 8 ++++----
 thehive/app/org/thp/thehive/models/Pattern.scala        | 4 ++--
 thehive/test/resources/data/Pattern.json                | 6 +++---
 3 files changed, 9 insertions(+), 9 deletions(-)

diff --git a/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala b/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala
index e8de063541..79d6957549 100644
--- a/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala
+++ b/thehive/app/org/thp/thehive/controllers/v1/Conversion.scala
@@ -272,7 +272,8 @@ object Conversion {

   implicit val taxonomyWithStatsOutput: Renderer.Aux[(RichTaxonomy, JsObject), OutputTaxonomy] =
     Renderer.toJson[(RichTaxonomy, JsObject), OutputTaxonomy] { taxoWithExtraData =>
-      taxoWithExtraData._1
+      taxoWithExtraData
+        ._1
         .into[OutputTaxonomy]
         .withFieldComputed(_._id, _._id.toString)
         .withFieldConst(_._type, "Taxonomy")
@@ -283,8 +284,7 @@ object Conversion {

   implicit val tagOutput: Renderer.Aux[Tag, OutputTag] =
     Renderer.toJson[Tag, OutputTag](
-      _.into[OutputTag]
-        .transform
+      _.into[OutputTag].transform
     )

   implicit class InputUserOps(inputUser: InputUser) {
@@ -503,7 +503,7 @@ object Conversion {
       .withFieldRenamed(_.`type`, _.patternType)
       .withFieldRenamed(_.x_mitre_platforms, _.platforms)
       .withFieldRenamed(_.x_mitre_data_sources, _.dataSources)
-      .withFieldRenamed(_.x_mitre_version, _.version)
+      .withFieldRenamed(_.x_mitre_version, _.revision)
       .transform
   }

diff --git a/thehive/app/org/thp/thehive/models/Pattern.scala b/thehive/app/org/thp/thehive/models/Pattern.scala
index 3713901a05..bb03474404 100644
--- a/thehive/app/org/thp/thehive/models/Pattern.scala
+++ b/thehive/app/org/thp/thehive/models/Pattern.scala
@@ -15,7 +15,7 @@ case class Pattern(
     patternType: String,
     platforms: Seq[String],
     dataSources: Seq[String],
-    version: Option[String]
+    revision: Option[String]
 )

 @BuildEdgeEntity[Pattern, Pattern]
@@ -30,7 +30,7 @@ case class RichPattern(pattern: Pattern with Entity, parent: Option[Pattern with
   def patternType: String = pattern.patternType
   def platforms: Seq[String] = pattern.platforms
   def dataSources: Seq[String] = pattern.dataSources
-  def version: Option[String] = pattern.version
+  def version: Option[String] = pattern.revision
   def _id: EntityId = pattern._id
   def _createdAt: Date = pattern._createdAt
   def _createdBy: String = pattern._createdBy
diff --git a/thehive/test/resources/data/Pattern.json b/thehive/test/resources/data/Pattern.json
index f1e0166ca6..9a4a23e2ef 100644
--- a/thehive/test/resources/data/Pattern.json
+++ b/thehive/test/resources/data/Pattern.json
@@ -12,7 +12,7 @@
     "patternType": "unit-test",
     "platforms": [],
     "dataSources": [],
-    "version": "1.0"
+    "revision": "1.0"
   },
   {
     "id": "testPattern2",
@@ -27,6 +27,6 @@
     "patternType": "unit-test",
     "platforms": [],
     "dataSources": [],
-    "version": "1.1"
+    "revision": "1.1"
   }
-]
\ No newline at end of file
+]

From 69b951d399559278f950630d5009d94123a627ad Mon Sep 17 00:00:00 2001
From: To-om
Date: Fri, 22 Jan 2021 18:22:54 +0100
Subject: [PATCH 93/93] #1759 Fix user deduplication checks

---
 thehive/app/org/thp/thehive/services/UserSrv.scala | 6 +-----
 1 file changed, 1 insertion(+), 5 deletions(-)

diff --git a/thehive/app/org/thp/thehive/services/UserSrv.scala b/thehive/app/org/thp/thehive/services/UserSrv.scala
index b236698813..6cfff5b823 100644
--- a/thehive/app/org/thp/thehive/services/UserSrv.scala
+++ b/thehive/app/org/thp/thehive/services/UserSrv.scala
@@ -41,11 +41,6 @@ class UserSrv @Inject() (

   val userAttachmentSrv = new EdgeSrv[UserAttachment, User, Attachment]

-  override def createEntity(e: User)(implicit graph: Graph, authContext: AuthContext): Try[User with Entity] = {
-    integrityCheckActor ! EntityAdded("User")
-    super.createEntity(e)
-  }
-
   def checkUser(user: User): Try[User] = {
     val login =
       if (!user.login.contains('@') && defaultUserDomain.isDefined) s"${user.login}@${defaultUserDomain.get}".toLowerCase
@@ -64,11 +59,12 @@ class UserSrv @Inject() (
           roleSrv.create(user, organisation, profile)
         else
           Success(())).flatMap { _ =>
+        integrityCheckActor ! EntityAdded("User")
         for {
           richUser <- get(user).richUser(authContext, organisation._id).getOrFail("User")
           _ <- auditSrv.user.create(user, richUser.toJson)