#1731 Fix some tests
To-om committed Mar 1, 2021
1 parent 02e6ef7 commit 8183457
Showing 13 changed files with 358 additions and 149 deletions.
9 changes: 6 additions & 3 deletions thehive/app/org/thp/thehive/services/TaxonomySrv.scala
@@ -17,7 +17,7 @@ import javax.inject.{Inject, Provider, Singleton}
import scala.util.{Failure, Success, Try}

@Singleton
class TaxonomySrv @Inject() (organisationSrvProvider: Provider[OrganisationSrv]) extends VertexSrv[Taxonomy] {
class TaxonomySrv @Inject() (organisationSrvProvider: Provider[OrganisationSrv], tagSrv: TagSrv) extends VertexSrv[Taxonomy] {

lazy val organisationSrv: OrganisationSrv = organisationSrvProvider.get
val taxonomyTagSrv = new EdgeSrv[TaxonomyTag, Taxonomy, Tag]
@@ -80,8 +80,11 @@ object TaxonomyOps {
else
traversal.filter(_.organisations.get(authContext.organisation))

private def noFreetags: Traversal.V[Taxonomy] =
traversal.filterNot(_.has(_.namespace, TextP.startingWith("_freetags")))
def noFreetags: Traversal.V[Taxonomy] =
traversal.has(_.namespace, TextP.notStartingWith("_freetags"))

def freetags: Traversal.V[Taxonomy] =
traversal.has(_.namespace, TextP.startingWith("_freetags"))

def alreadyImported(namespace: String): Boolean =
traversal.getByNamespace(namespace).exists
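
The helpers above replace a negated filter (filterNot over TextP.startingWith) with the direct negative prefix predicate, and expose both directions of the test. A minimal sketch of the two predicates, assuming only the standard TinkerPop TextP API; the namespace values are illustrative, not taken from the commit:

import org.apache.tinkerpop.gremlin.process.traversal.TextP

object FreetagPredicateSketch extends App {
  // Positive prefix match: selects the per-organisation free-tag taxonomies.
  val isFreetag = TextP.startingWith("_freetags")

  // Negative prefix match: selects every other namespace with a single predicate,
  // instead of wrapping the positive match in filterNot.
  val isNotFreetag = TextP.notStartingWith("_freetags")

  // Illustrative checks (namespace values are made up):
  assert(isFreetag.test("_freetags-orgA"))
  assert(isNotFreetag.test("misp"))
}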
184 changes: 181 additions & 3 deletions thehive/test/org/thp/thehive/DatabaseBuilder.scala
@@ -1,13 +1,18 @@
package org.thp.thehive

import org.apache.tinkerpop.gremlin.structure.Vertex
import org.apache.tinkerpop.gremlin.structure.VertexProperty.Cardinality
import org.scalactic.Or
import org.thp.scalligraph.auth.AuthContext
import org.thp.scalligraph.controllers._
import org.thp.scalligraph.models.{Database, Entity, Schema}
import org.thp.scalligraph.models.{Database, Entity, Schema, UMapping}
import org.thp.scalligraph.services.{EdgeSrv, GenIntegrityCheckOps, VertexSrv}
import org.thp.scalligraph.traversal.Graph
import org.thp.scalligraph.traversal.TraversalOps._
import org.thp.scalligraph.traversal.{Converter, Graph}
import org.thp.scalligraph.{EntityId, EntityName, RichOption}
import org.thp.thehive.models._
import org.thp.thehive.services.OrganisationOps._
import org.thp.thehive.services.TaxonomyOps._
import org.thp.thehive.services._
import play.api.Logger
import play.api.libs.json.{JsArray, JsObject, JsValue, Json}
@@ -106,7 +111,7 @@ class DatabaseBuilder @Inject() (
createEdge(roleSrv.roleOrganisationSrv, roleSrv, organisationSrv, FieldsParser[RoleOrganisation], idMap)
createEdge(roleSrv.roleProfileSrv, roleSrv, profileSrv, FieldsParser[RoleProfile], idMap)

// createEdge(observableSrv.observableKeyValueSrv, observableSrv, keyValueSrv, FieldsParser[ObservableKeyValue], idMap)
// createEdge(observableSrv.observableKeyValueSrv, observableSrv, keyValueSrv, FieldsParser[ObservableKeyValue], idMap)
createEdge(observableSrv.observableObservableType, observableSrv, observableTypeSrv, FieldsParser[ObservableObservableType], idMap)
createEdge(observableSrv.observableDataSrv, observableSrv, dataSrv, FieldsParser[ObservableData], idMap)
createEdge(observableSrv.observableAttachmentSrv, observableSrv, attachmentSrv, FieldsParser[ObservableAttachment], idMap)
@@ -145,6 +150,179 @@
createEdge(procedureSrv.caseProcedureSrv, caseSrv, procedureSrv, FieldsParser[CaseProcedure], idMap)
createEdge(procedureSrv.procedurePatternSrv, procedureSrv, patternSrv, FieldsParser[ProcedurePattern], idMap)

// For each organisation, if there is no custom taxonomy, create it
organisationSrv
.startTraversal
.hasNot(_.name, "admin")
.filterNot(_.taxonomies.freetags)
.foreach(o => taxonomySrv.createFreetag(o))

// TODO: get tags from entity and create freetag for each
// // Add each tag to its Organisation's FreeTags taxonomy
// caseSrv
// .startTraversal
// .project(_.by
// .by(_.value(_.tags).fold)
// .by(_.organisations.taxonomies.freetags))
// .foreach {
// case (case0, tags, freeTaxo) =>
// for {
// t <- tags.toTry(tagSrv.getOrCreate)
// _ <- t.toTry(caseSrv.caseTagSrv.create(CaseTag(), case0, _))
// }
// }

alertSrv
.startTraversal
.setConverter[Vertex, Converter.Identity[Vertex]](Converter.identity[Vertex])
.project(
_.by
.by(_.out("AlertTag").valueMap("namespace", "predicate", "value").fold)
.by(_.out("AlertOrganisation")._id.option)
.by(_.out("AlertCase")._id.option)
)
.foreach {
case (vertex, tagMaps, Some(organisationId), caseId) =>
val tags = for {
tag <- tagMaps.asInstanceOf[Seq[Map[String, String]]]
namespace = tag.getOrElse("namespace", "_autocreate")
predicate <- tag.get("predicate")
value = tag.get("value")
} yield
(if (namespace.headOption.getOrElse('_') == '_') "" else namespace + ':') +
(if (predicate.headOption.getOrElse('_') == '_') "" else predicate) +
value.fold("")(v => f"""="$v"""")

tags.foreach(vertex.property(Cardinality.list, "tags", _))
vertex.property("organisationId", organisationId.value)
caseId.foreach(vertex.property("caseId", _))
case _ =>
}

caseSrv
.startTraversal
.setConverter[Vertex, Converter.Identity[Vertex]](Converter.identity[Vertex])
.project(
_.by
.by(_.out("CaseTag").valueMap("namespace", "predicate", "value").fold)
.by(_.out("CaseUser").property("login", UMapping.string).option)
.by(_.in("ShareCase").in("OrganisationShare")._id.fold)
.by(_.out("CaseImpactStatus").property("value", UMapping.string).option)
.by(_.out("CaseResolutionStatus").property("value", UMapping.string).option)
.by(_.out("CaseCaseTemplate").property("name", UMapping.string).option)
)
.foreach {
case (vertex, tagMaps, assignee, organisationIds, impactStatus, resolutionStatus, caseTemplate) =>
val tags = for {
tag <- tagMaps.asInstanceOf[Seq[Map[String, String]]]
namespace = tag.getOrElse("namespace", "_autocreate")
predicate <- tag.get("predicate")
value = tag.get("value")
} yield
(if (namespace.headOption.getOrElse('_') == '_') "" else namespace + ':') +
(if (predicate.headOption.getOrElse('_') == '_') "" else predicate) +
value.fold("")(v => f"""="$v"""")

tags.foreach(vertex.property(Cardinality.list, "tags", _))
assignee.foreach(vertex.property("assignee", _))
organisationIds.foreach(id => vertex.property(Cardinality.set, "organisationIds", id.value))
impactStatus.foreach(vertex.property("impactStatus", _))
resolutionStatus.foreach(vertex.property("resolutionStatus", _))
caseTemplate.foreach(vertex.property("caseTemplate", _))
}

caseTemplateSrv
.startTraversal
.setConverter[Vertex, Converter.Identity[Vertex]](Converter.identity[Vertex])
.project(
_.by
.by(_.out("CaseTemplateTag").valueMap("namespace", "predicate", "value").fold)
)
.foreach {
case (vertex, tagMaps) =>
val tags = for {
tag <- tagMaps.asInstanceOf[Seq[Map[String, String]]]
namespace = tag.getOrElse("namespace", "_autocreate")
predicate <- tag.get("predicate")
value = tag.get("value")
} yield
(if (namespace.headOption.getOrElse('_') == '_') "" else namespace + ':') +
(if (predicate.headOption.getOrElse('_') == '_') "" else predicate) +
value.fold("")(v => f"""="$v"""")

tags.foreach(vertex.property(Cardinality.list, "tags", _))
}

logSrv
.startTraversal
.setConverter[Vertex, Converter.Identity[Vertex]](Converter.identity[Vertex])
.project(
_.by
.by(_.in("TaskLog")._id)
.by(_.in("TaskLog").in("ShareTask").in("OrganisationShare")._id.fold)
)
.foreach {
case (vertex, taskId, organisationIds) =>
vertex.property("taskId", taskId)
organisationIds.foreach(id => vertex.property(Cardinality.set, "organisationIds", id.value))
}

observableSrv
.startTraversal
.setConverter[Vertex, Converter.Identity[Vertex]](Converter.identity[Vertex])
.project(
_.by
.by(_.out("ObservableObservableType").property("name", UMapping.string))
.by(_.out("ObservableTag").valueMap("namespace", "predicate", "value").fold)
.by(_.out("ObservableData").property("data", UMapping.string).option)
.by(_.out("ObservableAttachment").property("attachmentId", UMapping.string).option)
.by(_.coalesceIdent(_.in("ShareObservable").out("ShareCase"), _.in("AlertObservable"), _.in("ReportObservable"))._id.option)
.by(
_.coalesceIdent(
_.optional(_.in("ReportObservable").in("ObservableJob")).in("ShareObservable").in("OrganisationShare"),
_.in("AlertObservable").out("AlertOrganisation")
)
._id
.fold
)
)
.foreach {
case (vertex, dataType, tagMaps, data, attachmentId, Some(relatedId), organisationIds) =>
val tags = for {
tag <- tagMaps.asInstanceOf[Seq[Map[String, String]]]
namespace = tag.getOrElse("namespace", "_autocreate")
predicate <- tag.get("predicate")
value = tag.get("value")
} yield
(if (namespace.headOption.getOrElse('_') == '_') "" else namespace + ':') +
(if (predicate.headOption.getOrElse('_') == '_') "" else predicate) +
value.fold("")(v => f"""="$v"""")

vertex.property("dataType", dataType)
tags.foreach(vertex.property(Cardinality.list, "tags", _))
data.foreach(vertex.property("data", _))
attachmentId.foreach(vertex.property("attachmentId", _))
vertex.property("relatedId", relatedId.value)
organisationIds.foreach(id => vertex.property(Cardinality.set, "organisationIds", id.value))
case _ =>
}

taskSrv
.startTraversal
.setConverter[Vertex, Converter.Identity[Vertex]](Converter.identity[Vertex])
.project(
_.by
.by(_.out("TaskUser").property("login", UMapping.string).option)
.by(_.coalesceIdent(_.in("ShareTask").out("ShareCase"), _.in("CaseTemplateTask"))._id.option)
.by(_.coalesceIdent(_.in("ShareTask").in("OrganisationShare"), _.in("CaseTemplateTask").out("CaseTemplateOrganisation"))._id.fold)
)
.foreach {
case (vertex, assignee, Some(relatedId), organisationIds) =>
assignee.foreach(vertex.property("assignee", _))
vertex.property("relatedId", relatedId.value)
organisationIds.foreach(id => vertex.property(Cardinality.set, "organisationIds", id.value))
case _ =>
}
Success(())
}
}
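
The migration blocks above repeat one flattening rule when they turn AlertTag, CaseTag, CaseTemplateTag and ObservableTag vertices into plain string tags: a namespace or predicate starting with '_' is dropped, and the value, if any, is appended in quotes. A standalone sketch of that rule, using a hypothetical formatTag helper that is not part of the commit:

object TagFormatSketch {
  // Hypothetical helper mirroring the tag-flattening rule used in DatabaseBuilder.
  // Input: the "namespace"/"predicate"/"value" properties read from a tag vertex.
  def formatTag(tag: Map[String, String]): Option[String] =
    tag.get("predicate").map { predicate =>
      val namespace = tag.getOrElse("namespace", "_autocreate")
      val value     = tag.get("value")
      (if (namespace.headOption.getOrElse('_') == '_') "" else namespace + ':') +
        (if (predicate.headOption.getOrElse('_') == '_') "" else predicate) +
        value.fold("")(v => s"""="$v"""")
    }

  // Illustrative results (inputs are made up):
  // formatTag(Map("namespace" -> "misp", "predicate" -> "tlp", "value" -> "amber"))  == Some("""misp:tlp="amber"""")
  // formatTag(Map("namespace" -> "_freetags-org", "predicate" -> "urgent"))          == Some("urgent")
}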
41 changes: 23 additions & 18 deletions thehive/test/org/thp/thehive/TestAppBuilder.scala
@@ -1,24 +1,21 @@
package org.thp.thehive

import java.io.File
import java.nio.file.{Files, Paths}
import akka.actor.ActorSystem
import com.google.inject.Injector

import javax.inject.{Inject, Provider, Singleton}
import org.apache.commons.io.FileUtils
import org.thp.scalligraph.auth._
import org.thp.scalligraph.janus.JanusDatabase
import org.thp.scalligraph.models.{Database, Schema}
import org.thp.scalligraph.janus.JanusDatabaseProvider
import org.thp.scalligraph.models.{Database, Schema, UpdatableSchema}
import org.thp.scalligraph.query.QueryExecutor
import org.thp.scalligraph.services.{GenIntegrityCheckOps, LocalFileSystemStorageSrv, StorageSrv}
import org.thp.scalligraph.AppBuilder
import org.thp.scalligraph.{AppBuilder, SingleInstance}
import org.thp.thehive.controllers.v0.TheHiveQueryExecutor
import org.thp.thehive.models.TheHiveSchemaDefinition
import org.thp.thehive.services.notification.notifiers.{AppendToFileProvider, EmailerProvider, NotifierProvider}
import org.thp.thehive.services.notification.triggers._
import org.thp.thehive.services.{UserSrv => _, _}

import java.io.File
import java.nio.file.{Files, Paths}
import javax.inject.{Inject, Provider, Singleton}
import scala.util.Try

object TestAppBuilderLock
@@ -32,6 +29,7 @@ trait TestAppBuilder {
.bind[UserSrv, LocalUserSrv]
.bind[StorageSrv, LocalFileSystemStorageSrv]
.bind[Schema, TheHiveSchemaDefinition]
.multiBind[UpdatableSchema](classOf[TheHiveSchemaDefinition])
.bindNamed[QueryExecutor, TheHiveQueryExecutor]("v0")
.multiBind[AuthSrvProvider](classOf[LocalPasswordAuthProvider], classOf[LocalKeyAuthProvider], classOf[HeaderAuthProvider])
.multiBind[NotifierProvider](classOf[AppendToFileProvider])
@@ -41,6 +39,7 @@
.multiBind[TriggerProvider](classOf[TaskAssignedProvider])
.multiBind[TriggerProvider](classOf[AlertCreatedProvider])
.bindToProvider[AuthSrv, MultiAuthSrvProvider]
.bindInstance[SingleInstance](new SingleInstance(true))
.multiBind[GenIntegrityCheckOps](
classOf[ProfileIntegrityCheckOps],
classOf[OrganisationIntegrityCheckOps],
@@ -52,7 +51,8 @@
classOf[CustomFieldIntegrityCheckOps],
classOf[CaseTemplateIntegrityCheckOps],
classOf[DataIntegrityCheckOps],
classOf[CaseIntegrityCheckOps]
classOf[CaseIntegrityCheckOps],
classOf[AlertIntegrityCheckOps]
)
.bindActor[DummyActor]("config-actor")
.bindActor[DummyActor]("notification-actor")
@@ -63,10 +63,11 @@
.addConfiguration("play.mailer.mock = yes")
.addConfiguration("play.mailer.debug = yes")
.addConfiguration(s"storage.localfs.location = ${System.getProperty("user.dir")}/target/storage")
.bindEagerly[AkkaGuiceExtensionSetup]
.bindEagerly[ClusterSetup]

def testApp[A](body: AppBuilder => A): A = {
val storageDirectory = Files.createTempDirectory(Paths.get("target"), "janusgraph-test-database").toFile
val indexDirectory = Files.createTempDirectory(Paths.get("target"), storageDirectory.getName).toFile
TestAppBuilderLock.synchronized {
if (!Files.exists(Paths.get(s"target/janusgraph-test-database-$databaseName"))) {
val app = appConfigure
@@ -77,26 +78,35 @@
| storage.backend: berkeleyje
| storage.directory: "target/janusgraph-test-database-$databaseName"
| berkeleyje.freeDisk: 2
| index.search {
| backend : lucene
| directory: target/janusgraph-test-database-$databaseName-idx
| }
| }
|}
|akka.cluster.jmx.multi-mbeans-in-same-jvm: on
|""".stripMargin)
.bind[Database, JanusDatabase]
.bindToProvider[Database, JanusDatabaseProvider]

app[DatabaseBuilder].build()(app[Database], app[UserSrv].getSystemAuthContext)
app[Database].close()
}
FileUtils.copyDirectory(new File(s"target/janusgraph-test-database-$databaseName"), storageDirectory)
FileUtils.copyDirectory(new File(s"target/janusgraph-test-database-$databaseName-idx"), indexDirectory)
}
val app = appConfigure
.bind[Database, JanusDatabase]
.bindToProvider[Database, JanusDatabaseProvider]
.addConfiguration(s"""
|db {
| provider: janusgraph
| janusgraph {
| storage.backend: berkeleyje
| storage.directory: $storageDirectory
| berkeleyje.freeDisk: 2
| index.search {
| backend : lucene
| directory: $indexDirectory
| }
| }
|}
|""".stripMargin)
@@ -113,8 +123,3 @@
class BasicDatabaseProvider @Inject() (database: Database) extends Provider[Database] {
override def get(): Database = database
}

@Singleton
class AkkaGuiceExtensionSetup @Inject() (system: ActorSystem, injector: Injector) {
GuiceAkkaExtension(system).set(injector)
}
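
With the Lucene index directory added, testApp now seeds one reference JanusGraph database per schema (BerkeleyDB storage plus the index) and copies both directories for every test run instead of rebuilding them. A rough sketch of that copy-and-reuse step, assuming commons-io; the directory names follow the diff, while the surrounding locking and wiring are omitted:

import java.io.File
import java.nio.file.{Files, Paths}
import org.apache.commons.io.FileUtils

object TestDatabaseCopySketch extends App {
  val databaseName = "default" // illustrative; the real value comes from the test suite

  // Per-run working copies, created under target/ as in TestAppBuilder.
  val storageDirectory = Files.createTempDirectory(Paths.get("target"), "janusgraph-test-database").toFile
  val indexDirectory   = Files.createTempDirectory(Paths.get("target"), storageDirectory.getName).toFile

  // Reference database built once by DatabaseBuilder, reused as a read-only template.
  val referenceStorage = new File(s"target/janusgraph-test-database-$databaseName")
  val referenceIndex   = new File(s"target/janusgraph-test-database-$databaseName-idx")

  if (referenceStorage.exists) {
    FileUtils.copyDirectory(referenceStorage, storageDirectory)
    FileUtils.copyDirectory(referenceIndex, indexDirectory)
  }
}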
@@ -32,10 +32,10 @@ class ConfigCtrlTest extends PlaySpecification with TestAppBuilder {

"set configuration item" in testApp { app =>
app[TagSrv]
val request = FakeRequest("PUT", "/api/config/tags.defaultColour")
val request = FakeRequest("PUT", "/api/config/tags.freeTagColour")
.withHeaders("user" -> "[email protected]")
.withJsonBody(Json.parse("""{"value": "#00FF00"}"""))
val result = app[ConfigCtrl].set("tags.defaultColour")(request)
val result = app[ConfigCtrl].set("tags.freeTagColour")(request)

status(result) must equalTo(204).updateMessage(s => s"$s\n${contentAsString(result)}")

@@ -4,7 +4,7 @@ import org.thp.scalligraph.{AppBuilder, ScalligraphApplicationLoader}
import org.thp.thehive.models.HealthStatus
import org.thp.thehive.services.Connector
import org.thp.thehive.{TestAppBuilder, TheHiveModule}
import play.api.libs.json.{JsObject, Json}
import play.api.libs.json.{JsNull, JsObject, Json}
import play.api.mvc.AbstractController
import play.api.test.{FakeRequest, PlaySpecification}
import play.api.{Configuration, Environment}
@@ -67,8 +67,10 @@ class StatusCtrlTest extends PlaySpecification with TestAppBuilder {
"authType" -> Seq("local", "key", "header"),
"capabilities" -> Seq("changePassword", "setPassword", "authByKey"),
"ssoAutoLogin" -> config.get[Boolean]("user.autoCreateOnSso"),
"pollingDuration" -> 1000,
"schemaStatus" -> Json.arr()
"pollingDuration" -> 1000
),
"schemaStatus" -> Json.arr(
Json.obj("name" -> "thehive", "currentVersion" -> 54, "expectedVersion" -> 54, "error" -> JsNull)
)
)
