#1731 Database initialisation refactoring

To-om committed Jan 5, 2021
1 parent 4e3e544 commit 36b82a1

Showing 100 changed files with 186 additions and 198 deletions.
@@ -2,10 +2,10 @@ package org.thp.thehive.connector.cortex

 import com.google.inject.AbstractModule
 import net.codingwell.scalaguice.{ScalaModule, ScalaMultibinder}
-import org.thp.scalligraph.models.{Database, Schema}
+import org.thp.scalligraph.models.{Schema, UpdatableSchema}
 import org.thp.scalligraph.query.QueryExecutor
 import org.thp.thehive.connector.cortex.controllers.v0.{CortexQueryExecutor => CortexQueryExecutorV0}
-import org.thp.thehive.connector.cortex.models.{CortexSchemaDefinition, DatabaseProvider}
+import org.thp.thehive.connector.cortex.models.CortexSchemaDefinition
 import org.thp.thehive.connector.cortex.services.notification.notifiers.{RunAnalyzerProvider, RunResponderProvider}
 import org.thp.thehive.connector.cortex.services.{Connector, CortexActor}
 import org.thp.thehive.services.notification.notifiers.NotifierProvider
@@ -24,14 +24,13 @@ class CortexModule(environment: Environment, configuration: Configuration) exten
     queryExecutorBindings.addBinding.to[CortexQueryExecutorV0]
     val connectorBindings = ScalaMultibinder.newSetBinder[TheHiveConnector](binder)
     connectorBindings.addBinding.to[Connector]
-    val schemaBindings = ScalaMultibinder.newSetBinder[Schema](binder)
+    val schemaBindings = ScalaMultibinder.newSetBinder[UpdatableSchema](binder)
     schemaBindings.addBinding.to[CortexSchemaDefinition]

     val notifierBindings = ScalaMultibinder.newSetBinder[NotifierProvider](binder)
     notifierBindings.addBinding.to[RunResponderProvider]
     notifierBindings.addBinding.to[RunAnalyzerProvider]

-    bind[Database].annotatedWithName("with-thehive-cortex-schema").toProvider[DatabaseProvider]
     bindActor[CortexActor]("cortex-actor")
     ()
   }
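This first hunk is the heart of the refactoring: instead of each connector exposing its own name-qualified `Database` (wrapped by a dedicated `DatabaseProvider`), connectors now contribute an `UpdatableSchema` to a multibound set, and a single database initialises all of them. A minimal sketch of what such a consolidated provider could look like (the class body is an illustration under assumptions, not the actual scalligraph implementation):

```scala
import javax.inject.{Inject, Provider, Singleton}

import org.thp.scalligraph.models.{Database, UpdatableSchema}

import scala.collection.immutable

// Hypothetical sketch: one provider that collects every UpdatableSchema
// contributed via ScalaMultibinder and applies them to the single Database.
@Singleton
class InitialisingDatabaseProvider @Inject() (
    database: Database,                     // the lone concrete Database binding
    schemas: immutable.Set[UpdatableSchema] // one entry per connector module
) extends Provider[Database] {

  // Run every schema's update exactly once, the first time the Database is requested.
  private lazy val initialisedDatabase: Database = {
    schemas.foreach(schema => schema.update(database).get) // fail fast on a broken migration
    database
  }

  override def get(): Database = initialisedDatabase
}
```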
@@ -26,7 +26,7 @@ import scala.reflect.runtime.{universe => ru}
 @Singleton
 class ActionCtrl @Inject() (
     override val entrypoint: Entrypoint,
-    @Named("with-thehive-schema") override val db: Database,
+    override val db: Database,
     actionSrv: ActionSrv,
     entityHelper: EntityHelper,
     caseSrv: CaseSrv,
@@ -76,7 +76,7 @@ class ActionCtrl @Inject() (
 }

 @Singleton
-class PublicAction @Inject() (actionSrv: ActionSrv, @Named("with-thehive-schema") db: Database) extends PublicData {
+class PublicAction @Inject() (actionSrv: ActionSrv, db: Database) extends PublicData {

   override val entityName: String = "action"
   override val initialQuery: Query =
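Most of the remaining hunks repeat this same mechanical edit at every injection site: with only one `Database` binding left, the `@Named` qualifier goes away. Schematically (`SomeService` is a stand-in name, not a class from the codebase):

```scala
import javax.inject.{Inject, Named, Singleton}

import org.thp.scalligraph.models.Database

// Before: every consumer had to name the Database flavour it wanted.
@Singleton
class SomeServiceBefore @Inject() (@Named("with-thehive-schema") db: Database)

// After: the single unqualified Database binding is injected everywhere.
@Singleton
class SomeServiceAfter @Inject() (db: Database)
```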
@@ -26,7 +26,7 @@ import scala.util.{Failure, Success}
 @Singleton
 class AnalyzerTemplateCtrl @Inject() (
     override val entrypoint: Entrypoint,
-    @Named("with-thehive-cortex-schema") override val db: Database,
+    override val db: Database,
     analyzerTemplateSrv: AnalyzerTemplateSrv,
     @Named("v0") override val queryExecutor: QueryExecutor,
     override val publicData: PublicAnalyzerTemplate
@@ -19,8 +19,8 @@ import org.thp.thehive.services.ObservableOps._
 import scala.reflect.runtime.{universe => ru}

 @Singleton
-class CortexQueryExecutor @Inject() (
-    @Named("with-thehive-cortex-schema") implicit override val db: Database,
+class CortexQueryExecutor @Inject() (implicit
+    override val db: Database,
     job: PublicJob,
     report: PublicAnalyzerTemplate,
     action: PublicAction,
@@ -25,7 +25,7 @@ import scala.concurrent.{ExecutionContext, Future}
 @Singleton
 class JobCtrl @Inject() (
     override val entrypoint: Entrypoint,
-    @Named("with-thehive-cortex-schema") override val db: Database,
+    override val db: Database,
     jobSrv: JobSrv,
     observableSrv: ObservableSrv,
     errorHandler: ErrorHandler,
@@ -16,7 +16,7 @@ import scala.concurrent.ExecutionContext
 @Singleton
 class ResponderCtrl @Inject() (
     entrypoint: Entrypoint,
-    @Named("with-thehive-cortex-schema") implicit val db: Database,
+    implicit val db: Database,
     responderSrv: ResponderSrv,
     implicit val ex: ExecutionContext
 ) {
@@ -4,7 +4,9 @@ import javax.inject.{Inject, Singleton}
 import org.reflections.Reflections
 import org.reflections.scanners.SubTypesScanner
 import org.reflections.util.ConfigurationBuilder
+import org.thp.scalligraph.auth.AuthContext
 import org.thp.scalligraph.models._
+import org.thp.thehive.services.LocalUserSrv
 import play.api.Logger

 import scala.collection.JavaConverters._
@@ -39,4 +41,5 @@ class CortexSchemaDefinition @Inject() () extends Schema with UpdatableSchema {
       }
       .toSeq
   }
+  override val authContext: AuthContext = LocalUserSrv.getSystemAuthContext
 }
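`CortexSchemaDefinition` now carries the system `AuthContext` itself, so callers of `UpdatableSchema.update` no longer pass one (see the `Output.scala` hunks below). Inferred from how this commit uses it, the contract plausibly has this shape (an assumption for illustration; the real trait lives in scalligraph and may declare more members):

```scala
import org.thp.scalligraph.auth.AuthContext
import org.thp.scalligraph.models.{Database, Schema}

import scala.util.Try

// Assumed shape of the contract, deduced from the call sites in this commit.
trait UpdatableSchema extends Schema {
  val authContext: AuthContext        // credentials used to run schema/data migrations
  def update(db: Database): Try[Unit] // no explicit AuthContext argument any more
}
```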
@@ -39,7 +39,7 @@ class ActionSrv @Inject() (
     logSrv: LogSrv,
     connector: Connector,
     implicit val schema: Schema,
-    @Named("with-thehive-cortex-schema") implicit val db: Database,
+    implicit val db: Database,
     implicit val ec: ExecutionContext,
     auditSrv: CortexAuditSrv
 ) extends VertexSrv[Action] {
@@ -24,7 +24,7 @@ import scala.util.{Failure, Try}

 @Singleton
 class AnalyzerTemplateSrv @Inject() (implicit
-    @Named("with-thehive-cortex-schema") db: Database,
+    db: Database,
     auditSrv: CortexAuditSrv,
     organisationSrv: OrganisationSrv
 ) extends VertexSrv[AnalyzerTemplate] {
@@ -45,7 +45,7 @@ class JobSrv @Inject() (
     reportTagSrv: ReportTagSrv,
     serviceHelper: ServiceHelper,
     auditSrv: CortexAuditSrv,
-    @Named("with-thehive-schema") implicit val db: Database,
+    implicit val db: Database,
     implicit val ec: ExecutionContext,
     implicit val mat: Materializer
 ) extends VertexSrv[Job] {
@@ -17,7 +17,7 @@ import scala.util.{Failure, Success}
 @Singleton
 class ResponderSrv @Inject() (
     connector: Connector,
-    @Named("with-thehive-cortex-schema") db: Database,
+    db: Database,
     entityHelper: EntityHelper,
     serviceHelper: ServiceHelper,
     implicit val ec: ExecutionContext
@@ -16,7 +16,7 @@ import play.api.Logger

 @Singleton
 class ServiceHelper @Inject() (
-    @Named("with-thehive-cortex-schema") db: Database,
+    db: Database,
     organisationSrv: OrganisationSrv
 ) {

@@ -7,10 +7,6 @@ import org.thp.thehive.{BasicDatabaseProvider, TestAppBuilder}
 import play.api.test.{FakeRequest, PlaySpecification}

 class AnalyzerCtrlTest extends PlaySpecification with TestAppBuilder {
-  override def appConfigure: AppBuilder =
-    super
-      .appConfigure
-      .bindNamedToProvider[Database, BasicDatabaseProvider]("with-thehive-cortex-schema")

   "analyzer controller" should {
     "list analyzers" in testApp { app =>
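The test fixtures shrink for the same reason: there is no named `Database` binding left to provide, so specs that only needed that override fall back to `TestAppBuilder`'s defaults, and specs with other custom bindings (below) simply lose one line. A sketch of the simplified shape (assuming `TestAppBuilder` supplies a default `appConfigure`; the spec name is a stand-in):

```scala
import play.api.test.PlaySpecification

// Sketch: with no named Database to bind, a simple spec needs no appConfigure override.
class SomeCtrlTest extends PlaySpecification with TestAppBuilder {
  "some controller" should {
    "work against the default (unnamed) Database binding" in testApp { app =>
      pending // exercise the controller here
    }
  }
}
```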
@@ -13,10 +13,6 @@ import play.api.test.{FakeRequest, PlaySpecification}
 import scala.util.Random

 class AnalyzerTemplateCtrlTest extends PlaySpecification with TestAppBuilder {
-  override def appConfigure: AppBuilder =
-    super
-      .appConfigure
-      .bindNamedToProvider[Database, BasicDatabaseProvider]("with-thehive-cortex-schema")

   "report controller" should {
     // "create, fetch, update and delete a template" in testApp {app =>
@@ -23,7 +23,6 @@ class JobCtrlTest extends PlaySpecification with TestAppBuilder {
         .bind[Connector, TestConnector]
         .bindToProvider[Schema, TheHiveCortexSchemaProvider]
         .bindNamedToProvider[QueryExecutor, TheHiveCortexQueryExecutorProvider]("v0")
-        .bindNamedToProvider[Database, BasicDatabaseProvider]("with-thehive-cortex-schema")
     )

   "job controller" should {
@@ -33,7 +33,7 @@ class ActionSrvTest extends PlaySpecification with TestAppBuilder {
         .bind[Connector, TestConnector]
         .bindToProvider[Schema, TheHiveCortexSchemaProvider]
     )
-    .bindNamedToProvider[Database, BasicDatabaseProvider]("with-thehive-cortex-schema")

   def testAppBuilder[A](body: AppBuilder => A): A =
     testApp { app =>
@@ -22,7 +22,6 @@ class AnalyzerSrvTest extends PlaySpecification with TestAppBuilder {
         .bind[Connector, TestConnector]
         .bindToProvider[Schema, TheHiveCortexSchemaProvider]
     )
-    .bindNamedToProvider[Database, BasicDatabaseProvider]("with-thehive-cortex-schema")

   implicit val authContext: AuthContext =
     DummyUserSrv(userId = "[email protected]", organisation = "cert", permissions = Permissions.all).authContext
@@ -31,7 +31,6 @@ class JobSrvTest extends PlaySpecification with TestAppBuilder {
       .bindActor[CortexActor]("cortex-actor")
       .bindToProvider[CortexClient, TestCortexClientProvider]
       .bind[Connector, TestConnector]
-      .bindNamedToProvider[Database, BasicDatabaseProvider]("with-thehive-cortex-schema")
       .`override`(_.bindToProvider[Schema, TheHiveCortexSchemaProvider])

   "job service" should {
@@ -27,7 +27,6 @@ class ResponderSrvTest extends PlaySpecification with TestAppBuilder {
         .bind[Connector, TestConnector]
         .bindToProvider[Schema, TheHiveCortexSchemaProvider]
     )
-    .bindNamedToProvider[Database, BasicDatabaseProvider]("with-thehive-cortex-schema")

   "responder service" should {
     "fetch responders by type" in testApp { app =>
@@ -22,7 +22,6 @@ class ServiceHelperTest extends PlaySpecification with TestAppBuilder {
         .bind[Connector, TestConnector]
         .bindToProvider[Schema, TheHiveCortexSchemaProvider]
     )
-    .bindNamedToProvider[Database, BasicDatabaseProvider]("with-thehive-cortex-schema")

   "service helper" should {
     "filter properly organisations according to supplied config" in testApp { app =>
@@ -17,12 +17,12 @@ import scala.util.{Success, Try}
 class NoAuditSrv @Inject() (
     userSrvProvider: Provider[UserSrv],
     @Named("notification-actor") notificationActor: ActorRef,
-    eventSrv: EventSrv
-)(implicit @Named("with-thehive-schema") db: Database)
-    extends AuditSrv(userSrvProvider, notificationActor, eventSrv)(db) {
+    eventSrv: EventSrv,
+    db: Database
+) extends AuditSrv(userSrvProvider, notificationActor, eventSrv, db) {

-  override def create(audit: Audit, context: Option[Product with Entity], `object`: Option[Product with Entity])(
-      implicit graph: Graph,
+  override def create(audit: Audit, context: Option[Product with Entity], `object`: Option[Product with Entity])(implicit
+      graph: Graph,
       authContext: AuthContext
   ): Try[Unit] =
     Success(())
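Besides losing the `@Named` qualifier, `NoAuditSrv` moves `db` from a separate implicit parameter list into the primary constructor, which implies the parent `AuditSrv` signature changed the same way. Deduced from the `extends` call rather than from `AuditSrv`'s source (import paths are also assumptions), the parent now plausibly looks like:

```scala
import javax.inject.{Named, Provider}

import akka.actor.ActorRef
import org.thp.scalligraph.models.Database
import org.thp.thehive.services.{EventSrv, UserSrv}

// Inferred sketch of the parent constructor after this commit: db is a plain
// fourth argument instead of a trailing (implicit db: Database) parameter list.
class AuditSrvSketch(
    userSrvProvider: Provider[UserSrv],
    @Named("notification-actor") notificationActor: ActorRef,
    eventSrv: EventSrv,
    db: Database
)
```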
33 changes: 18 additions & 15 deletions migration/src/main/scala/org/thp/thehive/migration/th4/Output.scala
@@ -3,7 +3,8 @@ package org.thp.thehive.migration.th4
 import akka.actor.ActorSystem
 import akka.stream.Materializer
 import com.google.inject.Guice
-import javax.inject.{Inject, Named, Provider, Singleton}
+
+import javax.inject.{Inject, Provider, Singleton}
 import net.codingwell.scalaguice.ScalaModule
 import org.apache.tinkerpop.gremlin.process.traversal.P
 import org.thp.scalligraph._
@@ -15,6 +16,7 @@ import org.thp.scalligraph.traversal.{Graph, Traversal}
 import org.thp.scalligraph.traversal.TraversalOps._
 import org.thp.thehive.connector.cortex.models.{CortexSchemaDefinition, TheHiveCortexSchemaProvider}
 import org.thp.thehive.connector.cortex.services.{ActionSrv, JobSrv}
+import org.thp.thehive.controllers.v1.Conversion._
 import org.thp.thehive.dto.v1.InputCustomFieldValue
 import org.thp.thehive.migration
 import org.thp.thehive.migration.IdMapping
@@ -31,7 +33,6 @@ import play.api.{Configuration, Environment, Logger}
 import scala.collection.JavaConverters._
 import scala.concurrent.ExecutionContext
 import scala.util.{Failure, Success, Try}
-import org.thp.thehive.controllers.v1.Conversion._

 object Output {

@@ -54,8 +55,7 @@

     bind[AuditSrv].to[NoAuditSrv]
     bind[Database].to[JanusDatabase]
-    bind[Database].annotatedWithName("with-thehive-schema").toProvider[BasicDatabaseProvider]
-    bind[Database].annotatedWithName("with-thehive-cortex-schema").toProvider[BasicDatabaseProvider]
+    bind[Database].toProvider[BasicDatabaseProvider]
     bind[Configuration].toInstance(configuration)
     bind[Environment].toInstance(Environment.simple())
     bind[ApplicationLifecycle].to[DefaultApplicationLifecycle]
@@ -74,7 +74,7 @@
   def apply(configuration: Configuration)(implicit actorSystem: ActorSystem): Output = {
     if (configuration.getOptional[Boolean]("dropDatabase").contains(true)) {
       Logger(getClass).info("Drop database")
-      new JanusDatabase(configuration, actorSystem).drop()
+      new JanusDatabase(configuration, actorSystem, fullTextIndexAvailable = false).drop()
     }
     buildApp(configuration).getInstance(classOf[Output])
   }
@@ -109,7 +109,7 @@ class Output @Inject() (
     resolutionStatusSrv: ResolutionStatusSrv,
     jobSrv: JobSrv,
     actionSrv: ActionSrv,
-    @Named("with-thehive-schema") db: Database,
+    db: Database,
     cache: SyncCacheApi
 ) extends migration.Output {
   lazy val logger: Logger = Logger(getClass)
@@ -232,11 +232,11 @@ class Output @Inject() (
                | ${alerts.size} alerts""".stripMargin)
   }

-  def startMigration(): Try[Unit] = {
-    db match {
-      case jdb: JanusDatabase => jdb.dropOtherConnections.recover { case error => logger.error(s"Fail to remove other connection", error) }
-      case _ =>
-    }
+  def startMigration(): Try[Unit] =
+    // db match {
+    //   case jdb: JanusDatabase => jdb.dropOtherConnections.recover { case error => logger.error(s"Fail to remove other connection", error) }
+    //   case _ =>
+    // }
     if (db.version("thehive") == 0)
       db.createSchemaFrom(theHiveSchema)(LocalUserSrv.getSystemAuthContext)
        .flatMap(_ => db.setVersion(theHiveSchema.name, theHiveSchema.operations.lastVersion))
@@ -245,13 +245,12 @@ class Output @Inject() (
        .map(_ => retrieveExistingData())
     else
       theHiveSchema
-        .update(db)(LocalUserSrv.getSystemAuthContext)
-        .flatMap(_ => cortexSchema.update(db)(LocalUserSrv.getSystemAuthContext))
+        .update(db)
+        .flatMap(_ => cortexSchema.update(db))
         .map { _ =>
           retrieveExistingData()
           db.removeAllIndexes()
         }
-  }

   def endMigration(): Try[Unit] = {
     db.addSchemaIndexes(theHiveSchema)
@@ -496,7 +495,11 @@ class Output @Inject() (
   override def createCase(inputCase: InputCase): Try[IdMapping] =
     authTransaction(inputCase.metaData.createdBy) { implicit graph => implicit authContext =>
       logger.debug(s"Create case #${inputCase.`case`.number}")
-      caseSrv.createEntity(inputCase.`case`).map { createdCase =>
+      val organisationIds = inputCase.organisations.flatMap {
+        case (orgName, _) => getOrganisation(orgName).map(_._id).toOption
+      }
+      val `case` = inputCase.`case`.copy(organisationIds = organisationIds.toSeq)
+      caseSrv.createEntity(`case`).map { createdCase =>
         updateMetaData(createdCase, inputCase.metaData)
         inputCase
           .user
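Two behavioural changes ride along in this file: `update` no longer takes an explicit `AuthContext` (each `UpdatableSchema` now supplies its own), and newly created cases are stamped with the ids of the organisations they belong to. The create-or-update decision at startup can be restated compactly (a simplified sketch; the real method also retrieves existing data and removes indexes, as the hunk shows):

```scala
import org.thp.scalligraph.models.{Database, UpdatableSchema}

import scala.util.Try

// Condensed restatement of startMigration's decision logic above.
def startSchemas(db: Database, thehive: UpdatableSchema, cortex: UpdatableSchema): Try[Unit] =
  if (db.version("thehive") == 0)
    // Fresh database: create the schema, then record the current model version.
    db.createSchemaFrom(thehive)(thehive.authContext)
      .flatMap(_ => db.setVersion(thehive.name, thehive.operations.lastVersion))
  else
    // Existing database: run pending migrations; each schema brings its own AuthContext.
    thehive.update(db).flatMap(_ => cortex.update(db))
```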
@@ -23,7 +23,7 @@ class MispCtrl @Inject() (
     mispExportSrv: MispExportSrv,
     alertSrv: AlertSrv,
     caseSrv: CaseSrv,
-    @Named("with-thehive-schema") db: Database,
+    db: Database,
     @Named("misp-actor") mispActor: ActorRef,
     implicit val ec: ExecutionContext
 ) {
@@ -11,7 +11,7 @@ import org.thp.scalligraph.models._
 import org.thp.scalligraph.traversal.Graph
 import org.thp.scalligraph.traversal.TraversalOps._
 import org.thp.scalligraph.utils.FunctionalCondition._
-import org.thp.scalligraph.{EntityName, RichSeq}
+import org.thp.scalligraph.{EntityId, EntityName, RichSeq}
 import org.thp.thehive.controllers.v1.Conversion._
 import org.thp.thehive.models._
 import org.thp.thehive.services.AlertOps._
@@ -37,15 +37,15 @@ class MispImportSrv @Inject() (
     observableTypeSrv: ObservableTypeSrv,
     attachmentSrv: AttachmentSrv,
     caseTemplateSrv: CaseTemplateSrv,
+    db: Database,
     auditSrv: AuditSrv,
-    @Named("with-thehive-schema") db: Database,
     implicit val ec: ExecutionContext,
     implicit val mat: Materializer
 ) {

   lazy val logger: Logger = Logger(getClass)

-  def eventToAlert(client: TheHiveMispClient, event: Event): Try[Alert] =
+  def eventToAlert(client: TheHiveMispClient, event: Event, organisationId: EntityId): Try[Alert] =
     client
       .currentOrganisationName
       .map { mispOrganisation =>
@@ -70,7 +70,8 @@
           .getOrElse(2),
         pap = 2,
         read = false,
-        follow = true
+        follow = true,
+        organisationId = organisationId
       )
     }

@@ -335,7 +336,7 @@
     caseTemplate: Option[CaseTemplate with Entity]
   )(implicit graph: Graph, authContext: AuthContext): Try[(Alert with Entity, JsObject)] = {
     logger.debug(s"updateOrCreateAlert ${client.name}#${event.id} for organisation ${organisation.name}")
-    eventToAlert(client, event).flatMap { alert =>
+    eventToAlert(client, event, organisation._id).flatMap { alert =>
       organisationSrv
         .get(organisation)
         .alerts
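Like `Case` in `Output.scala` above, `Alert` now records its owning organisation's id directly on the entity, which is why `eventToAlert` gains an `organisationId: EntityId` parameter that `updateOrCreateAlert` fills with `organisation._id`. A hypothetical call site, assuming the organisation entity is already resolved:

```scala
import org.thp.scalligraph.EntityId
import org.thp.scalligraph.models.Entity

import scala.util.Try

// The caller resolves the organisation first, then threads its id into the
// alert so the created vertex is self-describing (no extra traversal later).
def importEvent(
    srv: MispImportSrv,
    client: TheHiveMispClient,
    event: Event,
    organisation: Organisation with Entity
): Try[Alert] =
  srv.eventToAlert(client, event, organisation._id)
```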