Skip to content

Commit

Permalink
#1404 Provide database instance to all services only when initialisati…
Browse files Browse the repository at this point in the history
…on is complete
  • Loading branch information
To-om committed Jun 25, 2020
1 parent 1bc1f18 commit a7cdece
Show file tree
Hide file tree
Showing 100 changed files with 1,002 additions and 643 deletions.
Original file line number Diff line number Diff line change
@@ -1,17 +1,16 @@
package org.thp.thehive.connector.cortex

import play.api.libs.concurrent.AkkaGuiceSupport
import play.api.routing.{Router => PlayRouter}
import play.api.{Configuration, Environment, Logger}

import com.google.inject.AbstractModule
import net.codingwell.scalaguice.{ScalaModule, ScalaMultibinder}
import org.thp.scalligraph.models.Schema
import org.thp.scalligraph.models.{Database, Schema}
import org.thp.scalligraph.query.QueryExecutor
import org.thp.thehive.connector.cortex.controllers.v0.{CortexQueryExecutor => CortexQueryExecutorV0}
import org.thp.thehive.connector.cortex.models.{CortexSchema, SchemaUpdater}
import org.thp.thehive.connector.cortex.models.{CortexSchemaDefinition, DatabaseProvider}
import org.thp.thehive.connector.cortex.services.{Connector, CortexActor}
import org.thp.thehive.services.{Connector => TheHiveConnector}
import play.api.libs.concurrent.AkkaGuiceSupport
import play.api.routing.{Router => PlayRouter}
import play.api.{Configuration, Environment, Logger}

class CortexModule(environment: Environment, configuration: Configuration) extends AbstractModule with ScalaModule with AkkaGuiceSupport {
lazy val logger: Logger = Logger(getClass)
Expand All @@ -24,9 +23,9 @@ class CortexModule(environment: Environment, configuration: Configuration) exten
val connectorBindings = ScalaMultibinder.newSetBinder[TheHiveConnector](binder)
connectorBindings.addBinding.to[Connector]
val schemaBindings = ScalaMultibinder.newSetBinder[Schema](binder)
schemaBindings.addBinding.to[CortexSchema]
schemaBindings.addBinding.to[CortexSchemaDefinition]

bind[SchemaUpdater].asEagerSingleton()
bind[Database].annotatedWithName("with-thehive-cortex-schema").toProvider[DatabaseProvider]
bindActor[CortexActor]("cortex-actor")
()
}
Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
package org.thp.thehive.connector.cortex.controllers.v0

import javax.inject.{Inject, Singleton}
import javax.inject.{Inject, Named, Singleton}
import org.thp.scalligraph.controllers.{Entrypoint, FieldsParser}
import org.thp.scalligraph.models.{Database, Entity}
import org.thp.scalligraph.query.{ParamQuery, PublicProperty, Query}
Expand All @@ -22,7 +22,7 @@ import scala.concurrent.{ExecutionContext, Future}
@Singleton
class ActionCtrl @Inject() (
entrypoint: Entrypoint,
db: Database,
@Named("with-thehive-schema") db: Database,
properties: Properties,
actionSrv: ActionSrv,
entityHelper: EntityHelper,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ package org.thp.thehive.connector.cortex.controllers.v0

import java.util.zip.ZipFile

import com.google.inject.name.Named
import javax.inject.{Inject, Singleton}
import org.thp.scalligraph.controllers.{Entrypoint, FFile, FieldsParser}
import org.thp.scalligraph.models.{Database, Entity}
Expand All @@ -24,7 +25,7 @@ import scala.util.{Failure, Success}
@Singleton
class AnalyzerTemplateCtrl @Inject() (
entrypoint: Entrypoint,
db: Database,
@Named("with-thehive-cortex-schema") db: Database,
properties: Properties,
analyzerTemplateSrv: AnalyzerTemplateSrv
) extends QueryableCtrl {
Expand Down
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
package org.thp.thehive.connector.cortex.controllers.v0

import com.google.inject.name.Named
import javax.inject.{Inject, Singleton}
import org.scalactic.Good
import org.thp.scalligraph.BadRequestError
Expand All @@ -19,7 +20,7 @@ import scala.reflect.runtime.{universe => ru}
class CortexQueryExecutor @Inject() (
jobCtrl: JobCtrl,
queryCtrlBuilder: QueryCtrlBuilder,
implicit val db: Database,
@Named("with-thehive-cortex-schema") implicit val db: Database,
reportCtrl: AnalyzerTemplateCtrl,
actionCtrl: ActionCtrl
) extends QueryExecutor {
Expand Down
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
package org.thp.thehive.connector.cortex.controllers.v0

import com.google.inject.name.Named
import javax.inject.{Inject, Singleton}
import org.thp.scalligraph.controllers.{Entrypoint, FieldsParser}
import org.thp.scalligraph.models.Database
Expand All @@ -22,7 +23,7 @@ import scala.concurrent.{ExecutionContext, Future}
@Singleton
class JobCtrl @Inject() (
entrypoint: Entrypoint,
db: Database,
@Named("with-thehive-cortex-schema") db: Database,
properties: Properties,
jobSrv: JobSrv,
observableSrv: ObservableSrv,
Expand Down
Original file line number Diff line number Diff line change
@@ -1,21 +1,21 @@
package org.thp.thehive.connector.cortex.controllers.v0

import scala.concurrent.ExecutionContext

import play.api.libs.json.JsObject
import play.api.mvc.{Action, AnyContent, Results}

import com.google.inject.name.Named
import javax.inject.{Inject, Singleton}
import org.thp.scalligraph.controllers.{Entrypoint, FieldsParser}
import org.thp.scalligraph.models.Database
import org.thp.thehive.connector.cortex.controllers.v0.Conversion._
import org.thp.thehive.connector.cortex.services.ResponderSrv
import org.thp.thehive.controllers.v0.Conversion._
import play.api.libs.json.JsObject
import play.api.mvc.{Action, AnyContent, Results}

import scala.concurrent.ExecutionContext

@Singleton
class ResponderCtrl @Inject() (
entrypoint: Entrypoint,
implicit val db: Database,
@Named("with-thehive-cortex-schema") implicit val db: Database,
responderSrv: ResponderSrv,
implicit val ex: ExecutionContext
) {
Expand Down
Original file line number Diff line number Diff line change
@@ -1,16 +1,17 @@
package org.thp.thehive.connector.cortex.models

import scala.collection.JavaConverters._
import scala.reflect.runtime.{universe => ru}
import play.api.Logger
import javax.inject.{Inject, Singleton}
import org.reflections.Reflections
import org.reflections.scanners.SubTypesScanner
import org.reflections.util.ConfigurationBuilder
import org.thp.scalligraph.models.{HasModel, Model, Operations, Schema, UpdatableSchema}
import org.thp.scalligraph.models._
import play.api.Logger

import scala.collection.JavaConverters._
import scala.reflect.runtime.{universe => ru}

@Singleton
class CortexSchema @Inject() () extends Schema with UpdatableSchema {
class CortexSchemaDefinition @Inject() () extends Schema with UpdatableSchema {

lazy val logger: Logger = Logger(getClass)
val name: String = "thehive-cortex"
Expand Down

This file was deleted.

Original file line number Diff line number Diff line change
@@ -0,0 +1,84 @@
package org.thp.thehive.connector.cortex.models

import akka.actor.{Actor, ActorRef, ActorSystem, PoisonPill, Props}
import akka.cluster.singleton.{ClusterSingletonManager, ClusterSingletonManagerSettings, ClusterSingletonProxy, ClusterSingletonProxySettings}
import akka.pattern.ask
import akka.util.Timeout
import javax.inject.{Inject, Named, Provider, Singleton}
import org.thp.scalligraph.models.Database
import org.thp.thehive.services.LocalUserSrv
import play.api.Logger

import scala.concurrent.Await
import scala.concurrent.duration.DurationInt
import scala.util.Try

@Singleton
class DatabaseProvider @Inject() (
cortexSchema: CortexSchemaDefinition,
@Named("with-thehive-schema") database: Database,
actorSystem: ActorSystem
) extends Provider[Database] {
import SchemaUpdaterActor._
lazy val schemaUpdaterActor: ActorRef = {
val singletonManager =
actorSystem.actorOf(
ClusterSingletonManager.props(
singletonProps = Props(classOf[SchemaUpdaterActor], cortexSchema, database),
terminationMessage = PoisonPill,
settings = ClusterSingletonManagerSettings(actorSystem)
),
name = "theHiveCortexSchemaUpdaterSingletonManager"
)

actorSystem.actorOf(
ClusterSingletonProxy.props(
singletonManagerPath = singletonManager.path.toStringWithoutAddress,
settings = ClusterSingletonProxySettings(actorSystem)
),
name = "theHiveCortexSchemaUpdaterSingletonProxy"
)
}

override def get(): Database = {
implicit val timeout: Timeout = Timeout(5.minutes)
Await.result(schemaUpdaterActor ? RequestDBStatus, timeout.duration) match {
case DBStatus(status) =>
status.get
database
}
}
}

object SchemaUpdaterActor {
case object RequestDBStatus
case class DBStatus(status: Try[Unit])
}

/**
  * Actor (run as a cluster singleton — see DatabaseProvider) that performs the
  * Cortex schema update exactly once and caches the outcome. On `RequestDBStatus`
  * it runs (or replays) the update result; a failed attempt is retried on the
  * next request.
  */
class SchemaUpdaterActor @Inject() (cortexSchema: CortexSchemaDefinition, database: Database) extends Actor {
  import SchemaUpdaterActor._
  lazy val logger: Logger = Logger(getClass)

  /**
    * Applies the schema update, logging any failure.
    *
    * Bug fix: the previous `.recover { case error => logger.error(...) }`
    * converted every Failure into Success(()) (logger.error returns Unit),
    * which silently disabled the retry branch in `receive(status)` and the
    * fail-fast `status.get` in DatabaseProvider. Log via `.failed.foreach`
    * instead, so the original Try (and its failure) is preserved.
    */
  def update(): Try[Unit] = {
    val result = cortexSchema.update(database)(LocalUserSrv.getSystemAuthContext)
    result.failed.foreach(error => logger.error("Database with CortexSchema schema update failure", error))
    result
  }

  // Initial behaviour: first request triggers the update, then we switch to the
  // cached-status behaviour. `sender()` (not `sender`) per Akka convention — it
  // is not a stable value.
  override def receive: Receive = {
    case RequestDBStatus =>
      val status = update()
      sender() ! DBStatus(status)
      context.become(receive(status))
  }

  // Cached behaviour: replay the stored status on success, retry the update on failure.
  def receive(status: Try[Unit]): Receive = {
    case RequestDBStatus =>
      status.fold({ _ =>
        val newStatus = update()
        sender() ! DBStatus(newStatus)
        context.become(receive(newStatus))
      }, _ => sender() ! DBStatus(status))
  }
}
Original file line number Diff line number Diff line change
Expand Up @@ -2,9 +2,9 @@ package org.thp.thehive.connector.cortex.models

import javax.inject.{Inject, Provider, Singleton}
import org.thp.scalligraph.models.Schema
import org.thp.thehive.models.TheHiveSchema
import org.thp.thehive.models.TheHiveSchemaDefinition

@Singleton
class TheHiveCortexSchemaProvider @Inject()(thehiveSchema: TheHiveSchema, cortexSchema: CortexSchema) extends Provider[Schema] {
class TheHiveCortexSchemaProvider @Inject() (thehiveSchema: TheHiveSchemaDefinition, cortexSchema: CortexSchemaDefinition) extends Provider[Schema] {
override lazy val get: Schema = thehiveSchema + cortexSchema
}
Original file line number Diff line number Diff line change
Expand Up @@ -2,11 +2,6 @@ package org.thp.thehive.connector.cortex.services

import java.util.Date

import scala.concurrent.{ExecutionContext, Future}
import scala.util.Try

import play.api.libs.json.{JsObject, Json, OWrites}

import akka.actor.ActorRef
import com.google.inject.name.Named
import gremlin.scala._
Expand All @@ -26,6 +21,10 @@ import org.thp.thehive.connector.cortex.services.CortexActor.CheckJob
import org.thp.thehive.controllers.v0.Conversion._
import org.thp.thehive.models.{Case, Task}
import org.thp.thehive.services.{AlertSteps, CaseSteps, LogSteps, ObservableSteps, TaskSteps}
import play.api.libs.json.{JsObject, Json, OWrites}

import scala.concurrent.{ExecutionContext, Future}
import scala.util.Try

class ActionSrv @Inject() (
@Named("cortex-actor") cortexActor: ActorRef,
Expand All @@ -34,7 +33,7 @@ class ActionSrv @Inject() (
serviceHelper: ServiceHelper,
connector: Connector,
implicit val schema: Schema,
implicit val db: Database,
@Named("with-thehive-cortex-schema") implicit val db: Database,
implicit val ec: ExecutionContext,
auditSrv: CortexAuditSrv
) extends VertexSrv[Action, ActionSteps] {
Expand Down Expand Up @@ -183,7 +182,8 @@ class ActionSrv @Inject() (
}

@EntitySteps[Action]
class ActionSteps(raw: GremlinScala[Vertex])(implicit db: Database, graph: Graph, schema: Schema) extends VertexSteps[Action](raw) {
class ActionSteps(raw: GremlinScala[Vertex])(implicit @Named("with-thehive-schema") db: Database, graph: Graph, schema: Schema)
extends VertexSteps[Action](raw) {

/**
* Provides a RichAction model with additional Entity context
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,28 +2,27 @@ package org.thp.thehive.connector.cortex.services

import java.util.zip.{ZipEntry, ZipFile}

import scala.collection.JavaConverters._
import scala.io.Source
import scala.util.{Failure, Try}

import play.api.libs.json.{JsObject, Json}

import com.google.inject.name.Named
import gremlin.scala._
import javax.inject.{Inject, Singleton}
import org.thp.scalligraph.{CreateError, EntitySteps}
import org.thp.scalligraph.auth.AuthContext
import org.thp.scalligraph.models.{Database, Entity}
import org.thp.scalligraph.query.PropertyUpdater
import org.thp.scalligraph.services._
import org.thp.scalligraph.steps.StepsOps._
import org.thp.scalligraph.steps.VertexSteps
import org.thp.scalligraph.{CreateError, EntitySteps}
import org.thp.thehive.connector.cortex.controllers.v0.Conversion._
import org.thp.thehive.connector.cortex.models.AnalyzerTemplate
import org.thp.thehive.controllers.v0.Conversion._
import play.api.libs.json.{JsObject, Json}

import scala.collection.JavaConverters._
import scala.io.Source
import scala.util.{Failure, Try}
@Singleton
class AnalyzerTemplateSrv @Inject() (
implicit db: Database,
implicit @Named("with-thehive-cortex-schema") db: Database,
auditSrv: CortexAuditSrv
) extends VertexSrv[AnalyzerTemplate, AnalyzerTemplateSteps] {

Expand Down Expand Up @@ -108,7 +107,8 @@ class AnalyzerTemplateSrv @Inject() (
}

@EntitySteps[AnalyzerTemplate]
class AnalyzerTemplateSteps(raw: GremlinScala[Vertex])(implicit db: Database, graph: Graph) extends VertexSteps[AnalyzerTemplate](raw) {
class AnalyzerTemplateSteps(raw: GremlinScala[Vertex])(implicit @Named("with-thehive-cortex-schema") db: Database, graph: Graph)
extends VertexSteps[AnalyzerTemplate](raw) {

def get(idOrAnalyzerId: String): AnalyzerTemplateSteps =
if (db.isValidId(idOrAnalyzerId)) this.getByIds(idOrAnalyzerId)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ class CortexAuditSrv @Inject() (
userSrvProvider: Provider[UserSrv],
@Named("notification-actor") notificationActor: ActorRef,
eventSrv: EventSrv
)(implicit db: Database, schema: Schema)
)(implicit @Named("with-thehive-cortex-schema") db: Database, schema: Schema)
extends AuditSrv(userSrvProvider, notificationActor, eventSrv) {

val job = new ObjectAudit[Job, Observable]
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,12 +3,6 @@ package org.thp.thehive.connector.cortex.services
import java.nio.file.Files
import java.util.Date

import scala.collection.JavaConverters._
import scala.concurrent.{ExecutionContext, Future}
import scala.util.{Failure, Success, Try}

import play.api.libs.json.Json

import akka.Done
import akka.actor._
import akka.stream.Materializer
Expand All @@ -33,6 +27,11 @@ import org.thp.thehive.connector.cortex.services.CortexActor.CheckJob
import org.thp.thehive.controllers.v0.Conversion._
import org.thp.thehive.models._
import org.thp.thehive.services.{AttachmentSrv, ObservableSrv, ObservableSteps, ObservableTypeSrv, ReportTagSrv}
import play.api.libs.json.Json

import scala.collection.JavaConverters._
import scala.concurrent.{ExecutionContext, Future}
import scala.util.{Failure, Success, Try}

@Singleton
class JobSrv @Inject() (
Expand All @@ -44,7 +43,7 @@ class JobSrv @Inject() (
reportTagSrv: ReportTagSrv,
serviceHelper: ServiceHelper,
auditSrv: CortexAuditSrv,
implicit val db: Database,
@Named("with-thehive-schema") implicit val db: Database,
implicit val ec: ExecutionContext,
implicit val mat: Materializer
) extends VertexSrv[Job, JobSteps] {
Expand Down Expand Up @@ -267,7 +266,7 @@ class JobSrv @Inject() (
}

@EntitySteps[Job]
class JobSteps(raw: GremlinScala[Vertex])(implicit db: Database, graph: Graph) extends VertexSteps[Job](raw) {
class JobSteps(raw: GremlinScala[Vertex])(implicit @Named("with-thehive-schema") db: Database, graph: Graph) extends VertexSteps[Job](raw) {

/**
* Checks if a Job is visible from a certain UserRole end
Expand Down
Loading

0 comments on commit a7cdece

Please sign in to comment.