diff --git a/CHANGELOG.md b/CHANGELOG.md index ac6646e61d..4cbab6d666 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,26 @@ # Change Log +## [4.0.3](https://github.com/TheHive-Project/TheHive/milestone/66) (2020-12-22) + +**Implemented enhancements:** + +- Providing output details for Responders [\#1293](https://github.com/TheHive-Project/TheHive/issues/1293) +- [Enhancement] Change artifacts by observables on the onMouseOver tooltip of the eye icon of observable [\#1695](https://github.com/TheHive-Project/TheHive/issues/1695) +- [Bug] Enhance support of S3 for attachment storage [\#1705](https://github.com/TheHive-Project/TheHive/issues/1705) +- Update the headers of basic info sections [\#1710](https://github.com/TheHive-Project/TheHive/issues/1710) +- [Enhancement] Add poll duration config for UI Stream [\#1720](https://github.com/TheHive-Project/TheHive/issues/1720) + +**Fixed bugs:** + +- [Bug] MISP filters are not correctly implemented [\#1685](https://github.com/TheHive-Project/TheHive/issues/1685) +- [Bug] The query "getObservable" doesn't work for alert observables [\#1691](https://github.com/TheHive-Project/TheHive/issues/1691) +- Click analyzers mini-report does not load the full report [\#1694](https://github.com/TheHive-Project/TheHive/issues/1694) +- [TH4] Import file observable in gui generate error [\#1697](https://github.com/TheHive-Project/TheHive/issues/1697) +- [Bug] Cannot search for alerts per observables [\#1707](https://github.com/TheHive-Project/TheHive/issues/1707) +- [Bug] Serialization problem in cluster mode [\#1708](https://github.com/TheHive-Project/TheHive/issues/1708) +- [Bug] Issue with sorting [\#1716](https://github.com/TheHive-Project/TheHive/issues/1716) +- [Bug] Identical URL Observables can be added multiple times to the same case [\#1718](https://github.com/TheHive-Project/TheHive/issues/1718) + ## [4.0.2](https://github.com/TheHive-Project/TheHive/milestone/64) (2020-11-20) **Implemented enhancements:** @@ -218,7 +239,6 @@ - A (received) Shared Case is displayed as sender/owner [\#1245](https://github.com/TheHive-Project/TheHive/issues/1245) - FR: Alignment of case custom-fields (metrics) [\#1246](https://github.com/TheHive-Project/TheHive/issues/1246) - Add information about the age of a Case [\#1257](https://github.com/TheHive-Project/TheHive/issues/1257) -- Providing output details for Responders [\#1293](https://github.com/TheHive-Project/TheHive/issues/1293) - Add support to multi-factor authentication [\#1303](https://github.com/TheHive-Project/TheHive/issues/1303) - Add support to webhooks [\#1306](https://github.com/TheHive-Project/TheHive/issues/1306) diff --git a/ScalliGraph b/ScalliGraph index f6a4d2165c..ddbc847ef3 160000 --- a/ScalliGraph +++ b/ScalliGraph @@ -1 +1 @@ -Subproject commit f6a4d2165c26826c5b28db1a513ade15dfb060f2 +Subproject commit ddbc847ef30f2507e1287d894ad2191d873a0a87 diff --git a/build.sbt b/build.sbt index e4fa911b8b..d3fb4daeb0 100644 --- a/build.sbt +++ b/build.sbt @@ -2,7 +2,7 @@ import Dependencies._ import com.typesafe.sbt.packager.Keys.bashScriptDefines import org.thp.ghcl.Milestone -val 
thehiveVersion = "4.0.2-1" +val thehiveVersion = "4.0.3-1" val scala212 = "2.12.12" val scala213 = "2.13.1" val supportedScalaVersions = List(scala212, scala213) diff --git a/cortex/connector/src/main/resources/play/reference-overrides.conf b/cortex/connector/src/main/resources/play/reference-overrides.conf new file mode 100644 index 0000000000..324d422143 --- /dev/null +++ b/cortex/connector/src/main/resources/play/reference-overrides.conf @@ -0,0 +1,13 @@ +akka { + actor { + serializers { + cortex-schema-updater = "org.thp.thehive.connector.cortex.models.SchemaUpdaterSerializer" + cortex-jobs = "org.thp.thehive.connector.cortex.services.CortexSerializer" + } + + serialization-bindings { + "org.thp.thehive.connector.cortex.models.SchemaUpdaterMessage" = cortex-schema-updater + "org.thp.thehive.connector.cortex.services.CortexActorMessage" = cortex-jobs + } + } +} diff --git a/cortex/connector/src/main/resources/reference.conf b/cortex/connector/src/main/resources/reference.conf index f604462606..c106915e85 100644 --- a/cortex/connector/src/main/resources/reference.conf +++ b/cortex/connector/src/main/resources/reference.conf @@ -22,4 +22,4 @@ cortex = { // # HTTP client configuration (SSL and proxy) // # ws {} // }] -} \ No newline at end of file +} diff --git a/cortex/connector/src/main/scala/org/thp/thehive/connector/cortex/controllers/v0/JobCtrl.scala b/cortex/connector/src/main/scala/org/thp/thehive/connector/cortex/controllers/v0/JobCtrl.scala index cabe1bccc0..e844845d15 100644 --- a/cortex/connector/src/main/scala/org/thp/thehive/connector/cortex/controllers/v0/JobCtrl.scala +++ b/cortex/connector/src/main/scala/org/thp/thehive/connector/cortex/controllers/v0/JobCtrl.scala @@ -1,6 +1,7 @@ package org.thp.thehive.connector.cortex.controllers.v0 import com.google.inject.name.Named + import javax.inject.{Inject, Singleton} import org.thp.scalligraph.controllers.{Entrypoint, FieldsParser} import org.thp.scalligraph.models.{Database, UMapping} @@ -14,7 +15,7 @@ import org.thp.thehive.connector.cortex.services.JobOps._ import org.thp.thehive.connector.cortex.services.JobSrv import org.thp.thehive.controllers.v0.Conversion._ import org.thp.thehive.controllers.v0.{OutputParam, PublicData, QueryCtrl} -import org.thp.thehive.models.{Permissions, RichCase, RichObservable} +import org.thp.thehive.models.{Observable, Permissions, RichCase, RichObservable} import org.thp.thehive.services.ObservableOps._ import org.thp.thehive.services.ObservableSrv import play.api.mvc.{Action, AnyContent, Results} @@ -93,6 +94,9 @@ class PublicJob @Inject() (jobSrv: JobSrv) extends PublicData with JobRenderer { } ) override val outputQuery: Query = Query.outputWithContext[RichJob, Traversal.V[Job]]((jobSteps, authContext) => jobSteps.richJob(authContext)) + override val extraQueries: Seq[ParamQuery[_]] = Seq( + Query[Traversal.V[Observable], Traversal.V[Job]]("jobs", (jobTraversal, _) => jobTraversal.jobs) + ) override val publicProperties: PublicProperties = PublicPropertyListBuilder[Job] .property("analyzerId", UMapping.string)(_.rename("workerId").readonly) .property("cortexId", UMapping.string.optional)(_.field.readonly) diff --git a/cortex/connector/src/main/scala/org/thp/thehive/connector/cortex/models/Job.scala b/cortex/connector/src/main/scala/org/thp/thehive/connector/cortex/models/Job.scala index e376e97cab..60ca038fa7 100644 --- a/cortex/connector/src/main/scala/org/thp/thehive/connector/cortex/models/Job.scala +++ b/cortex/connector/src/main/scala/org/thp/thehive/connector/cortex/models/Job.scala @@ 
-1,12 +1,12 @@ package org.thp.thehive.connector.cortex.models -import java.util.Date - import org.thp.scalligraph.models.Entity import org.thp.scalligraph.{BuildEdgeEntity, BuildVertexEntity, EntityId} import org.thp.thehive.models.{Observable, RichObservable} import play.api.libs.json.{Format, JsObject, Json} +import java.util.Date + object JobStatus extends Enumeration { val InProgress, Success, Failure, Waiting, Deleted = Value diff --git a/cortex/connector/src/main/scala/org/thp/thehive/connector/cortex/models/SchemaUpdaterActor.scala b/cortex/connector/src/main/scala/org/thp/thehive/connector/cortex/models/SchemaUpdaterActor.scala index 9a7bcde9cd..958a6c3ce0 100644 --- a/cortex/connector/src/main/scala/org/thp/thehive/connector/cortex/models/SchemaUpdaterActor.scala +++ b/cortex/connector/src/main/scala/org/thp/thehive/connector/cortex/models/SchemaUpdaterActor.scala @@ -4,14 +4,13 @@ import akka.actor.{Actor, ActorRef, ActorSystem, PoisonPill, Props} import akka.cluster.singleton.{ClusterSingletonManager, ClusterSingletonManagerSettings, ClusterSingletonProxy, ClusterSingletonProxySettings} import akka.pattern.ask import akka.util.Timeout -import javax.inject.{Inject, Named, Provider, Singleton} import org.thp.scalligraph.models.Database import org.thp.thehive.services.LocalUserSrv import play.api.Logger +import javax.inject.{Inject, Named, Provider, Singleton} import scala.concurrent.Await import scala.concurrent.duration.DurationInt -import scala.util.Try @Singleton class DatabaseProvider @Inject() ( @@ -19,7 +18,6 @@ class DatabaseProvider @Inject() ( @Named("with-thehive-schema") database: Database, actorSystem: ActorSystem ) extends Provider[Database] { - import SchemaUpdaterActor._ lazy val schemaUpdaterActor: ActorRef = { val singletonManager = actorSystem.actorOf( @@ -42,43 +40,37 @@ class DatabaseProvider @Inject() ( override def get(): Database = { implicit val timeout: Timeout = Timeout(5.minutes) - Await.result(schemaUpdaterActor ? RequestDBStatus, timeout.duration) match { - case DBStatus(status) => - status.get - database + Await.result(schemaUpdaterActor ? RequestDB, timeout.duration) match { + case DBReady => database } } } -object SchemaUpdaterActor { - case object RequestDBStatus - case class DBStatus(status: Try[Unit]) -} +sealed trait SchemaUpdaterMessage +case object RequestDB extends SchemaUpdaterMessage +case object DBReady extends SchemaUpdaterMessage class SchemaUpdaterActor @Inject() (cortexSchema: CortexSchemaDefinition, database: Database) extends Actor { - import SchemaUpdaterActor._ lazy val logger: Logger = Logger(getClass) - def update(): Try[Unit] = + def update(): Unit = { cortexSchema .update(database)(LocalUserSrv.getSystemAuthContext) .recover { case error => logger.error(s"Database with CortexSchema schema update failure", error) } + () + } override def receive: Receive = { - case RequestDBStatus => - val status = update() - sender ! DBStatus(status) - context.become(receive(status)) + case RequestDB => + update() + sender ! DBReady + context.become(databaseUpToDate) } - def receive(status: Try[Unit]): Receive = { - case RequestDBStatus => - status.fold({ _ => - val newStatus = update() - sender ! DBStatus(newStatus) - context.become(receive(newStatus)) - }, _ => sender ! DBStatus(status)) + def databaseUpToDate: Receive = { + case RequestDB => + sender ! 
DBReady } } diff --git a/cortex/connector/src/main/scala/org/thp/thehive/connector/cortex/models/SchemaUpdaterSerializer.scala b/cortex/connector/src/main/scala/org/thp/thehive/connector/cortex/models/SchemaUpdaterSerializer.scala new file mode 100644 index 0000000000..27e4acc7cb --- /dev/null +++ b/cortex/connector/src/main/scala/org/thp/thehive/connector/cortex/models/SchemaUpdaterSerializer.scala @@ -0,0 +1,25 @@ +package org.thp.thehive.connector.cortex.models + +import akka.serialization.Serializer + +import java.io.NotSerializableException + +class SchemaUpdaterSerializer extends Serializer { + override def identifier: Int = -639734235 + + override def includeManifest: Boolean = false + + override def toBinary(o: AnyRef): Array[Byte] = + o match { + case RequestDB => Array(0) + case DBReady => Array(1) + case _ => throw new NotSerializableException + } + + override def fromBinary(bytes: Array[Byte], manifest: Option[Class[_]]): AnyRef = + bytes(0) match { + case 0 => RequestDB + case 1 => DBReady + case _ => throw new NotSerializableException + } +} diff --git a/cortex/connector/src/main/scala/org/thp/thehive/connector/cortex/services/ActionOperationSrv.scala b/cortex/connector/src/main/scala/org/thp/thehive/connector/cortex/services/ActionOperationSrv.scala index 2e2722b805..2839e10596 100644 --- a/cortex/connector/src/main/scala/org/thp/thehive/connector/cortex/services/ActionOperationSrv.scala +++ b/cortex/connector/src/main/scala/org/thp/thehive/connector/cortex/services/ActionOperationSrv.scala @@ -90,7 +90,7 @@ class ActionOperationSrv @Inject() ( case AddLogToTask(content, _) => for { t <- relatedTask.fold[Try[Task with Entity]](Failure(InternalError("Unable to apply action AddLogToTask without task")))(Success(_)) - _ <- logSrv.create(Log(content, new Date(), deleted = false), t) + _ <- logSrv.create(Log(content, new Date(), deleted = false), t, None) } yield updateOperation(operation) case AddArtifactToCase(_, dataType, dataMessage) => diff --git a/cortex/connector/src/main/scala/org/thp/thehive/connector/cortex/services/ActionSrv.scala b/cortex/connector/src/main/scala/org/thp/thehive/connector/cortex/services/ActionSrv.scala index 1ad5e48d03..79d97e3a23 100644 --- a/cortex/connector/src/main/scala/org/thp/thehive/connector/cortex/services/ActionSrv.scala +++ b/cortex/connector/src/main/scala/org/thp/thehive/connector/cortex/services/ActionSrv.scala @@ -18,7 +18,6 @@ import org.thp.thehive.connector.cortex.controllers.v0.Conversion._ import org.thp.thehive.connector.cortex.models._ import org.thp.thehive.connector.cortex.services.ActionOps._ import org.thp.thehive.connector.cortex.services.Conversion._ -import org.thp.thehive.connector.cortex.services.CortexActor.CheckJob import org.thp.thehive.controllers.v0.Conversion._ import org.thp.thehive.models._ import org.thp.thehive.services.AlertOps._ diff --git a/cortex/connector/src/main/scala/org/thp/thehive/connector/cortex/services/CortexActor.scala b/cortex/connector/src/main/scala/org/thp/thehive/connector/cortex/services/CortexActor.scala index fef75d6f44..aaf59bbb35 100644 --- a/cortex/connector/src/main/scala/org/thp/thehive/connector/cortex/services/CortexActor.scala +++ b/cortex/connector/src/main/scala/org/thp/thehive/connector/cortex/services/CortexActor.scala @@ -1,39 +1,37 @@ package org.thp.thehive.connector.cortex.services -import java.util.Date - import akka.actor._ import akka.pattern.pipe -import javax.inject.Inject import org.thp.client.ApplicationError -import org.thp.cortex.dto.v0.{JobStatus, JobType, 
OutputJob => CortexJob} +import org.thp.cortex.dto.v0.{JobStatus, JobType, OutputJob} import org.thp.scalligraph.EntityId import org.thp.scalligraph.auth.AuthContext import play.api.Logger +import java.util.Date +import javax.inject.Inject import scala.concurrent.ExecutionContext import scala.concurrent.duration._ -object CortexActor { - final case class CheckJob( - jobId: Option[EntityId], - cortexJobId: String, - actionId: Option[EntityId], - cortexId: String, - authContext: AuthContext - ) - - final private case object CheckJobs - final private case object CheckJobsKey - final private case object FirstCheckJobs -} - +sealed trait CortexActorMessage +case class RemoteJob(job: OutputJob) extends CortexActorMessage +case class CheckJob( + jobId: Option[EntityId], + cortexJobId: String, + actionId: Option[EntityId], + cortexId: String, + authContext: AuthContext +) extends CortexActorMessage + +private case object CheckJobs extends CortexActorMessage +private case object CheckJobsKey +private case object FirstCheckJobs extends CortexActorMessage +// FIXME Add serializer /** * This actor is primarily used to check Job statuses on regular * ticks using the provided client for each job */ class CortexActor @Inject() (connector: Connector, jobSrv: JobSrv, actionSrv: ActionSrv) extends Actor with Timers { - import CortexActor._ implicit val ec: ExecutionContext = context.dispatcher lazy val logger: Logger = Logger(getClass) @@ -66,35 +64,36 @@ class CortexActor @Inject() (connector: Connector, jobSrv: JobSrv, actionSrv: Ac .getReport(cortexJobId, 1.second) .recover { // this is a workaround for a timeout bug in Cortex case ApplicationError(500, body) if (body \ "type").asOpt[String].contains("akka.pattern.AskTimeoutException") => - CortexJob(cortexJobId, "", "", "", new Date, None, None, JobStatus.InProgress, None, None, "", "", None, JobType.analyzer) + OutputJob(cortexJobId, "", "", "", new Date, None, None, JobStatus.InProgress, None, None, "", "", None, JobType.analyzer) } + .map(RemoteJob) .pipeTo(self) () } } - case cortexJob: CortexJob if cortexJob.status == JobStatus.Success || cortexJob.status == JobStatus.Failure => - checkedJobs.find(_.cortexJobId == cortexJob.id) match { - case Some(CheckJob(Some(jobId), cortexJobId, _, cortexId, authContext)) if cortexJob.`type` == JobType.analyzer => - logger.info(s"Job $cortexJobId in cortex $cortexId has finished with status ${cortexJob.status}, updating job $jobId") - jobSrv.finished(cortexId, jobId, cortexJob)(authContext) - context.become(receive(checkedJobs.filterNot(_.cortexJobId == cortexJob.id), failuresCount)) + case RemoteJob(job) if job.status == JobStatus.Success || job.status == JobStatus.Failure => + checkedJobs.find(_.cortexJobId == job.id) match { + case Some(CheckJob(Some(jobId), cortexJobId, _, cortexId, authContext)) if job.`type` == JobType.analyzer => + logger.info(s"Job $cortexJobId in cortex $cortexId has finished with status ${job.status}, updating job $jobId") + jobSrv.finished(cortexId, jobId, job)(authContext) + context.become(receive(checkedJobs.filterNot(_.cortexJobId == job.id), failuresCount)) - case Some(CheckJob(_, cortexJobId, Some(actionId), cortexId, authContext)) if cortexJob.`type` == JobType.responder => - logger.info(s"Job $cortexJobId in cortex $cortexId has finished with status ${cortexJob.status}, updating action $actionId") - actionSrv.finished(actionId, cortexJob)(authContext) - context.become(receive(checkedJobs.filterNot(_.cortexJobId == cortexJob.id), failuresCount)) + case Some(CheckJob(_, 
cortexJobId, Some(actionId), cortexId, authContext)) if job.`type` == JobType.responder => + logger.info(s"Job $cortexJobId in cortex $cortexId has finished with status ${job.status}, updating action $actionId") + actionSrv.finished(actionId, job)(authContext) + context.become(receive(checkedJobs.filterNot(_.cortexJobId == job.id), failuresCount)) case Some(_) => - logger.error(s"CortexActor received job output $cortexJob but with unknown type ${cortexJob.`type`}") + logger.error(s"CortexActor received job output $job but with unknown type ${job.`type`}") case None => - logger.error(s"CortexActor received job output $cortexJob but did not have it in state $checkedJobs") + logger.error(s"CortexActor received job output $job but did not have it in state $checkedJobs") } - case cortexJob: CortexJob if cortexJob.status == JobStatus.InProgress || cortexJob.status == JobStatus.Waiting => - logger.info(s"CortexActor received ${cortexJob.status} from client, retrying in ${connector.refreshDelay}") + case RemoteJob(job) if job.status == JobStatus.InProgress || job.status == JobStatus.Waiting => + logger.info(s"CortexActor received ${job.status} from client, retrying in ${connector.refreshDelay}") - case _: CortexJob => + case _: RemoteJob => logger.warn(s"CortexActor received JobStatus.Unknown from client, retrying in ${connector.refreshDelay}") case Status.Failure(e) if failuresCount < connector.maxRetryOnError => diff --git a/cortex/connector/src/main/scala/org/thp/thehive/connector/cortex/services/CortexSerializer.scala b/cortex/connector/src/main/scala/org/thp/thehive/connector/cortex/services/CortexSerializer.scala new file mode 100644 index 0000000000..68c70f482a --- /dev/null +++ b/cortex/connector/src/main/scala/org/thp/thehive/connector/cortex/services/CortexSerializer.scala @@ -0,0 +1,55 @@ +package org.thp.thehive.connector.cortex.services + +import akka.serialization.Serializer +import org.thp.cortex.dto.v0.OutputJob +import org.thp.scalligraph.EntityIdOrName +import org.thp.scalligraph.auth.{AuthContext, AuthContextImpl, Permission} +import play.api.libs.functional.syntax._ +import play.api.libs.json._ + +import java.io.NotSerializableException + +object CortexSerializer { + implicit val authContextReads: Reads[AuthContext] = + ((JsPath \ "userId").read[String] and + (JsPath \ "userName").read[String] and + (JsPath \ "organisation").read[String].map(EntityIdOrName.apply) and + (JsPath \ "requestId").read[String] and + (JsPath \ "permissions").read[Set[String]].map(Permission.apply))(AuthContextImpl.apply _) + + implicit val authContextWrites: Writes[AuthContext] = Writes[AuthContext] { authContext => + Json.obj( + "userId" -> authContext.userId, + "userName" -> authContext.userName, + "organisation" -> authContext.organisation.toString, + "requestId" -> authContext.requestId, + "permissions" -> authContext.permissions + ) + } + implicit val format: OFormat[CheckJob] = Json.format[CheckJob] +} + +class CortexSerializer extends Serializer { + import CortexSerializer._ + override def identifier: Int = -414525848 + + override def includeManifest: Boolean = false + + override def toBinary(o: AnyRef): Array[Byte] = + o match { + case CheckJobs => Array(0) + case FirstCheckJobs => Array(1) + case RemoteJob(job) => 2.toByte +: Json.toJson(job).toString.getBytes + case cj: CheckJob => 3.toByte +: Json.toJson(cj).toString().getBytes + case _ => throw new NotSerializableException + } + + override def fromBinary(bytes: Array[Byte], manifest: Option[Class[_]]): AnyRef = + bytes(0) match { + case 0 
=> CheckJobs + case 1 => FirstCheckJobs + case 2 => RemoteJob(Json.parse(bytes.tail).as[OutputJob]) + case 3 => Json.parse(bytes.tail).as[CheckJob] + case _ => throw new NotSerializableException + } +} diff --git a/cortex/connector/src/main/scala/org/thp/thehive/connector/cortex/services/JobSrv.scala b/cortex/connector/src/main/scala/org/thp/thehive/connector/cortex/services/JobSrv.scala index c9196e6c74..f8477a3baf 100644 --- a/cortex/connector/src/main/scala/org/thp/thehive/connector/cortex/services/JobSrv.scala +++ b/cortex/connector/src/main/scala/org/thp/thehive/connector/cortex/services/JobSrv.scala @@ -24,7 +24,6 @@ import org.thp.scalligraph.{EntityId, EntityIdOrName, NotFoundError} import org.thp.thehive.connector.cortex.controllers.v0.Conversion._ import org.thp.thehive.connector.cortex.models._ import org.thp.thehive.connector.cortex.services.Conversion._ -import org.thp.thehive.connector.cortex.services.CortexActor.CheckJob import org.thp.thehive.connector.cortex.services.JobOps._ import org.thp.thehive.controllers.v0.Conversion._ import org.thp.thehive.models._
diff --git a/docker/README.md b/docker/README.md
deleted file mode 100644
index 2bb97b37c2..0000000000
--- a/docker/README.md
+++ /dev/null
@@ -1,161 +0,0 @@
-## Example of docker-compose (not for production)
-With this docker-compose.yml you will be able to run the following images:
-- The Hive 4.0.1-1
-- Cassandra 3.11
-- Cortex 3.1.0-1
-- Elasticsearch 7.9.3
-- Kibana 7.9.3
-- MISP 2.4.134
-- Mysql 8.0.22
-- Redis 6.0.9
-- Shuffle 0.7.6
-
-## Some hints
-
-### docker-compose version
-The docker-compose.yml uses compose file format 3.8; to run it you need at least Docker Engine 19.03.0+ (check with docker --version) and at least Docker Compose 1.25.5 (check with docker-compose --version).
-```
-Compose file format    Docker Engine release
-3.8                    19.03.0+
-3.7                    18.06.0+
-3.6                    18.02.0+
-3.5                    17.12.0+
-3.4                    17.09.0+
-```
-If for some reason you have an older version of Docker Engine or Docker Compose and cannot upgrade, you can use 3.7 or 3.6 in docker-compose.yml.
-
-### Mapping volumes
-If you take a look at docker-compose.yml you will see that some local folders need to be mapped, so before running docker-compose up, ensure that at least the folders holding the config files exist:
-- ./cortex/application.conf:/etc/cortex/application.conf
-- ./thehive/application.conf:/etc/thehive/application.conf
-
-The structure should look like:
-```
-├── docker-compose.yml
-├── elasticsearch_data
-├── elasticsearch_logs
-├── cortex
-│   └── application.conf
-├── thehive
-│   └── application.conf
-├── data
-├── mysql
-```
-If you run docker-compose with sudo, ensure that the elasticsearch_data and elasticsearch_logs folders were created by a non-root user, otherwise the Elasticsearch container will not start.
-
-### ElasticSearch
-The Elasticsearch container needs a large mmap count (https://www.elastic.co/guide/en/elasticsearch/reference/current/vm-max-map-count.html), which you can set from a shell with
-```sysctl -w vm.max_map_count=262144```
-To set this value permanently, update the vm.max_map_count setting in /etc/sysctl.conf. To verify it after rebooting, run sysctl vm.max_map_count.
-
-If you run all containers on the same system - and maybe have a limited amount of RAM - it is better to set some limits, so for Elasticsearch I added the following to docker-compose.yml:
-
-```- bootstrap.memory_lock=true```
-```- "ES_JAVA_OPTS=-Xms256m -Xmx256m"```
-
-Adjust these depending on your needs and your environment. Without these settings, Elasticsearch was using 1.5 GB in my environment.
-
-### Cassandra
-As with Elasticsearch, if you run everything on the same system with a limited amount of RAM, it is better to cap the heap size. For Cassandra I added the following to docker-compose.yml:
-
-```- MAX_HEAP_SIZE=1G```
-```- HEAP_NEWSIZE=1G```
-
-Adjust these depending on your needs and your environment. Without these settings, Cassandra was using 4 GB in my environment.
-
-### Cortex-Analyzers
-- In order to use analyzers in the docker version, Cortex's application.conf points at the online analyzers JSON instead of an absolute local path:
-  https://download.thehive-project.org/analyzers.json
-- In order to run analyzers as docker containers, the job section of application.conf sets:
-```
-job {
-  runner = [docker]
-}
-```
-- The dockerized analyzers need to pull their images from the internet, so add to "/etc/default/docker":
-  ``` DOCKER_OPTS="--dns 8.8.8.8 --dns 1.1.1.1" ```
-- When Cortex launches an analyzer it has to hand over the object being analyzed, so the /tmp folder must be shared
-- When Cortex launches an analyzer it uses docker.sock, which has to be mapped in the compose file:
-  ``` /var/run/docker.sock:/var/run/docker.sock ```
-- Change the permissions on /var/run/docker.sock so that the socket can be used by both the Cortex and cortex-analyzers containers:
-  ```sudo chmod 666 /var/run/docker.sock```
-- The first time an analyzer/responder is executed it will take a while, because the docker image is downloaded on the fly; from the second run onwards it executes normally
-
-### Cortex
-- Open the login page on port 9001, click "update database" and create the superadmin
-- As superadmin, create an organisation and another user (remember to set a password) and create an API key to use for connecting with The Hive
-
-### The Hive
-- In order to let The Hive read the external application.conf and configure Cortex, the following option has to be passed on the docker-compose command line:
-  --no-config
-- In order to let The Hive read the external application.conf and configure MISP to receive alerts, the following option has to be passed as well:
-  ``` --no-config-secret ```
-- Default credentials: admin@thehive.local // secret
-- In order to connect The Hive with Cortex, take the API key generated in Cortex and set it in thehive/application.conf
-- The MISP connection uses HTTPS; in order to skip verification of the self-signed certificate, add this setting to The Hive's application.conf under the MISP section:
-  ``` wsConfig { ssl { loose { acceptAnyCertificate: true } } } ```
-
-
-### MISP
-
-- Log in with the default credentials: admin@admin.test // admin
-- Change the password when prompted
-- Go to the Automation page and grab the API key, to use in The Hive's application.conf for receiving alerts from MISP, or in the MISP analyzers inside Cortex.
-
-
-### SHUFFLE
-To test automation I chose SHUFFLE (https://shuffler.io/).
-
-In docker-compose.yml, after the comment "#READY FOR AUTOMATION ?", there is a part dedicated to Shuffle (like the others, you can remove it if you don't need it).
-How to use it is not documented here; there is already documentation (https://shuffler.io/docs/about).
-This section only describes how to connect the pieces together.
-
-- After SHUFFLE starts, go to the login page (the frontend port is 3001 by default) and enter the credentials chosen in docker-compose.yml (for your convenience I set admin // password). Create your first workflow - it can be anything you have in mind - then go to Triggers, place a Webhook node on the dashboard, select it and grab the Webhook URI. It will be something like http://192.168.29.1:3001/api/v1/hooks/webhook_0982214b-3b92-4a85-b6fa-771982c2b449
-- Go to The Hive's application.conf and modify the URL under the webhook notification part:
-```
-notification.webhook.endpoints = [
-  {
-    name: local
-    url: "http://192.168.29.1:3001/api/v1/hooks/webhook_0982214b-3b92-4a85-b6fa-771982c2b449"
-    version: 0
-    wsConfig: {}
-    includedTheHiveOrganisations: []
-    excludedTheHiveOrganisations: []
-  }
-]
-```
-- Webhooks are not enabled by default in The Hive; you have to enable them. There is a guide for this: https://github.com/TheHive-Project/TheHiveDocs/blob/master/TheHive4/Administration/Webhook.md
-In my case I had to call this:
-```
-curl -XPUT -uuser@thehive.local:user@thehive.local -H 'Content-type: application/json' 127.0.0.1:9000/api/config/organisation/notification -d '
-{
-  "value": [
-    {
-      "delegate": false,
-      "trigger": { "name": "AnyEvent"},
-      "notifier": { "name": "webhook", "endpoint": "local" }
-    }
-  ]
-}'
-```
-- Now you can automate The Hive, Cortex-Analyzers and MISP thanks to SHUFFLE!
-
-
-### Result
-In conclusion, after executing ```sudo docker-compose up``` you will have the following services running:
-
-| Service | Address | User | Password |
-|----------|:-------------:|:------:|------:|
-| The Hive | http://localhost:9000 | admin@thehive.local | secret |
-| Cortex | http://localhost:9001 | | |
-| Elasticsearch | http://localhost:9200 | | |
-| Kibana | http://localhost:5601 | | |
-| MISP | https://localhost:443 | admin@admin.test | admin |
-| Shuffle | http://localhost:3001 | | |
-
-![image](https://user-images.githubusercontent.com/16938405/99674126-e8c99f80-2a75-11eb-9a8b-1603cf67d665.png)
-![image](https://user-images.githubusercontent.com/16938405/99674544-7c02d500-2a76-11eb-92a5-3fbb5c3c5cc5.png)
diff --git a/docker/cortex/application.conf b/docker/cortex/application.conf deleted file mode 100644 index 6236c81902..0000000000 --- a/docker/cortex/application.conf +++ /dev/null @@ -1,217 +0,0 @@ -# Sample Cortex application.conf file - -## SECRET KEY -# -# The secret key is used to secure cryptographic functions. -# -# IMPORTANT: If you deploy your application to several instances, make -# sure to use the same key. -play.http.secret.key="msd3232fdn3ofgfbki83ihtzHSD" - -## ElasticSearch -search { - # Name of the index - index = cortex - # ElasticSearch instance address. - # For cluster, join address:port with ',': "http://ip1:9200,ip2:9200,ip3:9200" - uri = "http://elasticsearch:9200" - - ## Advanced configuration - # Scroll keepalive. - #keepalive = 1m - # Scroll page size. - #pagesize = 50 - # Number of shards - #nbshards = 5 - # Number of replicas - #nbreplicas = 1 - # Arbitrary settings - #settings { - # # Maximum number of nested fields - # mapping.nested_fields.limit = 100 - #} - - ## Authentication configuration - #search.username = "" - #search.password = "" - - ## SSL configuration - #search.keyStore { - # path = "/path/to/keystore" - # type = "JKS" # or PKCS12 - # password = "keystore-password" - #} - #search.trustStore { - # path = "/path/to/trustStore" - # type = "JKS" # or PKCS12 - # password = "trustStore-password" - #} -} - -## Cache -# -# If an analyzer is executed against the same observable, the previous report can be returned without re-executing the -# analyzer. The cache is used only if the second job occurs within cache.job (the default is 10 minutes). -cache.job = 10 minutes - -## Authentication -auth { - # "provider" parameter contains the authentication provider(s).
It can be multi-valued, which is useful - # for migration. - # The available auth types are: - # - services.LocalAuthSrv : passwords are stored in the user entity within ElasticSearch). No - # configuration are required. - # - ad : use ActiveDirectory to authenticate users. The associated configuration shall be done in - # the "ad" section below. - # - ldap : use LDAP to authenticate users. The associated configuration shall be done in the - # "ldap" section below. - # - oauth2 : use OAuth/OIDC to authenticate users. Configuration is under "auth.oauth2" and "auth.sso" keys - provider = [local] - - ad { - # The Windows domain name in DNS format. This parameter is required if you do not use - # 'serverNames' below. - #domainFQDN = "mydomain.local" - - # Optionally you can specify the host names of the domain controllers instead of using 'domainFQDN - # above. If this parameter is not set, TheHive uses 'domainFQDN'. - #serverNames = [ad1.mydomain.local, ad2.mydomain.local] - - # The Windows domain name using short format. This parameter is required. - #domainName = "MYDOMAIN" - - # If 'true', use SSL to connect to the domain controller. - #useSSL = true - } - - ldap { - # The LDAP server name or address. The port can be specified using the 'host:port' - # syntax. This parameter is required if you don't use 'serverNames' below. - #serverName = "ldap.mydomain.local:389" - - # If you have multiple LDAP servers, use the multi-valued setting 'serverNames' instead. - #serverNames = [ldap1.mydomain.local, ldap2.mydomain.local] - - # Account to use to bind to the LDAP server. This parameter is required. - #bindDN = "cn=thehive,ou=services,dc=mydomain,dc=local" - - # Password of the binding account. This parameter is required. - #bindPW = "***secret*password***" - - # Base DN to search users. This parameter is required. - #baseDN = "ou=users,dc=mydomain,dc=local" - - # Filter to search user in the directory server. Please note that {0} is replaced - # by the actual user name. This parameter is required. - #filter = "(cn={0})" - - # If 'true', use SSL to connect to the LDAP directory server. - #useSSL = true - } - - oauth2 { - # URL of the authorization server - #clientId = "client-id" - #clientSecret = "client-secret" - #redirectUri = "https://my-thehive-instance.example/index.html#!/login" - #responseType = "code" - #grantType = "authorization_code" - - # URL from where to get the access token - #authorizationUrl = "https://auth-site.com/OAuth/Authorize" - #tokenUrl = "https://auth-site.com/OAuth/Token" - - # The endpoint from which to obtain user details using the OAuth token, after successful login - #userUrl = "https://auth-site.com/api/User" - #scope = "openid profile" - # Type of authorization header - #authorizationHeader = "Bearer" # or token - } - - # Single-Sign On - sso { - # Autocreate user in database? - #autocreate = false - - # Autoupdate its profile and roles? - #autoupdate = false - - # Autologin user using SSO? 
- #autologin = false - - # Attributes mappings - #attributes { - # login = "login" - # name = "name" - # groups = "groups" - # roles = "roles" # list of roles, separated with comma - # organisation = "org" - #} - - # Name of mapping class from user resource to backend user ('simple' or 'group') - #mapper = group - # Default roles for users with no groups mapped ("read", "analyze", "orgadmin") - #defaultRoles = [] - # Default organization - #defaultOrganization = "MyOrga" - - #groups { - # # URL to retreive groups (leave empty if you are using OIDC) - # #url = "https://auth-site.com/api/Groups" - # # Group mappings, you can have multiple roles for each group: they are merged - # mappings { - # admin-profile-name = ["admin"] - # editor-profile-name = ["write"] - # reader-profile-name = ["read"] - # } - #} - } -} - -job { - runner = [docker] -} -## ANALYZERS -# -analyzer { - # analyzer location - # url can be point to: - # - directory where analyzers are installed - # - json file containing the list of analyzer descriptions - urls = [ - "https://download.thehive-project.org/analyzers.json" - #"/absolute/path/of/analyzers" - ] - - # Sane defaults. Do not change unless you know what you are doing. - fork-join-executor { - # Min number of threads available for analysis. - parallelism-min = 2 - # Parallelism (threads) ... ceil(available processors * factor). - parallelism-factor = 2.0 - # Max number of threads available for analysis. - parallelism-max = 4 - } -} - -# RESPONDERS -# -responder { - # responder location (same format as analyzer.urls) - urls = [ - "https://download.thehive-project.org/responders.json" - #"/absolute/path/of/responders" - ] - - # Sane defaults. Do not change unless you know what you are doing. - fork-join-executor { - # Min number of threads available for analysis. - parallelism-min = 2 - # Parallelism (threads) ... ceil(available processors * factor). - parallelism-factor = 2.0 - # Max number of threads available for analysis. - parallelism-max = 4 - } -} - -# It's the end my friend. Happy hunting! 
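The SchemaUpdaterSerializer and CortexSerializer added earlier in this changeset both use the same wire format: a one-byte discriminator, optionally followed by a JSON payload. That is what makes the actor messages safe to exchange between nodes in cluster mode (issue #1708). Below is a minimal, self-contained sketch of the pattern; the names (DemoMessage, Ping, Payload, DemoSerializer) are illustrative stand-ins, not part of TheHive:

```scala
import akka.serialization.Serializer
import play.api.libs.json.{Json, OFormat}

import java.io.NotSerializableException

// Illustrative message protocol, not part of TheHive
sealed trait DemoMessage
case object Ping extends DemoMessage
final case class Payload(id: String, value: Int) extends DemoMessage

object Payload {
  implicit val format: OFormat[Payload] = Json.format[Payload]
}

// Same framing as SchemaUpdaterSerializer/CortexSerializer:
// byte 0 selects the message type, any remaining bytes carry JSON.
class DemoSerializer extends Serializer {
  override def identifier: Int          = 424242 // must be unique within the actor system
  override def includeManifest: Boolean = false  // the discriminator byte replaces the manifest

  override def toBinary(o: AnyRef): Array[Byte] =
    o match {
      case Ping       => Array(0)
      case p: Payload => 1.toByte +: Json.toJson(p).toString.getBytes("UTF-8")
      case _          => throw new NotSerializableException
    }

  override def fromBinary(bytes: Array[Byte], manifest: Option[Class[_]]): AnyRef =
    bytes(0) match {
      case 0 => Ping
      case 1 => Json.parse(bytes.tail).as[Payload]
      case _ => throw new NotSerializableException
    }
}
```

Bound to its message trait through serialization-bindings, exactly as reference-overrides.conf does above for SchemaUpdaterMessage and CortexActorMessage, such a serializer replaces Java serialization whenever one of these messages crosses a node boundary.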
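The reworked SchemaUpdaterActor also illustrates a standard Akka classic idiom for one-shot initialisation: do the expensive work on the first request, reply, then swap in a steady-state behaviour with context.become so every later request is answered immediately. A stripped-down sketch of the idiom, with hypothetical names (Init, Ready, initialise):

```scala
import akka.actor.Actor

// Hypothetical protocol, mirroring RequestDB/DBReady in the diff
sealed trait InitProtocol
case object Init  extends InitProtocol
case object Ready extends InitProtocol

class OneShotInitActor extends Actor {
  // Placeholder for the expensive, run-exactly-once work
  // (the schema update, in SchemaUpdaterActor's case)
  private def initialise(): Unit = ()

  override def receive: Receive = {
    case Init =>
      initialise()
      sender() ! Ready
      context.become(initialised) // later requests skip initialise()
  }

  private def initialised: Receive = {
    case Init => sender() ! Ready
  }
}
```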
diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml deleted file mode 100644 index 1bb7b6b63e..0000000000 --- a/docker/docker-compose.yml +++ /dev/null @@ -1,178 +0,0 @@ -version: "3.8" -services: - elasticsearch: - image: 'elasticsearch:7.9.3' - container_name: elasticsearch - restart: unless-stopped - ports: - - '0.0.0.0:9200:9200' - environment: - - http.host=0.0.0.0 - - discovery.type=single-node - - cluster.name=hive - - script.allowed_types= inline - - thread_pool.search.queue_size=100000 - - thread_pool.write.queue_size=10000 - - gateway.recover_after_nodes=1 - - xpack.security.enabled=false - - bootstrap.memory_lock=true - - 'ES_JAVA_OPTS=-Xms256m -Xmx256m' - ulimits: - nofile: - soft: 65536 - hard: 65536 - volumes: - - ./elasticsearch_data:/usr/share/elasticsearch/data - - ./elasticsearch_logs:/usr/share/elasticsearch/logs - kibana: - image: 'docker.elastic.co/kibana/kibana:7.9.3' - container_name: kibana - restart: unless-stopped - depends_on: - - elasticsearch - ports: - - '5601:5601' - cortex: - image: 'thehiveproject/cortex:3.1.0-1' - container_name: cortex - restart: unless-stopped - volumes: - - ./cortex/application.conf:/etc/cortex/application.conf - - /var/run/docker.sock:/var/run/docker.sock - - /tmp:/tmp - depends_on: - - elasticsearch - ports: - - '0.0.0.0:9001:9001' - - cassandra: - image: cassandra:3.11 - container_name: cassandra - restart: unless-stopped - hostname: cassandra - environment: - - MAX_HEAP_SIZE=1G - - HEAP_NEWSIZE=1G - - CASSANDRA_CLUSTER_NAME=thp - volumes: - - ./cassandra-data:/var/lib/cassandra/data - - thehive: - image: 'thehiveproject/thehive4:latest' - container_name: thehive - restart: unless-stopped - depends_on: - - cassandra - ports: - - '0.0.0.0:9000:9000' - volumes: - - ./thehive/application.conf:/etc/thehive/application.conf - - ./data:/opt/data - command: '--no-config --no-config-secret' - - redis: - image: redis:latest - container_name: redis - restart: unless-stopped - - db: - image: mysql:latest - container_name: mysql - restart: unless-stopped - command: --default-authentication-plugin=mysql_native_password - restart: always - environment: - - "MYSQL_USER=misp" - - "MYSQL_PASSWORD=example" - - "MYSQL_ROOT_PASSWORD=password" - - "MYSQL_DATABASE=misp" - volumes: - - ./mysql:/var/lib/mysql - misp: - image: coolacid/misp-docker:core-latest - container_name: misp - restart: unless-stopped - depends_on: - - redis - - db - ports: - - "80:80" - - "443:443" - environment: - - "HOSTNAME=https://localhost" - - "REDIS_FQDN=redis" - - "INIT=true" # Initialze MISP, things includes, attempting to import SQL and the Files DIR - - "CRON_USER_ID=1" # The MISP user ID to run cron jobs as - - "DISIPV6=true" # Disable IPV6 in nginx - misp-modules: - image: coolacid/misp-docker:modules-latest - container_name: misp-modules - environment: - - "REDIS_BACKEND=redis" - depends_on: - - redis - - db - -#READY FOR AUTOMATION ? 
- frontend: - image: frikky/shuffle:frontend - container_name: shuffle-frontend - hostname: shuffle-frontend - ports: - - "3001:80" - - "3443:443" - environment: - - BACKEND_HOSTNAME=shuffle-backend - restart: unless-stopped - depends_on: - - backend - backend: - image: frikky/shuffle:backend - container_name: shuffle-backend - hostname: shuffle-backend - ports: - - "5001:5001" - volumes: - - /var/run/docker.sock:/var/run/docker.sock - - ./shuffle-apps:/shuffle-apps - environment: - - DATASTORE_EMULATOR_HOST=shuffle-database:8000 - - SHUFFLE_APP_HOTLOAD_FOLDER=./shuffle-apps - - ORG_ID=Shuffle - - SHUFFLE_APP_DOWNLOAD_LOCATION=https://github.com/frikky/shuffle-apps - - SHUFFLE_DEFAULT_USERNAME=admin - - SHUFFLE_DEFAULT_PASSWORD=password - - SHUFFLE_DEFAULT_APIKEY=mysecretkey - - HTTP_PROXY= - - HTTPS_PROXY= - restart: unless-stopped - depends_on: - - database - orborus: - image: frikky/shuffle:orborus - container_name: shuffle-orborus - hostname: shuffle-orborus - volumes: - - /var/run/docker.sock:/var/run/docker.sock - environment: - - SHUFFLE_APP_SDK_VERSION=0.6.0 - - SHUFFLE_WORKER_VERSION=0.6.0 - - ORG_ID=Shuffle - - ENVIRONMENT_NAME=Shuffle - - BASE_URL=http://shuffle-backend:5001 - - DOCKER_API_VERSION=1.40 - - HTTP_PROXY= - - HTTPS_PROXY= - - SHUFFLE_PASS_WORKER_PROXY=False - restart: unless-stopped - database: - image: frikky/shuffle:database - container_name: shuffle-database - hostname: shuffle-database - ports: - - "8000:8000" - restart: unless-stopped - volumes: - - ./shuffle-database:/etc/shuffle - - diff --git a/docker/thehive/application.conf b/docker/thehive/application.conf deleted file mode 100644 index b6ed0da698..0000000000 --- a/docker/thehive/application.conf +++ /dev/null @@ -1,78 +0,0 @@ -play.http.secret.key="t5EeDXh2dEtJxohh" - -# JanusGraph -db { - provider: janusgraph - janusgraph { - storage { - backend: cql - hostname: ["cassandra"] - - cql { - cluster-name: thp # cluster name - keyspace: thehive # name of the keyspace - read-consistency-level: ONE - write-consistency-level: ONE - } - } - } -} - -storage { - provider: localfs - localfs.location: /opt/data -} - -play.modules.enabled += org.thp.thehive.connector.cortex.CortexModule -cortex { - servers = [ - { - name = local - url = "http://cortex:9001" - auth { - type = "bearer" - key = "JmjjnBDuLL2WgJBsF00vmxTdWTqMj0Jw" - } - # HTTP client configuration (SSL and proxy) - # wsConfig {} - # List TheHive organisation which can use this Cortex server. All ("*") by default - # includedTheHiveOrganisations = ["*"] - # List TheHive organisation which cannot use this Cortex server. 
None by default - # excludedTheHiveOrganisations = [] - } - ] - # Check job update time intervalcortex - refreshDelay = 5 seconds - # Maximum number of successive errors before give up - maxRetryOnError = 3 - # Check remote Cortex status time interval - statusCheckInterval = 1 minute -} -# MISP configuration -play.modules.enabled += org.thp.thehive.connector.misp.MispModule -misp { - interval: 5 min - servers: [ - { - name = "MISP THP" # MISP name - url = "https://misp/" # URL or MISP - auth { - type = key - key = "s7wSDr0I78WD8ImMpS2P8sX9Iy9N4Jiboz3pdWtm" # MISP API key - } - wsConfig { ssl { loose { acceptAnyCertificate: true } } } - } - ] -} - - -notification.webhook.endpoints = [ - { - name: local - url: "http://thehive:5000/" - version: 0 - wsConfig: {} - includedTheHiveOrganisations: [] - excludedTheHiveOrganisations: [] - } -] diff --git a/dto/src/main/scala/org/thp/thehive/dto/v0/Attachment.scala b/dto/src/main/scala/org/thp/thehive/dto/v0/Attachment.scala index f1fe2ce0b6..b5b7a1377b 100644 --- a/dto/src/main/scala/org/thp/thehive/dto/v0/Attachment.scala +++ b/dto/src/main/scala/org/thp/thehive/dto/v0/Attachment.scala @@ -1,6 +1,12 @@ package org.thp.thehive.dto.v0 -import play.api.libs.json.{Json, OFormat} +import play.api.libs.json.{Json, OFormat, Writes} + +case class InputAttachment(name: String, contentType: String, id: String) + +object InputAttachment { + implicit val writes: Writes[InputAttachment] = Json.writes[InputAttachment] +} case class OutputAttachment(name: String, hashes: Seq[String], size: Long, contentType: String, id: String) diff --git a/dto/src/main/scala/org/thp/thehive/dto/v0/Observable.scala b/dto/src/main/scala/org/thp/thehive/dto/v0/Observable.scala index 369ead4aa9..37173a6296 100644 --- a/dto/src/main/scala/org/thp/thehive/dto/v0/Observable.scala +++ b/dto/src/main/scala/org/thp/thehive/dto/v0/Observable.scala @@ -1,7 +1,6 @@ package org.thp.thehive.dto.v0 import java.util.Date - import org.scalactic.Accumulation._ import org.scalactic.Good import org.thp.scalligraph.controllers._ @@ -9,11 +8,12 @@ import play.api.libs.json.{JsObject, Json, OFormat, Writes} case class InputObservable( dataType: String, - @WithParser(InputObservable.fp) + @WithParser(InputObservable.dataParser) data: Seq[String] = Nil, message: Option[String] = None, startDate: Option[Date] = None, - attachment: Option[FFile] = None, + @WithParser(InputObservable.fileOrAttachmentParser) + attachment: Seq[Either[FFile, InputAttachment]] = Seq.empty, tlp: Option[Int] = None, tags: Set[String] = Set.empty, ioc: Option[Boolean] = None, @@ -22,14 +22,31 @@ case class InputObservable( ) object InputObservable { + implicit val fileOrAttachmentWrites: Writes[Either[FFile, InputAttachment]] = Writes[Either[FFile, InputAttachment]] { + case Left(file) => Json.toJson(file) + case Right(attachment) => Json.toJson(attachment) + } implicit val writes: Writes[InputObservable] = Json.writes[InputObservable] - val fp: FieldsParser[Seq[String]] = FieldsParser[Seq[String]]("data") { + val dataParser: FieldsParser[Seq[String]] = FieldsParser[Seq[String]]("data") { case (_, FString(s)) => Good(Seq(s)) case (_, FAny(s)) => Good(s) case (_, FSeq(a)) => a.validatedBy(FieldsParser.string(_)) case (_, FUndefined) => Good(Nil) } + + val fileOrAttachmentParser: FieldsParser[Seq[Either[FFile, InputAttachment]]] = + FieldsParser[FFile] + .map("fileOrAttachmentParser")(f => Seq(Left(f))) + .recover( + FieldsParser[InputAttachment] + .map("fileOrAttachmentParser")(a => Seq(Right(a))) + .recover( + 
FieldsParser[InputAttachment] + .sequence + .map("fileOrAttachmentParser")(as => as.map(Right(_))) + ) + ) } case class OutputObservable( diff --git a/dto/src/main/scala/org/thp/thehive/dto/v1/Attachment.scala b/dto/src/main/scala/org/thp/thehive/dto/v1/Attachment.scala index e0f9d9dfcc..afd3d5719e 100644 --- a/dto/src/main/scala/org/thp/thehive/dto/v1/Attachment.scala +++ b/dto/src/main/scala/org/thp/thehive/dto/v1/Attachment.scala @@ -1,6 +1,12 @@ package org.thp.thehive.dto.v1 -import play.api.libs.json.{Json, OFormat} +import play.api.libs.json.{Json, OFormat, Writes} + +case class InputAttachment(name: String, contentType: String, id: String) + +object InputAttachment { + implicit val writes: Writes[InputAttachment] = Json.writes[InputAttachment] +} case class OutputAttachment(name: String, hashes: Seq[String], size: Long, contentType: String, id: String) diff --git a/dto/src/main/scala/org/thp/thehive/dto/v1/Observable.scala b/dto/src/main/scala/org/thp/thehive/dto/v1/Observable.scala index 4211b762f9..3562dab2cd 100644 --- a/dto/src/main/scala/org/thp/thehive/dto/v1/Observable.scala +++ b/dto/src/main/scala/org/thp/thehive/dto/v1/Observable.scala @@ -1,7 +1,6 @@ package org.thp.thehive.dto.v1 import java.util.Date - import org.scalactic.Accumulation._ import org.scalactic.Good import org.thp.scalligraph.controllers._ @@ -9,11 +8,12 @@ import play.api.libs.json.{JsObject, Json, OFormat, Writes} case class InputObservable( dataType: String, - @WithParser(InputObservable.fp) + @WithParser(InputObservable.dataParser) data: Seq[String] = Nil, message: Option[String] = None, startDate: Option[Date] = None, - attachment: Option[FFile] = None, + @WithParser(InputObservable.fileOrAttachmentParser) + attachment: Seq[Either[FFile, InputAttachment]] = Seq.empty, tlp: Option[Int] = None, tags: Set[String] = Set.empty, ioc: Option[Boolean] = None, @@ -22,14 +22,32 @@ case class InputObservable( ) object InputObservable { + implicit val fileOrAttachmentWrites: Writes[Either[FFile, InputAttachment]] = Writes[Either[FFile, InputAttachment]] { + case Left(file) => Json.toJson(file) + case Right(attachment) => Json.toJson(attachment) + } + implicit val writes: Writes[InputObservable] = Json.writes[InputObservable] - val fp: FieldsParser[Seq[String]] = FieldsParser[Seq[String]]("data") { + val dataParser: FieldsParser[Seq[String]] = FieldsParser[Seq[String]]("data") { case (_, FString(s)) => Good(Seq(s)) case (_, FAny(s)) => Good(s) case (_, FSeq(a)) => a.validatedBy(FieldsParser.string(_)) case (_, FUndefined) => Good(Nil) } + + val fileOrAttachmentParser: FieldsParser[Seq[Either[FFile, InputAttachment]]] = + FieldsParser[FFile] + .map("fileOrAttachmentParser")(f => Seq(Left(f))) + .recover( + FieldsParser[InputAttachment] + .map("fileOrAttachmentParser")(a => Seq(Right(a))) + .recover( + FieldsParser[InputAttachment] + .sequence + .map("fileOrAttachmentParser")(as => as.map(Right(_))) + ) + ) } case class OutputObservable( diff --git a/frontend/app/index.html b/frontend/app/index.html index 800b08a1ae..d419c6d6d0 100644 --- a/frontend/app/index.html +++ b/frontend/app/index.html @@ -150,6 +150,7 @@ + @@ -195,6 +196,7 @@ + diff --git a/frontend/app/scripts/components/sharing/task/SharingListCmp.js b/frontend/app/scripts/components/sharing/task/SharingListCmp.js new file mode 100644 index 0000000000..fc3930fc38 --- /dev/null +++ b/frontend/app/scripts/components/sharing/task/SharingListCmp.js @@ -0,0 +1,44 @@ +(function() { + 'use strict'; + + angular.module('theHiveComponents') + 
.component('taskSharingList', { + controller: function() { + var self = this; + + this.remove = function(share) { + this.onDelete({ + share: share + }); + }; + + this.updateProfile = function(org, newProfile) { + this.onUpdateProfile({ + profile: newProfile, + org: org + }); + }; + + this.requireAction = function(org) { + this.onRequireAction({ + task: self.task, + org: org + }); + }; + }, + controllerAs: '$ctrl', + templateUrl: 'views/components/sharing/task/sharing-list.html', + bindings: { + task: '<', + shares: '<', + organisations: '<', + profiles: '<', + readOnly: '<', + //onReload: '&', + onUpdateProfile: '&', + onDelete: '&', + onRequireAction: '&', + permissions: '=' + } + }); +})(); diff --git a/frontend/app/scripts/controllers/MainPageCtrl.js b/frontend/app/scripts/controllers/MainPageCtrl.js index 8320212b90..da42a4b96f 100644 --- a/frontend/app/scripts/controllers/MainPageCtrl.js +++ b/frontend/app/scripts/controllers/MainPageCtrl.js @@ -63,7 +63,7 @@ filter: self.filtering.buildQuery(), baseFilter: view === 'mytasks' ? self.defaultFilter : [], operations: self.queryOperations, - extraData: ['case'], + extraData: ['case', 'actionRequired'], name: $stateParams.viewId }); }; diff --git a/frontend/app/scripts/controllers/case/CaseListCtrl.js b/frontend/app/scripts/controllers/case/CaseListCtrl.js index 5d6f22f2ee..4d44f9e5fe 100644 --- a/frontend/app/scripts/controllers/case/CaseListCtrl.js +++ b/frontend/app/scripts/controllers/case/CaseListCtrl.js @@ -111,7 +111,7 @@ operations: [ {'_name': 'listCase'} ], - extraData: ['observableStats', 'taskStats', 'isOwner', 'shareCount', 'permissions'], + extraData: ['observableStats', 'taskStats', 'isOwner', 'shareCount', 'permissions', 'actionRequired'], onUpdate: function() { self.resetSelection(); } diff --git a/frontend/app/scripts/controllers/case/CaseTasksCtrl.js b/frontend/app/scripts/controllers/case/CaseTasksCtrl.js index ae277fea63..9cf31da969 100755 --- a/frontend/app/scripts/controllers/case/CaseTasksCtrl.js +++ b/frontend/app/scripts/controllers/case/CaseTasksCtrl.js @@ -68,7 +68,7 @@ {'_name': 'getCase', "idOrName": $scope.caseId}, {'_name': 'tasks'} ], - extraData: ['shareCount'], + extraData: ['shareCount', 'actionRequired'], //extraData: ['isOwner', 'shareCount'], onUpdate: function() { $scope.buildTaskGroups($scope.tasks.values); diff --git a/frontend/app/scripts/controllers/case/CaseTasksItemCtrl.js b/frontend/app/scripts/controllers/case/CaseTasksItemCtrl.js index fc0ef7f86f..d214fec5a3 100644 --- a/frontend/app/scripts/controllers/case/CaseTasksItemCtrl.js +++ b/frontend/app/scripts/controllers/case/CaseTasksItemCtrl.js @@ -1,7 +1,7 @@ (function () { 'use strict'; angular.module('theHiveControllers').controller('CaseTasksItemCtrl', - function ($scope, $rootScope, $state, $stateParams, $timeout, $uibModal, PaginatedQuerySrv, SecuritySrv, ModalSrv, CaseSrv, AuthenticationSrv, OrganisationSrv, CaseTabsSrv, CaseTaskSrv, PSearchSrv, TaskLogSrv, NotificationSrv, CortexSrv, StatSrv, task) { + function ($q, $scope, $rootScope, $state, $stateParams, $timeout, $uibModal, StreamSrv, PaginatedQuerySrv, SecuritySrv, ModalSrv, CaseSrv, AuthenticationSrv, OrganisationSrv, CaseTabsSrv, CaseTaskSrv, PSearchSrv, TaskLogSrv, NotificationSrv, CortexSrv, StatSrv, task) { var caseId = $stateParams.caseId, taskId = $stateParams.itemId; @@ -224,7 +224,7 @@ }; $scope.reloadTask = function() { - CaseTaskSrv.getById($scope.task._id) + return CaseTaskSrv.getById($scope.task._id) .then(function(data) { $scope.task = data; }) @@ -234,10 +234,18 @@ 
}; $scope.loadShares = function () { - return CaseTaskSrv.getShares(caseId, taskId) - .then(function(response) { - $scope.shares = response.data; - }); + if(SecuritySrv.checkPermissions(['manageShare'], $scope.userPermissions)) { + return CaseTaskSrv.getShares(caseId, taskId) + .then(function(response) { + + // Add action required flag to shares + _.each(response.data, function(share) { + share.actionRequired = !!$scope.task.extraData.actionRequiredMap[share.organisationName]; + }); + + $scope.shares = response.data; + }); + } }; $scope.removeShare = function(share) { @@ -304,6 +312,97 @@ }); }; + + + $scope.showAddLog = function(prompt) { + var modalInstance = $uibModal.open({ + animation: true, + keyboard: false, + backdrop: 'static', + templateUrl: 'views/partials/case/tasklogs/add-task-log.modal.html', + controller: 'AddTaskLogModalCtrl', + controllerAs: '$modal', + size: 'lg', + resolve: { + task: task, + config: function() { + return { + prompt: prompt + }; + } + } + }); + + return modalInstance.result; + }; + + $scope.markAsDone = function(task) { + CaseTaskSrv.promtForActionRequired('Require Action', 'Would you like to add a task log before marking the required action as DONE?') + .then(function(response) { + if(response === 'skip-log') { + return $q.resolve(); + } else { + return $scope.showAddLog('Please add a task log'); + } + }) + .then(function() { + return CaseTaskSrv.markAsDone(task._id, $scope.currentUser.organisation); + }) + .then(function() { + NotificationSrv.log('The task\'s required action is completed', 'success'); + }) + .catch(function(err) { + if(err && !_.isString(err)) { + NotificationSrv.error('Error', 'Task required action failed to be marked as done', err.status); + } + }); + }; + + $scope.markAsActionRequired = function(task) { + CaseTaskSrv.promtForActionRequired('Require Action', 'Would you like to add a task log before requesting action?') + .then(function(response) { + if(response === 'skip-log') { + return $q.resolve(); + } else { + return $scope.showAddLog('Please add a task log'); + } + }) + .then(function() { + return CaseTaskSrv.markAsActionRequired(task._id, $scope.currentUser.organisation); + }) + .then(function() { + NotificationSrv.log('The task\'s required action flag has been set', 'success'); + }) + .catch(function(err) { + if(err && !_.isString(err)) { + NotificationSrv.error('Error', 'Task request action failed', err.status); + } + }); + + }; + + $scope.markShareAsActionRequired = function(task, org) { + CaseTaskSrv.promtForActionRequired('Require Action', 'Would you like to add a task log before requesting action?') + .then(function(response) { + if(response === 'skip-log') { + return $q.resolve(); + } else { + return $scope.showAddLog('Please add a task log'); + } + }) + .then(function() { + return CaseTaskSrv.markAsActionRequired(task._id, org); + }) + .then(function() { + NotificationSrv.log('The task\'s required action flag has been set for organisation ' + org, 'success'); + }) + .catch(function(err) { + if(err && !_.isString(err)) { + NotificationSrv.error('Error', 'Task request action failed', err.status); + } + }); + }; + this.$onInit = function() { // Add tabs CaseTabsSrv.addTab($scope.tabName, { @@ -322,13 +421,39 @@ $('html,body').animate({scrollTop: $('body').offset().top}, 'fast'); }, 0); + // Add action required listener + StreamSrv.addListener({ + rootId: caseId, + objectType: 'case_task', + scope: $scope, + callback: function(updates) { + // Update action required indicators in task item page and shares list + 
_.each(updates, function(update) { if(update.base.objectId === $scope.task._id ){ var updatedKeys = _.keys(update.base.details); var actionRequiredChange = _.find(updatedKeys, function(key) { return key.startsWith('actionRequired'); }); if(actionRequiredChange !== undefined) { $scope.reloadTask() .then(function() { $scope.loadShares(); }); } } }); } }); // Prepare the scope data $scope.initScope(task); - if(SecuritySrv.checkPermissions(['manageShare'], $scope.userPermissions)) { - $scope.loadShares(); - } + // if(SecuritySrv.checkPermissions(['manageShare'], $scope.userPermissions)) { + $scope.loadShares(); + //} // $scope.organisations = organisations; // $scope.profiles = profiles; diff --git a/frontend/app/scripts/controllers/case/tasklogs/AddTaskLogModalCtrl.js b/frontend/app/scripts/controllers/case/tasklogs/AddTaskLogModalCtrl.js new file mode 100644 index 0000000000..7a9d95d31f --- /dev/null +++ b/frontend/app/scripts/controllers/case/tasklogs/AddTaskLogModalCtrl.js @@ -0,0 +1,77 @@ +/** + * Controller for the Add Task Log modal dialog + */ +(function() { + 'use strict'; + + angular.module('theHiveControllers').controller('AddTaskLogModalCtrl', function($rootScope, $scope, $uibModalInstance, TaskLogSrv, NotificationSrv, task, config) { + var self = this; + + this.task = task; + this.config = config; + + this.close = function() { + $uibModalInstance.close(); + }; + + this.cancel = function() { + $rootScope.markdownEditorObjects.newLog.hidePreview(); + + $uibModalInstance.dismiss(); + }; + + this.addLog = function() { + // this.close(); + if (this.state.attachmentCollapsed || !this.data.attachment) { + delete this.data.attachment; + } + + TaskLogSrv.save({ + 'taskId': self.task._id + }, self.data, function () { + // if(self.task.status === 'Waiting') { + // // Reload the task + // $scope.reloadTask(); + // } + // + delete self.data.attachment; + self.state.attachmentCollapsed = true; + self.data.message = ''; + + $rootScope.markdownEditorObjects.newLog.hidePreview(); + // $scope.adding = false; + // removeAllFiles is added by dropzone directive as control + self.state.removeAllFiles(); + + self.state.loading = false; + + self.close(); + }, function (response) { + NotificationSrv.error('Add Task Log', response.data, response.status); + self.state.loading = false; + }); + + }; + + this.$onInit = function() { + this.markdownEditorOptions = { + iconlibrary: 'fa', + addExtraButtons: true, + resize: 'vertical' + }; + + this.data = { + message: null, + attachment: null + }; + + this.state = { + attachmentCollapsed: true, + loading: false + }; + + $scope.$broadcast('beforeNewTaskLogShow'); + }; + } + ); +})(); diff --git a/frontend/app/scripts/directives/responder-actions.js b/frontend/app/scripts/directives/responder-actions.js index d8fbce6bb2..c23116f056 100644 --- a/frontend/app/scripts/directives/responder-actions.js +++ b/frontend/app/scripts/directives/responder-actions.js @@ -16,7 +16,7 @@ return; } - _.each(list.values, function(action) { + _.each(_.isArray(list) ?
list : list.values, function(action) { if (action.status === 'Failure') { action.errorMessage = (JSON.parse(action.report) || {}).errorMessage; } diff --git a/frontend/app/scripts/services/api/CaseTaskSrv.js b/frontend/app/scripts/services/api/CaseTaskSrv.js index 962da87fdb..6b3e96e8a4 100644 --- a/frontend/app/scripts/services/api/CaseTaskSrv.js +++ b/frontend/app/scripts/services/api/CaseTaskSrv.js @@ -1,7 +1,7 @@ (function() { 'use strict'; angular.module('theHiveServices') - .service('CaseTaskSrv', function($resource, $http, $q, QuerySrv) { + .service('CaseTaskSrv', function($resource, $http, $q, QuerySrv, ModalSrv) { var resource = $resource('./api/case/:caseId/task/:taskId', {}, { update: { method: 'PATCH' @@ -17,13 +17,14 @@ var defer = $q.defer(); QuerySrv.call('v1', [{ - '_name': 'getTask', - 'idOrName': id + _name: 'getTask', + idOrName: id }], { name: 'get-task-' + id, page: { from: 0, - to: 1 + to: 1, + extraData: ['actionRequired', 'actionRequiredMap'] } }).then(function(response) { defer.resolve(response[0]); @@ -34,6 +35,18 @@ return defer.promise; }; + this.getActionRequiredMap = function(taskId) { + return $http.get('./api/v1/task/' + taskId + '/actionRequired'); + }; + + this.markAsDone = function(taskId, org) { + return $http.put('./api/v1/task/' + taskId + '/actionDone/' + org); + }; + + this.markAsActionRequired = function(taskId, org) { + return $http.put('./api/v1/task/' + taskId + '/actionRequired/' + org); + }; + this.getShares = function(caseId, taskId) { return $http.get('./api/case/' + caseId + '/task/' + taskId + '/shares'); }; @@ -55,5 +68,37 @@ }); }; + this.promtForActionRequired = function(title, prompt) { + var defer = $q.defer(); + + var confirmModal = ModalSrv.confirm( + title, + prompt, { + okText: 'Yes, add log', + actions: [ + { + flavor: 'default', + text: 'Proceed without log', + dismiss: 'skip-log' + } + ] + } + ); + + confirmModal.result + .then(function(/*response*/) { + defer.resolve('add-log'); + }) + .catch(function(err) { + if(err === 'skip-log') { + defer.resolve(err); + } else { + defer.reject(err); + } + }); + + return defer.promise; + }; + }); })(); diff --git a/frontend/app/scripts/services/common/data/StreamSrv.js b/frontend/app/scripts/services/common/data/StreamSrv.js index bde90b37fc..3382b37844 100644 --- a/frontend/app/scripts/services/common/data/StreamSrv.js +++ b/frontend/app/scripts/services/common/data/StreamSrv.js @@ -126,7 +126,10 @@ }); }); } - self.poll(); + + $timeout(function() { + self.poll(); + }, 0); }).catch(function(err) { // Initialize the stream; @@ -182,7 +185,7 @@ config.scope.$on(eventName, function(event, data) { if(!self.disabled) { config.callback(data); - } + } }); } }; diff --git a/frontend/app/scripts/services/common/ui/modal/ModalSrv.js b/frontend/app/scripts/services/common/ui/modal/ModalSrv.js index d5edb63db2..e5560536fe 100644 --- a/frontend/app/scripts/services/common/ui/modal/ModalSrv.js +++ b/frontend/app/scripts/services/common/ui/modal/ModalSrv.js @@ -12,6 +12,9 @@ this.confirm = function() { $uibModalInstance.close('ok'); }; + this.dismiss = function(value) { + $uibModalInstance.dismiss(value); + }; } angular.module('theHiveServices') @@ -19,6 +22,8 @@ this.confirm = function(title, message, config) { return $uibModal.open({ + keyboard: false, + backdrop: 'static', controller: ModalConfirmCtrl, templateUrl: 'views/components/common/modal/modal.confirm.html', controllerAs: '$modal', diff --git a/frontend/app/styles/main.css b/frontend/app/styles/main.css index 0425ebbcd9..686e27c413 100644 --- 
a/frontend/app/styles/main.css +++ b/frontend/app/styles/main.css @@ -284,6 +284,7 @@ pre.clearpre { .progress.task-progress .progress-bar { opacity: 0.8; + height: 4px; } .btn-canceled, .progress-bar-default { @@ -332,7 +333,7 @@ ul.observable-reports-summary li { .case-page .case-page-content { background-color: #FFF; - padding: 20px 10px 10px 10px; + padding: 10px 10px 10px 10px; } .case-details dt, diff --git a/frontend/app/views/components/common/modal/modal.confirm.html b/frontend/app/views/components/common/modal/modal.confirm.html index e3f4412a90..ebf871842c 100644 --- a/frontend/app/views/components/common/modal/modal.confirm.html +++ b/frontend/app/views/components/common/modal/modal.confirm.html @@ -6,7 +6,10 @@

{{$modal.message}}

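For reference, a minimal sketch of how a caller can use the extended confirm modal shown above (ModalSrv, okText and the actions array come from this changeset; the surrounding callback code is illustrative and mirrors promtForActionRequired in CaseTaskSrv further down):

// Ask for confirmation with an extra, non-default button. Dismissing the
// modal with an action's `dismiss` value rejects the promise with that
// value, so the caller can tell a deliberate secondary choice from a cancel.
ModalSrv.confirm('Require Action', 'Add a task log before requesting action?', {
    okText: 'Yes, add log',
    actions: [
        { flavor: 'default', text: 'Proceed without log', dismiss: 'skip-log' }
    ]
}).result
    .then(function() {
        // primary button: promise resolved with 'ok'
    })
    .catch(function(err) {
        if (err === 'skip-log') {
            // secondary button: proceed without a log
        }
        // any other rejection means the user cancelled the modal
    });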
diff --git a/frontend/app/views/components/common/observable-flags.component.html b/frontend/app/views/components/common/observable-flags.component.html index 91598e6c28..af90ffe3c0 100644 --- a/frontend/app/views/components/common/observable-flags.component.html +++ b/frontend/app/views/components/common/observable-flags.component.html @@ -36,7 +36,7 @@ -
+
diff --git a/frontend/app/views/components/sharing/task/sharing-list.html b/frontend/app/views/components/sharing/task/sharing-list.html new file mode 100644 index 0000000000..a500758f41 --- /dev/null +++ b/frontend/app/views/components/sharing/task/sharing-list.html @@ -0,0 +1,57 @@ +
+
+
No records
+
+ +
+ + + + + + + + + + + + + + + + + + + + + + +
Organisation | Profile | Shared At | Actions
+ + + + + + {{share.organisationName}} + + {{share.profileName}} + + {{share.profileName}} + + {{share.profileName}} + + + + + {{share.createdAt | shortDate}} + + Require Action + + + + Delete + +
+
+
diff --git a/frontend/app/views/partials/case/case.details.html b/frontend/app/views/partials/case/case.details.html index 79ead25542..bb252c4fd4 100644 --- a/frontend/app/views/partials/case/case.details.html +++ b/frontend/app/views/partials/case/case.details.html @@ -1,6 +1,6 @@
-

Summary

+

Basic Information

Title
diff --git a/frontend/app/views/partials/case/case.list.html b/frontend/app/views/partials/case/case.list.html index 36e3c63db6..e2f43aac03 100644 --- a/frontend/app/views/partials/case/case.list.html +++ b/frontend/app/views/partials/case/case.list.html @@ -91,16 +91,19 @@

List of cases ({{$vm.list.total || 0}} of {{$vm.caseCount}})

- - - - ({{currentCase.extraData.shareCount || 0}}) - + +
+ + + + ({{currentCase.extraData.shareCount || 0}}) + +
@@ -108,10 +111,17 @@

List of cases ({{$vm.list.total || 0}} of {{$vm.caseCount}})

- - - -
+
+ + + + + + + + + +
diff --git a/frontend/app/views/partials/case/case.tasks.html b/frontend/app/views/partials/case/case.tasks.html index b0636060f2..0811411f68 100755 --- a/frontend/app/views/partials/case/case.tasks.html +++ b/frontend/app/views/partials/case/case.tasks.html @@ -98,7 +98,12 @@

uib-tooltip="Shared from another organisation" tooltip-placement="right"> --> - {{task.title}} + + + + + {{task.title}} +

Closed after {{(task.endDate - task.startDate) | amDurationFormat : 'milliseconds'}} @@ -108,12 +113,15 @@

- +
+ + + + ({{task.extraData.shareCount || 0}}) + +
- - ({{task.extraData.shareCount || 0}}) - {{task.startDate | shortDate}} @@ -217,7 +225,14 @@

- {{task.title}} + + + + + + + {{task.title}} +
Closed after {{(task.endDate - task.startDate) | amDurationFormat : 'milliseconds'}} @@ -227,11 +242,14 @@

- + - - ({{task.extraData.shareCount || 0}}) - + + + ({{task.extraData.shareCount || 0}}) + +

{{task.startDate | shortDate}} diff --git a/frontend/app/views/partials/case/case.tasks.item.html b/frontend/app/views/partials/case/case.tasks.item.html index f2d96bdf0b..4edd2e65d3 100644 --- a/frontend/app/views/partials/case/case.tasks.item.html +++ b/frontend/app/views/partials/case/case.tasks.item.html @@ -1,4 +1,15 @@
+
+
+

+ Action Required! +

+
+ This task requires an action from your organisation. + +
+
+

Basic Information @@ -7,12 +18,19 @@

- Sharing + Sharing ({{shares.length || 0}}) | + + + + Require Action + + + {{task.flag ? 'Unflag' : 'Flag'}} @@ -233,12 +251,14 @@

Task sharing

- + >
diff --git a/frontend/app/views/partials/case/list/toolbar.html b/frontend/app/views/partials/case/list/toolbar.html index 8c6363308e..07be4940cc 100644 --- a/frontend/app/views/partials/case/list/toolbar.html +++ b/frontend/app/views/partials/case/list/toolbar.html @@ -57,10 +57,10 @@
Low Severity first
  • - Ascendant assignee + Ascendant assignee
  • - Descendant assignee + Descendant assignee
  • diff --git a/frontend/app/views/partials/case/tasklogs/add-task-log.modal.html b/frontend/app/views/partials/case/tasklogs/add-task-log.modal.html new file mode 100644 index 0000000000..0877c52b55 --- /dev/null +++ b/frontend/app/views/partials/case/tasklogs/add-task-log.modal.html @@ -0,0 +1,40 @@ +
    + + + +
    diff --git a/frontend/app/views/partials/main/index-mytasks.html b/frontend/app/views/partials/main/index-mytasks.html index 22bd1fd938..61fb289b55 100644 --- a/frontend/app/views/partials/main/index-mytasks.html +++ b/frontend/app/views/partials/main/index-mytasks.html @@ -23,8 +23,16 @@ {{value.group}}
    - - + + + + + + + + + {{value.title}} +
    Started @@ -34,7 +42,7 @@
    - {{value.startDate | showDate}} + {{value.startDate | shortDate}} diff --git a/frontend/app/views/partials/main/index-waitingtasks.html b/frontend/app/views/partials/main/index-waitingtasks.html index c7a7ee8ee4..37c18e1d0f 100644 --- a/frontend/app/views/partials/main/index-waitingtasks.html +++ b/frontend/app/views/partials/main/index-waitingtasks.html @@ -20,7 +20,18 @@ {{value.group}} -
    {{value.title}}
    +
    + + + + + + + + + {{value.title}} + +
    diff --git a/frontend/app/views/partials/observables/details/summary.html b/frontend/app/views/partials/observables/details/summary.html index 360ea6f016..bc01374393 100644 --- a/frontend/app/views/partials/observables/details/summary.html +++ b/frontend/app/views/partials/observables/details/summary.html @@ -2,7 +2,7 @@

    - Metadata + Basic Information
    diff --git a/frontend/bower.json b/frontend/bower.json index 26f486b888..ba58fd6409 100644 --- a/frontend/bower.json +++ b/frontend/bower.json @@ -1,6 +1,6 @@ { "name": "thehive", - "version": "4.0.2-1", + "version": "4.0.3-1", "license": "AGPL-3.0", "dependencies": { "jquery": "^3.4.1", diff --git a/frontend/package.json b/frontend/package.json index d1bca2f2ce..963e781297 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -1,6 +1,6 @@ { "name": "thehive", - "version": "4.0.2-1", + "version": "4.0.3-1", "license": "AGPL-3.0", "repository": { "type": "git", diff --git a/migration/src/main/scala/org/thp/thehive/migration/th4/Output.scala b/migration/src/main/scala/org/thp/thehive/migration/th4/Output.scala index fe388fb90a..d6575d9963 100644 --- a/migration/src/main/scala/org/thp/thehive/migration/th4/Output.scala +++ b/migration/src/main/scala/org/thp/thehive/migration/th4/Output.scala @@ -586,7 +586,7 @@ class Output @Inject() ( _ = updateMetaData(log, inputLog.metaData) _ <- inputLog.attachments.toTry { inputAttachment => attachmentSrv.create(inputAttachment.name, inputAttachment.size, inputAttachment.contentType, inputAttachment.data).flatMap { attachment => - logSrv.addAttachment(log, attachment) + logSrv.logAttachmentSrv.create(LogAttachment(), log, attachment) } } } yield IdMapping(inputLog.metaData.id, log._id) diff --git a/misp/client/src/main/scala/org/thp/misp/client/MispClient.scala b/misp/client/src/main/scala/org/thp/misp/client/MispClient.scala index edae588bc5..e5d03cd2d6 100644 --- a/misp/client/src/main/scala/org/thp/misp/client/MispClient.scala +++ b/misp/client/src/main/scala/org/thp/misp/client/MispClient.scala @@ -1,19 +1,19 @@ package org.thp.misp.client -import java.util.Date - import akka.NotUsed import akka.stream.alpakka.json.scaladsl.JsonReader import akka.stream.scaladsl.{JsonFraming, Source} import akka.util.ByteString import org.thp.client.{ApplicationError, Authentication, ProxyWS} -import org.thp.misp.dto.{Attribute, Event, Organisation, Tag, User} +import org.thp.misp.dto._ import org.thp.scalligraph.InternalError +import org.thp.scalligraph.utils.FunctionalCondition._ import play.api.Logger import play.api.http.Status -import play.api.libs.json.{JsObject, JsString, JsValue, Json} +import play.api.libs.json._ import play.api.libs.ws.{WSClient, WSRequest} +import java.util.Date import scala.concurrent.duration.{Duration, DurationInt} import scala.concurrent.{Await, ExecutionContext, Future} import scala.util.{Failure, Success, Try} @@ -29,6 +29,7 @@ class MispClient( ws: WSClient, maxAge: Option[Duration], excludedOrganisations: Seq[String], + whitelistOrganisations: Seq[String], excludedTags: Set[String], whitelistTags: Set[String] ) { @@ -148,7 +149,7 @@ class MispClient( def getEvent(eventId: String)(implicit ec: ExecutionContext): Future[Event] = { logger.debug(s"Get MISP event $eventId") - require(!eventId.isEmpty) + require(eventId.nonEmpty) get(s"events/$eventId") .map(e => (e \ "Event").as[Event]) } @@ -163,7 +164,17 @@ class MispClient( .recover { case _ => Json.obj("name" -> name, "version" -> "", "status" -> "ERROR", "url" -> baseUrl) } def searchEvents(publishDate: Option[Date] = None)(implicit ec: ExecutionContext): Source[Event, NotUsed] = { - val query = publishDate.fold(JsObject.empty)(d => Json.obj("searchpublish_timestamp" -> ((d.getTime / 1000) + 1))) + val fromDate = (maxAge.map(a => System.currentTimeMillis() - a.toMillis).toSeq ++ publishDate.map(_.getTime)) + .sorted(Ordering[Long].reverse) + .headOption + .map(d => 
"searchpublish_timestamp" -> JsNumber((d / 1000) + 1)) + val tagFilter = (whitelistTags ++ excludedTags.map("!" + _)).map(JsString.apply) + val organisationFilter = (whitelistOrganisations ++ excludedOrganisations.map("!" + _)).map(JsString.apply) + val query = JsObject + .empty + .merge(fromDate)(_ + _) + .when(tagFilter.nonEmpty)(_ + ("searchtag" -> JsArray(tagFilter.toSeq))) + .when(organisationFilter.nonEmpty)(_ + ("searchorg" -> JsArray(organisationFilter))) logger.debug("Search MISP events") Source .futureSource(postStream("events/index", query)) @@ -172,27 +183,9 @@ class MispClient( val maybeEvent = Try(Json.parse(data.toArray[Byte]).as[Event]) maybeEvent.fold(error => { logger.warn(s"Event has invalid format: ${data.decodeString("UTF-8")}", error); Nil }, List(_)) } - .filterNot(isExcluded) .mapMaterializedValue(_ => NotUsed) } - def isExcluded(event: Event): Boolean = { - val eventTags = event.tags.map(_.name).toSet - if (whitelistTags.nonEmpty && (whitelistTags & eventTags).isEmpty) { - logger.debug(s"event ${event.id} is ignored because it doesn't contain any of whitelist tags (${whitelistTags.mkString(",")})") - true - } else if (excludedOrganisations.contains(event.orgc)) { - logger.debug(s"event ${event.id} is ignored because its organisation (${event.orgc}) is excluded") - true - } else { - val t = excludedTags.intersect(eventTags) - if ((excludedTags & eventTags).nonEmpty) { - logger.debug(s"event ${event.id} is ignored because one of its tags (${t.mkString(",")}) is excluded") - true - } else false - } - } - def searchAttributes(eventId: String, publishDate: Option[Date])(implicit ec: ExecutionContext): Source[Attribute, NotUsed] = { logger.debug(s"Search MISP attributes for event #$eventId ${publishDate.fold("")("from " + _)}") Source diff --git a/misp/connector/src/main/resources/play/reference-overrides.conf b/misp/connector/src/main/resources/play/reference-overrides.conf new file mode 100644 index 0000000000..6e6132ded4 --- /dev/null +++ b/misp/connector/src/main/resources/play/reference-overrides.conf @@ -0,0 +1,9 @@ +akka.actor { + serializers { + misp = "org.thp.thehive.connector.misp.services.MispSerializer" + } + + serialization-bindings { + "org.thp.thehive.connector.misp.services.MispMessage" = misp + } +} diff --git a/misp/connector/src/main/scala/org/thp/thehive/connector/misp/controllers/v0/MispCtrl.scala b/misp/connector/src/main/scala/org/thp/thehive/connector/misp/controllers/v0/MispCtrl.scala index a9b06ff214..ddccfd7d66 100644 --- a/misp/connector/src/main/scala/org/thp/thehive/connector/misp/controllers/v0/MispCtrl.scala +++ b/misp/connector/src/main/scala/org/thp/thehive/connector/misp/controllers/v0/MispCtrl.scala @@ -2,18 +2,18 @@ package org.thp.thehive.connector.misp.controllers.v0 import akka.actor.ActorRef import com.google.inject.name.Named -import javax.inject.{Inject, Singleton} import org.thp.scalligraph.EntityIdOrName import org.thp.scalligraph.controllers.Entrypoint import org.thp.scalligraph.models.Database import org.thp.scalligraph.traversal.TraversalOps._ -import org.thp.thehive.connector.misp.services.{MispActor, MispExportSrv} +import org.thp.thehive.connector.misp.services.{MispExportSrv, Synchro} import org.thp.thehive.models.Permissions import org.thp.thehive.services.AlertOps._ import org.thp.thehive.services.CaseOps._ import org.thp.thehive.services.{AlertSrv, CaseSrv} import play.api.mvc.{Action, AnyContent, Results} +import javax.inject.{Inject, Singleton} import scala.concurrent.{ExecutionContext, Future} import 
scala.util.Success @@ -31,7 +31,7 @@ class MispCtrl @Inject() ( def sync: Action[AnyContent] = entrypoint("sync MISP events") .authPermitted(Permissions.manageOrganisation) { _ => - mispActor ! MispActor.Synchro + mispActor ! Synchro Success(Results.NoContent) } diff --git a/misp/connector/src/main/scala/org/thp/thehive/connector/misp/services/MispActor.scala b/misp/connector/src/main/scala/org/thp/thehive/connector/misp/services/MispActor.scala index fd52006516..cc9e0b8236 100644 --- a/misp/connector/src/main/scala/org/thp/thehive/connector/misp/services/MispActor.scala +++ b/misp/connector/src/main/scala/org/thp/thehive/connector/misp/services/MispActor.scala @@ -2,24 +2,19 @@ package org.thp.thehive.connector.misp.services import akka.actor.{Actor, ActorRef, ActorSystem, Cancellable} import akka.cluster.singleton.{ClusterSingletonProxy, ClusterSingletonProxySettings} -import javax.inject.{Inject, Named, Provider} import org.thp.scalligraph.auth.UserSrv import play.api.Logger -import scala.concurrent.Future -import scala.util.{Failure, Success, Try} +import javax.inject.{Inject, Named, Provider} -object MispActor { - case object Synchro - case class EndOfSynchro(status: Try[Unit]) -} +sealed trait MispMessage +case object Synchro extends MispMessage class MispActor @Inject() ( connector: Connector, mispImportSrv: MispImportSrv, userSrv: UserSrv ) extends Actor { - import MispActor._ import context.dispatcher lazy val logger: Logger = Logger(getClass) @@ -27,34 +22,22 @@ class MispActor @Inject() ( override def preStart(): Unit = { super.preStart() logger.info(s"[$self] Starting actor MISP") - context.become(waiting(context.system.scheduler.scheduleOnce(connector.syncInitialDelay, self, Synchro))) + context.become(receive(context.system.scheduler.scheduleOnce(connector.syncInitialDelay, self, Synchro))) } override def receive: Receive = { case other => logger.warn(s"Unknown message $other (${other.getClass})") } - def running: Receive = { - case Synchro => logger.info("MISP synchronisation is already in progress") - case EndOfSynchro(Success(_)) => - logger.info("MISP synchronisation is complete") - context.become(waiting(context.system.scheduler.scheduleOnce(connector.syncInterval, self, Synchro))) - case EndOfSynchro(Failure(error)) => - logger.error("MISP synchronisation fails", error) - context.become(waiting(context.system.scheduler.scheduleOnce(connector.syncInterval, self, Synchro))) - case other => logger.warn(s"Unknown message $other (${other.getClass})") - } - - def waiting(scheduledSynchronisation: Cancellable): Receive = { + def receive(scheduledSynchronisation: Cancellable): Receive = { case Synchro => scheduledSynchronisation.cancel() - context.become(running) logger.info(s"Synchronising MISP events for ${connector.clients.map(_.name).mkString(",")}") - Future - .traverse(connector.clients.filter(_.canImport))(mispImportSrv.syncMispEvents(_)(userSrv.getSystemAuthContext)) - .map(_ => ()) - .onComplete(status => self ! 
EndOfSynchro(status)) - case other => logger.warn(s"Unknown message $other (${other.getClass})") + connector.clients.filter(_.canImport).foreach { mispClient => + mispImportSrv.syncMispEvents(mispClient)(userSrv.getSystemAuthContext) + } + logger.info("MISP synchronisation is complete") + context.become(receive(context.system.scheduler.scheduleOnce(connector.syncInterval, self, Synchro))) } } diff --git a/misp/connector/src/main/scala/org/thp/thehive/connector/misp/services/MispImportSrv.scala b/misp/connector/src/main/scala/org/thp/thehive/connector/misp/services/MispImportSrv.scala index 4318dd1ad4..f4a5990d2b 100644 --- a/misp/connector/src/main/scala/org/thp/thehive/connector/misp/services/MispImportSrv.scala +++ b/misp/connector/src/main/scala/org/thp/thehive/connector/misp/services/MispImportSrv.scala @@ -1,27 +1,31 @@ package org.thp.thehive.connector.misp.services -import java.nio.file.Files -import java.util.Date - import akka.stream.Materializer import akka.stream.scaladsl.{FileIO, Sink, Source} import akka.util.ByteString -import javax.inject.{Inject, Named, Singleton} import org.apache.tinkerpop.gremlin.process.traversal.P +import org.apache.tinkerpop.gremlin.structure.Graph import org.thp.misp.dto.{Attribute, Event, Tag => MispTag} import org.thp.scalligraph.auth.AuthContext import org.thp.scalligraph.controllers.FFile import org.thp.scalligraph.models._ import org.thp.scalligraph.traversal.TraversalOps._ +import org.thp.scalligraph.utils.FunctionalCondition._ import org.thp.scalligraph.{EntityName, RichSeq} +import org.thp.thehive.controllers.v1.Conversion._ import org.thp.thehive.models._ import org.thp.thehive.services.AlertOps._ import org.thp.thehive.services.ObservableOps._ import org.thp.thehive.services.OrganisationOps._ import org.thp.thehive.services._ import play.api.Logger +import play.api.libs.json._ -import scala.concurrent.{ExecutionContext, Future} +import java.nio.file.Files +import java.util.Date +import javax.inject.{Inject, Named, Singleton} +import scala.concurrent.duration.DurationInt +import scala.concurrent.{Await, ExecutionContext} import scala.util.{Failure, Success, Try} @Singleton @@ -33,6 +37,7 @@ class MispImportSrv @Inject() ( observableTypeSrv: ObservableTypeSrv, attachmentSrv: AttachmentSrv, caseTemplateSrv: CaseTemplateSrv, + auditSrv: AuditSrv, @Named("with-thehive-schema") db: Database, implicit val ec: ExecutionContext, implicit val mat: Materializer @@ -69,27 +74,25 @@ class MispImportSrv @Inject() ( ) } - def convertAttributeType(attributeCategory: String, attributeType: String): Try[(ObservableType with Entity, Seq[String])] = { + def convertAttributeType(attributeCategory: String, attributeType: String)(implicit + graph: Graph + ): Try[(ObservableType with Entity, Seq[String])] = { val obsTypeFromConfig = connector .attributeConverter(attributeCategory, attributeType) .flatMap { attrConv => - db.roTransaction { implicit graph => - observableTypeSrv - .get(attrConv.`type`) - .headOption - .map(_ -> attrConv.tags) - } + observableTypeSrv + .get(attrConv.`type`) + .headOption + .map(_ -> attrConv.tags) } - db.roTransaction { implicit graph => - obsTypeFromConfig - .orElse(observableTypeSrv.get(EntityName(attributeType)).headOption.map(_ -> Nil)) - .fold(observableTypeSrv.getOrFail(EntityName("other")).map(_ -> Seq.empty[String]))(Success(_)) - } + obsTypeFromConfig + .orElse(observableTypeSrv.get(EntityName(attributeType)).headOption.map(_ -> Nil)) + .fold(observableTypeSrv.getOrFail(EntityName("other")).map(_ -> 
Seq.empty[String]))(Success(_)) } def attributeToObservable( attribute: Attribute - ): List[(Observable, ObservableType with Entity, Set[String], Either[String, (String, String, Source[ByteString, _])])] = + )(implicit graph: Graph): List[(Observable, ObservableType with Entity, Set[String], Either[String, (String, String, Source[ByteString, _])])] = attribute .`type` .split('|') @@ -152,23 +155,23 @@ class MispImportSrv @Inject() ( Nil } - def getLastSyncDate(client: TheHiveMispClient, mispOrganisation: String, organisations: Seq[Organisation with Entity]): Option[Date] = { - val lastOrgSynchro = db.roTransaction { implicit graph => - client - .organisationFilter(organisationSrv.startTraversal) - .group( - _.by, - _.by( - _.alerts - .filterBySource(mispOrganisation) - .filterByType("misp") - .value(a => a.lastSyncDate) - .max - ) + def getLastSyncDate(client: TheHiveMispClient, mispOrganisation: String, organisations: Seq[Organisation with Entity])(implicit + graph: Graph + ): Option[Date] = { + val lastOrgSynchro = client + .organisationFilter(organisationSrv.startTraversal) + .group( + _.by, + _.by( + _.alerts + .filterBySource(mispOrganisation) + .filterByType("misp") + .value(a => a.lastSyncDate) + .max ) - .head - }.values -// .asInstanceOf[Seq[Date]] + ) + .head + .values if (lastOrgSynchro.size == organisations.size && organisations.nonEmpty) Some(lastOrgSynchro.min) else None @@ -179,37 +182,42 @@ class MispImportSrv @Inject() ( observable: Observable, observableType: ObservableType with Entity, data: String, - tags: Set[String] - )(implicit authContext: AuthContext): Try[Observable with Entity] = - db.tryTransaction { implicit graph => - alertSrv - .get(alert) - .observables - .filterOnType(observableType.name) - .filterOnData(data) - .richObservable - .headOption match { - case None => - logger.debug(s"Observable ${observableType.name}:$data doesn't exist, create it") - for { - richObservable <- observableSrv.create(observable, observableType, data, tags, Nil) - _ <- alertSrv.addObservable(alert, richObservable) - } yield richObservable.observable - case Some(richObservable) => - logger.debug(s"Observable ${observableType.name}:$data exists, update it") - for { - updatedObservable <- - Some(observableSrv.get(richObservable.observable)) - .map(t => if (richObservable.message != observable.message) t.update(_.message, observable.message) else t) - .map(t => if (richObservable.tlp != observable.tlp) t.update(_.tlp, observable.tlp) else t) - .map(t => if (richObservable.ioc != observable.ioc) t.update(_.ioc, observable.ioc) else t) - .map(t => if (richObservable.sighted != observable.sighted) t.update(_.sighted, observable.sighted) else t) - .get - .getOrFail("Observable") - _ <- observableSrv.updateTagNames(updatedObservable, tags) - } yield updatedObservable - } + tags: Set[String], + creation: Boolean + )(implicit graph: Graph, authContext: AuthContext): Try[Unit] = { + + val existingObservable = + if (creation) None + else + alertSrv + .get(alert) + .observables + .filterOnType(observableType.name) + .filterOnData(data) + .richObservable + .headOption + existingObservable match { + case None => + logger.debug(s"Observable ${observableType.name}:$data doesn't exist, create it") + for { + richObservable <- observableSrv.create(observable, observableType, data, tags, Nil) + _ <- alertSrv.addObservable(alert, richObservable) + } yield () + case Some(richObservable) => + logger.debug(s"Observable ${observableType.name}:$data exists, update it") + for { + updatedObservable <- + 
observableSrv + .get(richObservable.observable) + .when(richObservable.message != observable.message)(_.update(_.message, observable.message)) + .when(richObservable.tlp != observable.tlp)(_.update(_.tlp, observable.tlp)) + .when(richObservable.ioc != observable.ioc)(_.update(_.ioc, observable.ioc)) + .when(richObservable.sighted != observable.sighted)(_.update(_.sighted, observable.sighted)) + .getOrFail("Observable") + _ <- observableSrv.updateTagNames(updatedObservable, tags) + } yield () } + } def updateOrCreateObservable( alert: Alert with Entity, @@ -218,115 +226,101 @@ class MispImportSrv @Inject() ( filename: String, contentType: String, src: Source[ByteString, _], - tags: Set[String] - )(implicit authContext: AuthContext): Future[Observable with Entity] = - db.roTransaction { implicit graph => - alertSrv - .get(alert) - .observables - .filterOnType(observableType.name) - .filterOnAttachmentName(filename) - .filterOnAttachmentName(contentType) - .richObservable - .headOption - } match { + tags: Set[String], + creation: Boolean + )(implicit graph: Graph, authContext: AuthContext): Try[Unit] = { + val existingObservable = + if (creation) None + else + alertSrv + .get(alert) + .observables + .filterOnType(observableType.name) + .filterOnAttachmentName(filename) + .filterOnAttachmentName(contentType) + .richObservable + .headOption + existingObservable match { case None => logger.debug(s"Observable ${observableType.name}:$filename:$contentType doesn't exist, create it") val file = Files.createTempFile("misp-attachment-", "") - (for { - _ <- src.runWith(FileIO.toPath(file)) - fFile = FFile(filename, file, contentType) - createdObservable <- Future.fromTry { - db.tryTransaction { implicit graph => - for { - createdAttachment <- attachmentSrv.create(fFile) - richObservable <- observableSrv.create(observable, observableType, createdAttachment, tags, Nil) - _ <- alertSrv.addObservable(alert, richObservable) - } yield richObservable - } - } - } yield createdObservable.observable) - .andThen { case _ => Files.delete(file) } + Await.result(src.runWith(FileIO.toPath(file)), 1.hour) + val fFile = FFile(filename, file, contentType) + for { + createdAttachment <- attachmentSrv.create(fFile) + richObservable <- observableSrv.create(observable, observableType, createdAttachment, tags, Nil) + _ <- alertSrv.addObservable(alert, richObservable) + _ = Files.delete(file) + } yield () case Some(richObservable) => logger.debug(s"Observable ${observableType.name}:$filename:$contentType exists, update it") - Future.fromTry { - db.tryTransaction { implicit graph => - for { - updatedObservable <- - Some(observableSrv.get(richObservable.observable)) - .map(t => if (richObservable.message != observable.message) t.update(_.message, observable.message) else t) - .map(t => if (richObservable.tlp != observable.tlp) t.update(_.tlp, observable.tlp) else t) - .map(t => if (richObservable.ioc != observable.ioc) t.update(_.ioc, observable.ioc) else t) - .map(t => if (richObservable.sighted != observable.sighted) t.update(_.sighted, observable.sighted) else t) - .get - .getOrFail("Observable") - _ <- observableSrv.updateTagNames(updatedObservable, tags) - } yield updatedObservable - } - } + for { + updatedObservable <- + observableSrv + .get(richObservable.observable) + .when(richObservable.message != observable.message)(_.update(_.message, observable.message)) + .when(richObservable.tlp != observable.tlp)(_.update(_.tlp, observable.tlp)) + .when(richObservable.ioc != observable.ioc)(_.update(_.ioc, observable.ioc)) + 
.when(richObservable.sighted != observable.sighted)(_.update(_.sighted, observable.sighted)) + .getOrFail("Observable") + _ <- observableSrv.updateTagNames(updatedObservable, tags) + } yield () } + } def importAttibutes(client: TheHiveMispClient, event: Event, alert: Alert with Entity, lastSynchro: Option[Date])(implicit + graph: Graph, authContext: AuthContext - ): Future[Unit] = { - logger.debug(s"importAttibutes ${client.name}#${event.id}") + ): Unit = { + logger.debug(s"importAttributes ${client.name}#${event.id}") val startSyncDate = new Date - client - .searchAttributes(event.id, lastSynchro) - .mapConcat(attributeToObservable) - .fold( - Map.empty[ - (String, String), - (Observable, ObservableType with Entity, Set[String], Either[String, (String, String, Source[ByteString, _])]) - ] - ) { - case (distinctMap, data @ (_, t, _, Left(d))) => distinctMap + ((t.name, d) -> data) - case (distinctMap, data @ (_, t, _, Right((n, _, _)))) => distinctMap + ((t.name, n) -> data) - } - .mapConcat { m => - m.values.toList - } - .runWith(Sink.foreachAsync(1) { - case (observable, observableType, tags, Left(data)) => - updateOrCreateObservable(alert, observable, observableType, data, tags) - .fold( - error => { - logger.error(s"Unable to create observable $observable ${observableType.name}:$data", error) - Future.failed(error) - }, - _ => Future.successful(()) - ) - case (observable, observableType, tags, Right((filename, contentType, src))) => - updateOrCreateObservable(alert, observable, observableType, filename, contentType, src, tags) - .transform { - case Success(_) => Success(()) - case Failure(error) => - logger.error( - s"Unable to create observable $observable ${observableType.name}:$filename", - error - ) - Success(()) - } - }) - .flatMap { _ => - Future.fromTry { - logger.info("Removing old observables") - db.tryTransaction { implicit graph => - alertSrv - .get(alert) - .observables - .filter( - _.or( - _.has(_._updatedAt, P.lt(startSyncDate)), - _.and(_.hasNot(_._updatedAt), _.has(_._createdAt, P.lt(startSyncDate))) - ) - ) - .toIterator - .toTry { obs => - logger.info(s"Remove $obs") - observableSrv.remove(obs) - } - }.map(_ => ()) + val queue = + client + .searchAttributes(event.id, lastSynchro) + .mapConcat(attributeToObservable) + .fold( + Map.empty[ + (String, String), + (Observable, ObservableType with Entity, Set[String], Either[String, (String, String, Source[ByteString, _])]) + ] + ) { + case (distinctMap, data @ (_, t, _, Left(d))) => distinctMap + ((t.name, d) -> data) + case (distinctMap, data @ (_, t, _, Right((n, _, _)))) => distinctMap + ((t.name, n) -> data) + } + .mapConcat { m => + m.values.toList + } + .runWith(Sink.queue[(Observable, ObservableType with Entity, Set[String], Either[String, (String, String, Source[ByteString, _])])]) + QueueIterator(queue).foreach { + case (observable, observableType, tags, Left(data)) => + updateOrCreateObservable(alert, observable, observableType, data, tags ++ client.observableTags, lastSynchro.isEmpty) + .recover { + case error => + logger.error(s"Unable to create observable $observable ${observableType.name}:$data", error) + } + case (observable, observableType, tags, Right((filename, contentType, src))) => + updateOrCreateObservable(alert, observable, observableType, filename, contentType, src, tags ++ client.observableTags, lastSynchro.isEmpty) + .recover { + case error => + logger.error(s"Unable to create observable $observable ${observableType.name}:$filename", error) + } + } + + logger.info("Removing old observables") + 
alertSrv + .get(alert) + .observables + .filter( + _.or( + _.has(_._updatedAt, P.lt(startSyncDate)), + _.and(_.hasNot(_._updatedAt), _.has(_._createdAt, P.lt(startSyncDate))) + ) + ) + .toIterator + .foreach { obs => + logger.debug(s"Delete $obs") + observableSrv.remove(obs).recover { + case error => logger.error(s"Failed to delete observable $obs", error) } } } @@ -339,78 +333,99 @@ class MispImportSrv @Inject() ( mispOrganisation: String, event: Event, caseTemplate: Option[CaseTemplate with Entity] - )(implicit - authContext: AuthContext - ): Try[Alert with Entity] = { + )(implicit graph: Graph, authContext: AuthContext): Try[(Alert with Entity, JsObject)] = { logger.debug(s"updateOrCreateAlert ${client.name}#${event.id} for organisation ${organisation.name}") eventToAlert(client, event).flatMap { alert => - db.tryTransaction { implicit graph => - organisationSrv - .get(organisation) - .alerts - .getBySourceId("misp", mispOrganisation, event.id) - .richAlert - .headOption match { - case None => // if the related alert doesn't exist, create it - logger.debug(s"Event ${client.name}#${event.id} has no related alert for organisation ${organisation.name}") - alertSrv - .create(alert, organisation, event.tags.map(_.name).toSet, Seq(), caseTemplate) - .map(_.alert) - case Some(richAlert) => - logger.debug(s"Event ${client.name}#${event.id} have already been imported for organisation ${organisation.name}, updating the alert") - for { - updatedAlert <- - Some(alertSrv.get(richAlert.alert)) - .map(t => if (richAlert.title != alert.title) t.update(_.title, alert.title) else t) - .map(t => if (richAlert.lastSyncDate != alert.lastSyncDate) t.update(_.lastSyncDate, alert.lastSyncDate) else t) - .map(t => if (richAlert.description != alert.description) t.update(_.description, alert.description) else t) - .map(t => if (richAlert.severity != alert.severity) t.update(_.severity, alert.severity) else t) - .map(t => if (richAlert.date != alert.date) t.update(_.date, alert.date) else t) - .map(t => if (richAlert.tlp != alert.tlp) t.update(_.tlp, alert.tlp) else t) - .map(t => if (richAlert.pap != alert.pap) t.update(_.pap, alert.pap) else t) - .map(t => if (richAlert.externalLink != alert.externalLink) t.update(_.externalLink, alert.externalLink) else t) - .get - .getOrFail("Alert") - _ <- alertSrv.updateTagNames(updatedAlert, event.tags.map(_.name).toSet) - } yield updatedAlert - } + organisationSrv + .get(organisation) + .alerts + .getBySourceId("misp", mispOrganisation, event.id) + .richAlert + .headOption match { + case None => // if the related alert doesn't exist, create it + logger.debug(s"Event ${client.name}#${event.id} has no related alert for organisation ${organisation.name}") + alertSrv + .create(alert, organisation, event.tags.map(_.name).toSet, Seq(), caseTemplate) + .map(ra => ra.alert -> ra.toJson.asInstanceOf[JsObject]) + case Some(richAlert) => + logger.debug(s"Event ${client.name}#${event.id} has already been imported for organisation ${organisation.name}, updating the alert") + val (updatedAlertTraversal, updatedFields) = (alertSrv.get(richAlert.alert), JsObject.empty) + .when(richAlert.title != alert.title)(_.update(_.title, alert.title), _ + ("title" -> JsString(alert.title))) + .when(richAlert.lastSyncDate != alert.lastSyncDate)( + _.update(_.lastSyncDate, alert.lastSyncDate), + _ + ("lastSyncDate" -> JsNumber(alert.lastSyncDate.getTime)) + ) + .when(richAlert.description != alert.description)( + _.update(_.description, alert.description), + _ + ("description" -> 
JsString(alert.description)) + ) + .when(richAlert.severity != alert.severity)(_.update(_.severity, alert.severity), _ + ("severity" -> JsNumber(alert.severity))) + .when(richAlert.date != alert.date)(_.update(_.date, alert.date), _ + ("date" -> JsNumber(alert.date.getTime))) + .when(richAlert.tlp != alert.tlp)(_.update(_.tlp, alert.tlp), _ + ("tlp" -> JsNumber(alert.tlp))) + .when(richAlert.pap != alert.pap)(_.update(_.pap, alert.pap), _ + ("pap" -> JsNumber(alert.pap))) + .when(richAlert.externalLink != alert.externalLink)( + _.update(_.externalLink, alert.externalLink), + _ + ("externalLink" -> alert.externalLink.fold[JsValue](JsNull)(JsString.apply)) + ) + val tags = event.tags.map(_.name) + for { + (addedTags, removedTags) <- alertSrv.updateTagNames(richAlert.alert, tags.toSet) + updatedAlert <- updatedAlertTraversal.getOrFail("Alert") + updatedFieldWithTags = + if (addedTags.nonEmpty || removedTags.nonEmpty) updatedFields + ("tags" -> JsArray(tags.map(JsString))) else updatedFields + } yield (updatedAlert, updatedFieldWithTags) } } } - def syncMispEvents(client: TheHiveMispClient)(implicit authContext: AuthContext): Future[Unit] = - Future.fromTry(client.currentOrganisationName).flatMap { mispOrganisation => - lazy val caseTemplate = client.caseTemplate.flatMap { caseTemplateName => - db.roTransaction { implicit graph => - caseTemplateSrv.get(EntityName(caseTemplateName)).headOption - } - } - logger.debug(s"Get eligible organisations") - val organisations = db.roTransaction { implicit graph => - client.organisationFilter(organisationSrv.startTraversal).toSeq - } - val lastSynchro = getLastSyncDate(client, mispOrganisation, organisations) - logger.debug(s"Last synchronisation is $lastSynchro") - client - .searchEvents(publishDate = lastSynchro) - .runWith(Sink.foreachAsync(1) { event => - logger.debug(s"Importing event ${client.name}#${event.id} in organisation(s): ${organisations.mkString(",")}") - Future - .traverse(organisations) { organisation => - Future - .fromTry(updateOrCreateAlert(client, organisation, mispOrganisation, event, caseTemplate)) - .flatMap(alert => importAttibutes(client, event, alert, lastSynchro)) - .recover { - case error => - logger.warn(s"Unable to create alert from MISP event ${client.name}#${event.id}", error) - } + def syncMispEvents(client: TheHiveMispClient)(implicit authContext: AuthContext): Unit = + client + .currentOrganisationName + .fold( + error => logger.error("Unable to get MISP organisation", error), + mispOrganisation => { + + val caseTemplate = client.caseTemplate.flatMap { caseTemplateName => + db.roTransaction { implicit graph => + caseTemplateSrv.get(EntityName(caseTemplateName)).headOption } - .map(_ => ()) - .recover { - case error => - logger.warn(s"Unable to create alert from MISP event ${client.name}#${event.id}", error) + } + + logger.debug(s"Get eligible organisations") + val organisations = db.roTransaction { implicit graph => + client.organisationFilter(organisationSrv.startTraversal).toSeq + } + val lastSynchro = db.roTransaction { implicit graph => + getLastSyncDate(client, mispOrganisation, organisations) + } + + logger.debug(s"Last synchronisation is $lastSynchro") + val queue = client + .searchEvents(publishDate = lastSynchro) + .runWith(Sink.queue[Event]) + QueueIterator(queue).foreach { event => + logger.debug(s"Importing event ${client.name}#${event.id} in organisation(s): ${organisations.mkString(",")}") + organisations.foreach { organisation => + db.tryTransaction { implicit graph => + auditSrv.mergeAudits { + 
updateOrCreateAlert(client, organisation, mispOrganisation, event, caseTemplate) + .map { + case (alert, updatedFields) => + importAttibutes(client, event, alert, if (alert._updatedBy.isEmpty) None else lastSynchro) + (alert, updatedFields) + } + .recoverWith { + case error => + logger.warn(s"Unable to create alert from MISP event ${client.name}#${event.id}", error) + Failure(error) + } + } { + case (alert, updatedFields) if alert._updatedBy.isDefined => auditSrv.alert.update(alert, updatedFields) + case (alert, updatedFields) => auditSrv.alert.create(alert, updatedFields) + } + } } - }) - .map(_ => ()) - } + } + } + ) } diff --git a/misp/connector/src/main/scala/org/thp/thehive/connector/misp/services/MispSerializer.scala b/misp/connector/src/main/scala/org/thp/thehive/connector/misp/services/MispSerializer.scala new file mode 100644 index 0000000000..9315bb568d --- /dev/null +++ b/misp/connector/src/main/scala/org/thp/thehive/connector/misp/services/MispSerializer.scala @@ -0,0 +1,23 @@ +package org.thp.thehive.connector.misp.services + +import akka.serialization.Serializer + +import java.io.NotSerializableException + +class MispSerializer extends Serializer { + override def identifier: Int = -222314660 + + override def includeManifest: Boolean = false + + override def toBinary(o: AnyRef): Array[Byte] = + o match { + case Synchro => Array(0) + case _ => throw new NotSerializableException + } + + override def fromBinary(bytes: Array[Byte], manifest: Option[Class[_]]): AnyRef = + bytes(0) match { + case 0 => Synchro + case _ => throw new NotSerializableException + } +} diff --git a/misp/connector/src/main/scala/org/thp/thehive/connector/misp/services/QueueIterator.scala b/misp/connector/src/main/scala/org/thp/thehive/connector/misp/services/QueueIterator.scala new file mode 100644 index 0000000000..d117f39932 --- /dev/null +++ b/misp/connector/src/main/scala/org/thp/thehive/connector/misp/services/QueueIterator.scala @@ -0,0 +1,49 @@ +package org.thp.thehive.connector.misp.services + +import akka.stream.scaladsl.SinkQueueWithCancel +import play.api.Logger + +import java.util.NoSuchElementException +import scala.concurrent.Await +import scala.concurrent.duration.{Duration, DurationInt} +import scala.util.control.NonFatal + +class QueueIterator[T](queue: SinkQueueWithCancel[T], readTimeout: Duration) extends Iterator[T] { + lazy val logger: Logger = Logger(getClass) + + private var nextValue: Option[T] = None + private var isFinished: Boolean = false + def getNextValue(): Unit = + try nextValue = Await.result(queue.pull(), readTimeout) + catch { + case NonFatal(e) => + logger.error("Stream fails", e) + isFinished = true + nextValue = None + } + override def hasNext: Boolean = + if (isFinished) false + else { + if (nextValue.isEmpty) + getNextValue() + nextValue.isDefined + } + + override def next(): T = + nextValue match { + case Some(v) => + nextValue = None + v + case _ if !isFinished => + getNextValue() + nextValue.getOrElse { + isFinished = true + throw new NoSuchElementException + } + case _ => throw new NoSuchElementException + } +} + +object QueueIterator { + def apply[T](queue: SinkQueueWithCancel[T], readTimeout: Duration = 1.minute) = new QueueIterator[T](queue, readTimeout) +} diff --git a/misp/connector/src/main/scala/org/thp/thehive/connector/misp/services/TheHiveMispClient.scala b/misp/connector/src/main/scala/org/thp/thehive/connector/misp/services/TheHiveMispClient.scala index 50f0bc2dfb..176b6bec4a 100644 --- 
a/misp/connector/src/main/scala/org/thp/thehive/connector/misp/services/TheHiveMispClient.scala +++ b/misp/connector/src/main/scala/org/thp/thehive/connector/misp/services/TheHiveMispClient.scala @@ -23,11 +23,12 @@ case class TheHiveMispClientConfig( wsConfig: ProxyWSConfig = ProxyWSConfig(AhcWSClientConfig(), None), maxAge: Option[Duration], excludedOrganisations: Seq[String] = Nil, + whitelistOrganisations: Seq[String] = Nil, excludedTags: Set[String] = Set.empty, whitelistTags: Set[String] = Set.empty, purpose: MispPurpose.Value = MispPurpose.ImportAndExport, caseTemplate: Option[String], - artifactTags: Seq[String] = Nil, + observableTags: Seq[String] = Nil, exportCaseTags: Boolean = false, exportObservableTags: Boolean = false, includedTheHiveOrganisations: Seq[String] = Seq("*"), @@ -44,11 +45,12 @@ object TheHiveMispClientConfig { wsConfig <- (JsPath \ "wsConfig").readWithDefault[ProxyWSConfig](ProxyWSConfig(AhcWSClientConfig(), None)) maxAge <- (JsPath \ "maxAge").readNullable[Duration] excludedOrganisations <- (JsPath \ "exclusion" \ "organisations").readWithDefault[Seq[String]](Nil) + whitelistOrganisations <- (JsPath \ "whitelist" \ "organisations").readWithDefault[Seq[String]](Nil) excludedTags <- (JsPath \ "exclusion" \ "tags").readWithDefault[Set[String]](Set.empty) whitelistTags <- (JsPath \ "whitelist" \ "tags").readWithDefault[Set[String]](Set.empty) purpose <- (JsPath \ "purpose").readWithDefault[MispPurpose.Value](MispPurpose.ImportAndExport) caseTemplate <- (JsPath \ "caseTemplate").readNullable[String] - artifactTags <- (JsPath \ "tags").readWithDefault[Seq[String]](Nil) + observableTags <- (JsPath \ "tags").readWithDefault[Seq[String]](Nil) exportCaseTags <- (JsPath \ "exportCaseTags").readWithDefault[Boolean](false) exportObservableTags <- (JsPath \ "exportObservableTags").readWithDefault[Boolean](false) includedTheHiveOrganisations <- (JsPath \ "includedTheHiveOrganisations").readWithDefault[Seq[String]](Seq("*")) @@ -60,11 +62,12 @@ object TheHiveMispClientConfig { wsConfig, maxAge, excludedOrganisations, + whitelistOrganisations, excludedTags, whitelistTags, purpose, caseTemplate, - artifactTags, + observableTags, exportCaseTags, exportObservableTags, includedTheHiveOrganisations, @@ -82,7 +85,7 @@ object TheHiveMispClientConfig { "whitelistTags" -> Json.obj("whitelist" -> cfg.whitelistTags), "purpose" -> cfg.purpose, "caseTemplate" -> cfg.caseTemplate, - "tags" -> cfg.artifactTags, + "tags" -> cfg.observableTags, "exportCaseTags" -> cfg.exportCaseTags, "includedTheHiveOrganisations" -> cfg.includedTheHiveOrganisations, "excludedTheHiveOrganisations" -> cfg.excludedTheHiveOrganisations @@ -98,11 +101,12 @@ class TheHiveMispClient( ws: WSClient, maxAge: Option[Duration], excludedOrganisations: Seq[String], + whitelistOrganisations: Seq[String], excludedTags: Set[String], whitelistTags: Set[String], purpose: MispPurpose.Value, val caseTemplate: Option[String], - artifactTags: Seq[String], // FIXME use artifactTags + val observableTags: Seq[String], val exportCaseTags: Boolean, val exportObservableTags: Boolean, includedTheHiveOrganisations: Seq[String], @@ -114,6 +118,7 @@ class TheHiveMispClient( ws, maxAge, excludedOrganisations, + whitelistOrganisations, excludedTags, whitelistTags ) { @@ -126,11 +131,12 @@ class TheHiveMispClient( new ProxyWS(config.wsConfig, mat), config.maxAge, config.excludedOrganisations, + config.whitelistOrganisations, config.excludedTags, config.whitelistTags, config.purpose, config.caseTemplate, - config.artifactTags, + 
config.observableTags, config.exportCaseTags, config.exportObservableTags, config.includedTheHiveOrganisations, diff --git a/misp/connector/src/test/scala/org/thp/thehive/connector/misp/services/MispImportSrvTest.scala b/misp/connector/src/test/scala/org/thp/thehive/connector/misp/services/MispImportSrvTest.scala index 915ef429c0..734d52dc47 100644 --- a/misp/connector/src/test/scala/org/thp/thehive/connector/misp/services/MispImportSrvTest.scala +++ b/misp/connector/src/test/scala/org/thp/thehive/connector/misp/services/MispImportSrvTest.scala @@ -73,9 +73,8 @@ class MispImportSrvTest(implicit ec: ExecutionContext) extends PlaySpecification "MISP service" should { "import events" in testApp { app => - await(app[MispImportSrv].syncMispEvents(app[TheHiveMispClient])(authContext))(1.minute) - app[Database].roTransaction { implicit graph => + app[MispImportSrv].syncMispEvents(app[TheHiveMispClient]) app[AlertSrv].startTraversal.getBySourceId("misp", "ORGNAME", "1").visible.getOrFail("Alert") } must beSuccessfulTry( Alert( @@ -108,7 +107,7 @@ class MispImportSrvTest(implicit ec: ExecutionContext) extends PlaySpecification .map(o => (o.`type`.name, o.data.map(_.data), o.tlp, o.message, o.tags.map(_.toString).toSet)) // println(observables.mkString("\n")) observables must contain( - ("filename", Some("plop"), 0, Some(""), Set("TH-test", "misp:category=\"Artifacts dropped\"", "misp:type=\"filename\"")) + ("filename", Some("plop"), 0, Some(""), Set("TEST", "TH-test", "misp:category=\"Artifacts dropped\"", "misp:type=\"filename\"")) ) } } diff --git a/misp/connector/src/test/scala/org/thp/thehive/connector/misp/services/TestMispClientProvider.scala b/misp/connector/src/test/scala/org/thp/thehive/connector/misp/services/TestMispClientProvider.scala index a881cc7298..1cea733c7d 100644 --- a/misp/connector/src/test/scala/org/thp/thehive/connector/misp/services/TestMispClientProvider.scala +++ b/misp/connector/src/test/scala/org/thp/thehive/connector/misp/services/TestMispClientProvider.scala @@ -59,11 +59,12 @@ class TestMispClientProvider @Inject() (Action: DefaultActionBuilder, implicit v ws = ws, maxAge = None, excludedOrganisations = Nil, + whitelistOrganisations = Nil, excludedTags = Set.empty, whitelistTags = Set.empty, purpose = MispPurpose.ImportAndExport, caseTemplate = None, - artifactTags = Seq("TEST"), + observableTags = Seq("TEST"), exportCaseTags = true, exportObservableTags = true, includedTheHiveOrganisations = Seq("*"), diff --git a/project/build.properties b/project/build.properties index 947bdd3020..c06db1bb2e 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version=1.4.3 +sbt.version=1.4.5 diff --git a/thehive/app/org/thp/thehive/ClusterSetup.scala b/thehive/app/org/thp/thehive/ClusterSetup.scala index 308e0f6d8b..7a60a7f625 100644 --- a/thehive/app/org/thp/thehive/ClusterSetup.scala +++ b/thehive/app/org/thp/thehive/ClusterSetup.scala @@ -1,17 +1,20 @@ package org.thp.thehive -import akka.actor.ActorSystem +import akka.actor.{Actor, ActorSystem, Props} import akka.cluster.Cluster +import akka.cluster.ClusterEvent.{InitialStateAsEvents, MemberEvent, _} import com.google.inject.Injector -import javax.inject.{Inject, Singleton} import play.api.{Configuration, Logger} +import javax.inject.{Inject, Singleton} + @Singleton class ClusterSetup @Inject() ( configuration: Configuration, system: ActorSystem, injector: Injector ) { + system.actorOf(Props[ClusterListener]) if (configuration.get[Seq[String]]("akka.cluster.seed-nodes").isEmpty) { val logger: 
Logger = Logger(getClass)
  logger.info("Initialising cluster")
@@ -19,4 +22,24 @@ class ClusterSetup @Inject() (
     cluster.join(cluster.system.provider.getDefaultAddress)
   }
   GuiceAkkaExtension(system).set(injector)
+
+}
+
+class ClusterListener extends Actor {
+  val cluster: Cluster = Cluster(context.system)
+  val logger: Logger = Logger(getClass)
+
+  override def preStart(): Unit = cluster.subscribe(self, initialStateMode = InitialStateAsEvents, classOf[MemberEvent], classOf[UnreachableMember])
+  override def postStop(): Unit = cluster.unsubscribe(self)
+
+  def receive: Receive = {
+    case MemberUp(member) => logger.info(s"Member is Up: ${member.address}")
+    case UnreachableMember(member) => logger.info(s"Member detected as unreachable: $member")
+    case MemberRemoved(member, previousStatus) => logger.info(s"Member is Removed: ${member.address} after $previousStatus")
+    case MemberJoined(member) => logger.debug(s"Member joined: $member")
+    case MemberWeaklyUp(member) => logger.debug(s"Member is weakly up: $member")
+    case MemberLeft(member) => logger.debug(s"Member left: $member")
+    case MemberExited(member) => logger.debug(s"Member exited: $member")
+    case MemberDowned(member) => logger.debug(s"Member downed: $member")
+  }
 }
diff --git a/thehive/app/org/thp/thehive/controllers/v0/AuditCtrl.scala b/thehive/app/org/thp/thehive/controllers/v0/AuditCtrl.scala
index 1e14c0873f..9ced6e86b6 100644
--- a/thehive/app/org/thp/thehive/controllers/v0/AuditCtrl.scala
+++ b/thehive/app/org/thp/thehive/controllers/v0/AuditCtrl.scala
@@ -13,7 +13,6 @@ import org.thp.scalligraph.traversal.{IteratorOutput, Traversal}
 import org.thp.thehive.controllers.v0.Conversion._
 import org.thp.thehive.models.{Audit, RichAudit}
 import org.thp.thehive.services.AuditOps._
-import org.thp.thehive.services.FlowActor.{AuditIds, FlowId}
 import org.thp.thehive.services._
 import play.api.libs.json.{JsArray, JsObject, Json}
 import play.api.mvc.{Action, AnyContent, Results}
diff --git a/thehive/app/org/thp/thehive/controllers/v0/AuditRenderer.scala b/thehive/app/org/thp/thehive/controllers/v0/AuditRenderer.scala
index 4b93313587..51f18fea54 100644
--- a/thehive/app/org/thp/thehive/controllers/v0/AuditRenderer.scala
+++ b/thehive/app/org/thp/thehive/controllers/v0/AuditRenderer.scala
@@ -24,7 +24,7 @@ trait AuditRenderer {
   def taskToJson: Traversal.V[Task] => Traversal[JsObject, JMap[String, Any], Converter[JsObject, JMap[String, Any]]] =
     _.project(
-      _.by(_.richTask.domainMap(_.toJson))
+      _.by(_.richTaskWithoutActionRequired.domainMap(_.toJson))
         .by(t => caseToJson(t.`case`))
     ).domainMap {
       case (task, case0) => task.as[JsObject] + ("case" -> case0)
diff --git a/thehive/app/org/thp/thehive/controllers/v0/CaseTemplateCtrl.scala b/thehive/app/org/thp/thehive/controllers/v0/CaseTemplateCtrl.scala
index 5a1d824314..0c23b45f7c 100644
--- a/thehive/app/org/thp/thehive/controllers/v0/CaseTemplateCtrl.scala
+++ b/thehive/app/org/thp/thehive/controllers/v0/CaseTemplateCtrl.scala
@@ -1,6 +1,5 @@
 package org.thp.thehive.controllers.v0
 
-import javax.inject.{Inject, Named, Singleton}
 import org.scalactic.Accumulation._
 import org.thp.scalligraph.controllers._
 import org.thp.scalligraph.models.{Database, UMapping}
@@ -10,7 +9,7 @@ import org.thp.scalligraph.traversal.{Converter, IteratorOutput, Traversal}
 import org.thp.scalligraph.{AttributeCheckingError, BadRequestError, EntityIdOrName, RichSeq}
 import org.thp.thehive.controllers.v0.Conversion._
 import org.thp.thehive.dto.v0.{InputCaseTemplate, InputTask}
-import org.thp.thehive.models.{CaseTemplate, Permissions, RichCaseTemplate, Tag}
+import org.thp.thehive.models.{CaseTemplate, Permissions, RichCaseTemplate, Tag, Task}
 import org.thp.thehive.services.CaseTemplateOps._
 import org.thp.thehive.services.OrganisationOps._
 import org.thp.thehive.services.TagOps._
@@ -21,7 +20,9 @@ import play.api.Logger
 import play.api.libs.json.{JsObject, Json}
 import play.api.mvc.{Action, AnyContent, Results}
 
+import javax.inject.{Inject, Named, Singleton}
 import scala.util.Failure
+
 @Singleton
 class CaseTemplateCtrl @Inject() (
     override val entrypoint: Entrypoint,
@@ -113,6 +114,9 @@ class PublicCaseTemplate @Inject() (
     (range, caseTemplateSteps, _) => caseTemplateSteps.richPage(range.from, range.to, withTotal = true)(_.richCaseTemplate)
   )
   override val outputQuery: Query = Query.output[RichCaseTemplate, Traversal.V[CaseTemplate]](_.richCaseTemplate)
+  override val extraQueries: Seq[ParamQuery[_]] = Seq(
+    Query[Traversal.V[CaseTemplate], Traversal.V[Task]]("tasks", (caseTemplateSteps, _) => caseTemplateSteps.tasks)
+  )
   override val publicProperties: PublicProperties = PublicPropertyListBuilder[CaseTemplate]
     .property("name", UMapping.string)(_.field.updatable)
     .property("displayName", UMapping.string)(_.field.updatable)
@@ -165,25 +169,27 @@ class PublicCaseTemplate @Inject() (
         } yield Json.obj("customFields" -> values)
       case _ => Failure(BadRequestError("Invalid custom fields format"))
     })
-    .property("tasks", UMapping.jsonNative.sequence)(_.select(_.tasks.richTask.domainMap(_.toJson)).custom { // FIXME select the correct mapping
-      (_, value, vertex, _, graph, authContext) =>
-        val fp = FieldsParser[InputTask]
+    .property("tasks", UMapping.jsonNative.sequence)(
+      _.select(_.tasks.richTaskWithoutActionRequired.domainMap(_.toJson)).custom { // FIXME select the correct mapping
+        (_, value, vertex, _, graph, authContext) =>
+          val fp = FieldsParser[InputTask]
 
-        caseTemplateSrv.get(vertex)(graph).tasks.remove()
-        for {
-          caseTemplate <- caseTemplateSrv.get(vertex)(graph).getOrFail("CaseTemplate")
-          tasks <- value.validatedBy(t => fp(Field(t))).badMap(AttributeCheckingError(_)).toTry
-          createdTasks <-
-            tasks
-              .toTry(t =>
-                t.owner
-                  .map(o => userSrv.getOrFail(EntityIdOrName(o))(graph))
-                  .flip
-                  .flatMap(owner => taskSrv.create(t.toTask, owner)(graph, authContext))
-              )
-          _ <- createdTasks.toTry(t => caseTemplateSrv.addTask(caseTemplate, t.task)(graph, authContext))
-        } yield Json.obj("tasks" -> createdTasks.map(_.toJson))
-    })
+          caseTemplateSrv.get(vertex)(graph).tasks.remove()
+          for {
+            caseTemplate <- caseTemplateSrv.get(vertex)(graph).getOrFail("CaseTemplate")
+            tasks <- value.validatedBy(t => fp(Field(t))).badMap(AttributeCheckingError(_)).toTry
+            createdTasks <-
+              tasks
+                .toTry(t =>
+                  t.owner
+                    .map(o => userSrv.getOrFail(EntityIdOrName(o))(graph))
+                    .flip
+                    .flatMap(owner => taskSrv.create(t.toTask, owner)(graph, authContext))
+                )
+            _ <- createdTasks.toTry(t => caseTemplateSrv.addTask(caseTemplate, t.task)(graph, authContext))
+          } yield Json.obj("tasks" -> createdTasks.map(_.toJson))
+      }
+    )
     .build
 }
diff --git a/thehive/app/org/thp/thehive/controllers/v0/DescribeCtrl.scala b/thehive/app/org/thp/thehive/controllers/v0/DescribeCtrl.scala
index 65599ccb62..e7bcf77a20 100644
--- a/thehive/app/org/thp/thehive/controllers/v0/DescribeCtrl.scala
+++ b/thehive/app/org/thp/thehive/controllers/v0/DescribeCtrl.scala
@@ -2,9 +2,8 @@ package org.thp.thehive.controllers.v0
 
 import java.lang.{Boolean => JBoolean}
 import java.util.Date
-
 import javax.inject.{Inject, Named, Singleton}
-import org.thp.scalligraph.NotFoundError
+import org.thp.scalligraph.{EntityId, NotFoundError}
 import org.thp.scalligraph.controllers.Entrypoint
 import org.thp.scalligraph.models.Database
 import org.thp.scalligraph.query.PublicProperty
@@ -218,6 +217,7 @@ class DescribeCtrl @Inject() (
       case c if c == classOf[Hash] => Seq(PropertyDescription(prop.propertyName, "string"))
       case c if classOf[Number].isAssignableFrom(c) => Seq(PropertyDescription(prop.propertyName, "number"))
       case c if c == classOf[String] => Seq(PropertyDescription(prop.propertyName, "string"))
+      case c if c == classOf[EntityId] => Seq(PropertyDescription(prop.propertyName, "string"))
       case _ =>
         logger.warn(s"Unrecognized property $prop. Add a custom description")
         Seq(PropertyDescription(prop.propertyName, "unknown"))
diff --git a/thehive/app/org/thp/thehive/controllers/v0/LogCtrl.scala b/thehive/app/org/thp/thehive/controllers/v0/LogCtrl.scala
index ca1bc469a4..a2b2b47a2b 100644
--- a/thehive/app/org/thp/thehive/controllers/v0/LogCtrl.scala
+++ b/thehive/app/org/thp/thehive/controllers/v0/LogCtrl.scala
@@ -38,10 +38,8 @@ class LogCtrl @Inject() (
             .get(EntityIdOrName(taskId))
             .can(Permissions.manageTask)
             .getOrFail("Task")
-          createdLog <- logSrv.create(inputLog.toLog, task)
-          attachment <- inputLog.attachment.map(logSrv.addAttachment(createdLog, _)).flip
-          richLog = RichLog(createdLog, attachment.toList)
-        } yield Results.Created(richLog.toJson)
+          createdLog <- logSrv.create(inputLog.toLog, task, inputLog.attachment)
+        } yield Results.Created(createdLog.toJson)
     }
 
   def update(logId: String): Action[AnyContent] =
diff --git a/thehive/app/org/thp/thehive/controllers/v0/ObservableCtrl.scala b/thehive/app/org/thp/thehive/controllers/v0/ObservableCtrl.scala
index 629711357d..1a8ba2b182 100644
--- a/thehive/app/org/thp/thehive/controllers/v0/ObservableCtrl.scala
+++ b/thehive/app/org/thp/thehive/controllers/v0/ObservableCtrl.scala
@@ -1,20 +1,16 @@
 package org.thp.thehive.controllers.v0
 
-import java.io.FilterInputStream
-import java.nio.file.Files
-
-import javax.inject.{Inject, Named, Singleton}
 import net.lingala.zip4j.ZipFile
 import net.lingala.zip4j.model.FileHeader
 import org.thp.scalligraph._
 import org.thp.scalligraph.auth.AuthContext
 import org.thp.scalligraph.controllers._
-import org.thp.scalligraph.models.{Database, UMapping}
+import org.thp.scalligraph.models.{Database, Entity, UMapping}
 import org.thp.scalligraph.query._
 import org.thp.scalligraph.traversal.TraversalOps._
 import org.thp.scalligraph.traversal.{Converter, IteratorOutput, Traversal}
 import org.thp.thehive.controllers.v0.Conversion._
-import org.thp.thehive.dto.v0.InputObservable
+import org.thp.thehive.dto.v0.{InputAttachment, InputObservable}
 import org.thp.thehive.models._
 import org.thp.thehive.services.CaseOps._
 import org.thp.thehive.services.ObservableOps._
@@ -27,8 +23,11 @@ import play.api.libs.Files.DefaultTemporaryFileCreator
 import play.api.libs.json.{JsArray, JsObject, JsValue, Json}
 import play.api.mvc.{Action, AnyContent, Results}
 
+import java.io.FilterInputStream
+import java.nio.file.Files
+import javax.inject.{Inject, Named, Singleton}
 import scala.collection.JavaConverters._
-import scala.util.Success
+import scala.util.{Failure, Success}
 
 @Singleton
 class ObservableCtrl @Inject() (
@@ -38,6 +37,7 @@ class ObservableCtrl @Inject() (
     observableSrv: ObservableSrv,
     observableTypeSrv: ObservableTypeSrv,
     caseSrv: CaseSrv,
+    attachmentSrv: AttachmentSrv,
     errorHandler: ErrorHandler,
     @Named("v0") override val queryExecutor: QueryExecutor,
     override val publicData: PublicObservable,
@@ -68,48 +68,60 @@ class ObservableCtrl @Inject() (
       }
       .map {
         case (case0, observableType) =>
-          val initialSuccessesAndFailures: (Seq[JsValue], Seq[JsValue]) =
-            inputAttachObs.foldLeft[(Seq[JsValue], Seq[JsValue])](Nil -> Nil) {
-              case ((successes, failures), inputObservable) =>
-                inputObservable.attachment.fold((successes, failures)) { attachment =>
-                  db
-                    .tryTransaction { implicit graph =>
-                      observableSrv
-                        .create(inputObservable.toObservable, observableType, attachment, inputObservable.tags, Nil)
-                        .flatMap(o => caseSrv.addObservable(case0, o).map(_ => o.toJson))
-                    }
-                    .fold(
-                      e =>
-                        successes -> (failures :+ errorHandler.toErrorResult(e)._2 ++ Json
-                          .obj(
-                            "object" -> Json
-                              .obj("data" -> s"file:${attachment.filename}", "attachment" -> Json.obj("name" -> attachment.filename))
-                          )),
-                      s => (successes :+ s) -> failures
-                    )
-                }
-            }
-
-          val (successes, failures) = inputObservable
-            .data
-            .foldLeft(initialSuccessesAndFailures) {
-              case ((successes, failures), data) =>
-                db
-                  .tryTransaction { implicit graph =>
-                    observableSrv
-                      .create(inputObservable.toObservable, observableType, data, inputObservable.tags, Nil)
-                      .flatMap(o => caseSrv.addObservable(case0, o).map(_ => o.toJson))
-                  }
-                  .fold(
-                    failure => (successes, failures :+ errorHandler.toErrorResult(failure)._2 ++ Json.obj("object" -> Json.obj("data" -> data))),
-                    success => (successes :+ success, failures)
-                  )
-            }
+          val (successes, failures) = inputAttachObs
+            .flatMap { obs =>
+              obs.attachment.map(createAttachmentObservable(case0, obs, observableType, _)) ++
+                obs.data.map(createSimpleObservable(case0, obs, observableType, _))
+            }
+            .foldLeft[(Seq[JsValue], Seq[JsValue])]((Nil, Nil)) {
+              case ((s, f), Right(o)) => (s :+ o, f)
+              case ((s, f), Left(o)) => (s, f :+ o)
+            }
 
           if (failures.isEmpty) Results.Created(JsArray(successes))
           else Results.MultiStatus(Json.obj("success" -> successes, "failure" -> failures))
       }
   }
 
+  def createSimpleObservable(
+      `case`: Case with Entity,
+      inputObservable: InputObservable,
+      observableType: ObservableType with Entity,
+      data: String
+  )(implicit authContext: AuthContext): Either[JsValue, JsValue] =
+    db
+      .tryTransaction { implicit graph =>
+        observableSrv
+          .create(inputObservable.toObservable, observableType, data, inputObservable.tags, Nil)
+          .flatMap(o => caseSrv.addObservable(`case`, o).map(_ => o))
+      } match {
+      case Success(o) => Right(o.toJson)
+      case Failure(error) => Left(errorHandler.toErrorResult(error)._2 ++ Json.obj("object" -> Json.obj("data" -> data)))
+    }
+
+  def createAttachmentObservable(
+      `case`: Case with Entity,
+      inputObservable: InputObservable,
+      observableType: ObservableType with Entity,
+      fileOrAttachment: Either[FFile, InputAttachment]
+  )(implicit authContext: AuthContext): Either[JsValue, JsValue] =
+    db
+      .tryTransaction { implicit graph =>
+        val observable = fileOrAttachment match {
+          case Left(file) => observableSrv.create(inputObservable.toObservable, observableType, file, inputObservable.tags, Nil)
+          case Right(attachment) =>
+            for {
+              attach <- attachmentSrv.duplicate(attachment.name, attachment.contentType, attachment.id)
+              obs <- observableSrv.create(inputObservable.toObservable, observableType, attach, inputObservable.tags, Nil)
+            } yield obs
+        }
+        observable.flatMap(o => caseSrv.addObservable(`case`, o).map(_ => o))
+      } match {
+      case Success(o) => Right(o.toJson)
+      case _ =>
+        val filename = fileOrAttachment.fold(_.filename, _.name)
+        Left(Json.obj("object" -> Json.obj("data" -> s"file:$filename", "attachment" -> Json.obj("name" -> filename))))
+    }
+
   def get(observableId: String): Action[AnyContent] =
     entrypoint("get observable")
       .authRoTransaction(db) { implicit request => implicit graph =>
@@ -214,8 +226,8 @@ class ObservableCtrl @Inject() (
     }
   }
 
-  private def getZipFiles(observable: InputObservable, zipPassword: Option[String])(implicit authContext: AuthContext): Seq[InputObservable] =
-    observable.attachment.toSeq.flatMap { attachment =>
+  private def getZipFiles(observable: InputObservable, zipPassword: Option[String]): Seq[InputObservable] =
+    observable.attachment.flatMap(_.swap.toSeq).flatMap { attachment =>
       val zipFile = new ZipFile(attachment.filepath.toFile)
       val files: Seq[FileHeader] = zipFile.getFileHeaders.asScala.asInstanceOf[Seq[FileHeader]]
@@ -225,7 +237,7 @@ class ObservableCtrl @Inject() (
       files
         .filterNot(_.isDirectory)
         .flatMap(extractAndCheckSize(zipFile, _))
-        .map(ffile => observable.copy(attachment = Some(ffile)))
+        .map(ffile => observable.copy(attachment = Seq(Left(ffile))))
     }
 }
@@ -270,7 +282,16 @@ class PublicObservable @Inject() (
   )
   override val outputQuery: Query = Query.output[RichObservable, Traversal.V[Observable]](_.richObservable)
   override val extraQueries: Seq[ParamQuery[_]] = Seq(
-    // Query.output[(RichObservable, JsObject, Option[RichCase])]
+    Query[Traversal.V[Observable], Traversal.V[Organisation]](
+      "organisations",
+      (observableSteps, authContext) => observableSteps.organisations.visible(authContext)
+    ),
+    Query[Traversal.V[Observable], Traversal.V[Observable]](
+      "similar",
+      (observableSteps, authContext) => observableSteps.filteredSimilar.visible(authContext)
+    ),
+    Query[Traversal.V[Observable], Traversal.V[Case]]("case", (observableSteps, _) => observableSteps.`case`),
+    Query[Traversal.V[Observable], Traversal.V[Alert]]("alert", (observableSteps, _) => observableSteps.alert)
   )
   override val publicProperties: PublicProperties = PublicPropertyListBuilder[Observable]
     .property("status", UMapping.string)(_.select(_.constant("Ok")).readonly)
diff --git a/thehive/app/org/thp/thehive/controllers/v0/StatusCtrl.scala b/thehive/app/org/thp/thehive/controllers/v0/StatusCtrl.scala
index 944368cc08..9e03134ae1 100644
--- a/thehive/app/org/thp/thehive/controllers/v0/StatusCtrl.scala
+++ b/thehive/app/org/thp/thehive/controllers/v0/StatusCtrl.scala
@@ -1,9 +1,9 @@
 package org.thp.thehive.controllers.v0
 
-import javax.inject.{Inject, Named, Singleton}
 import org.thp.scalligraph.auth.{AuthCapability, AuthSrv, MultiAuthSrv}
 import org.thp.scalligraph.controllers.Entrypoint
 import org.thp.scalligraph.models.Database
+import org.thp.scalligraph.services.config.ApplicationConfig.finiteDurationFormat
 import org.thp.scalligraph.services.config.{ApplicationConfig, ConfigItem}
 import org.thp.scalligraph.{EntityName, ScalligraphApplicationLoader}
 import org.thp.thehive.TheHiveModule
@@ -12,7 +12,9 @@ import org.thp.thehive.services.{Connector, UserSrv}
 import play.api.libs.json.{JsObject, JsString, Json}
 import play.api.mvc.{AbstractController, Action, AnyContent, Results}
 
+import javax.inject.{Inject, Named, Singleton}
 import scala.collection.immutable
+import scala.concurrent.duration.FiniteDuration
 import scala.util.Success
 
 @Singleton
@@ -26,6 +28,12 @@ class StatusCtrl @Inject() (
 ) {
   val passwordConfig: ConfigItem[String, String] = appConfig.item[String]("datastore.attachment.password", "Password used to protect attachment ZIP")
+  def password: String = passwordConfig.get
+  val streamPollingDurationConfig: ConfigItem[FiniteDuration, FiniteDuration] =
+    appConfig.item[FiniteDuration]("stream.longPolling.pollingDuration", "amount of time the UI has to wait before polling the stream")
+  def streamPollingDuration: FiniteDuration = streamPollingDurationConfig.get
+
+  private def getVersion(c: Class[_]): String = Option(c.getPackage.getImplementationVersion).getOrElse("SNAPSHOT")
 
   def get: Action[AnyContent] =
     entrypoint("status") { _ =>
@@ -44,18 +52,15 @@ class StatusCtrl @Inject() (
             case multiAuthSrv: MultiAuthSrv => Json.toJson(multiAuthSrv.providerNames)
             case _ => JsString(authSrv.name)
           }),
-          "capabilities" -> authSrv.capabilities.map(c => JsString(c.toString)),
-          "ssoAutoLogin" -> authSrv.capabilities.contains(AuthCapability.sso)
+          "capabilities" -> authSrv.capabilities.map(c => JsString(c.toString)),
+          "ssoAutoLogin" -> authSrv.capabilities.contains(AuthCapability.sso),
+          "pollingDuration" -> streamPollingDuration.toMillis
         )
       )
     )
   )
     }
-
-  def password: String = passwordConfig.get
-
-  private def getVersion(c: Class[_]): String = Option(c.getPackage.getImplementationVersion).getOrElse("SNAPSHOT")
-
   def health: Action[AnyContent] =
     entrypoint("health") { _ =>
       val dbStatus = db
diff --git a/thehive/app/org/thp/thehive/controllers/v0/TaskCtrl.scala b/thehive/app/org/thp/thehive/controllers/v0/TaskCtrl.scala
index bbc3924cc8..7cb42df840 100644
--- a/thehive/app/org/thp/thehive/controllers/v0/TaskCtrl.scala
+++ b/thehive/app/org/thp/thehive/controllers/v0/TaskCtrl.scala
@@ -104,11 +104,13 @@ class PublicTask @Inject() (taskSrv: TaskSrv, organisationSrv: OrganisationSrv,
       "page",
       FieldsParser[OutputParam],
       {
-        case (OutputParam(from, to, _, 0), taskSteps, _) =>
+        case (OutputParam(from, to, _, 0), taskSteps, authContext) =>
           taskSteps.richPage(from, to, withTotal = true)(_.richTask.domainMap(_ -> (None: Option[RichCase])))
         case (OutputParam(from, to, _, _), taskSteps, authContext) =>
           taskSteps.richPage(from, to, withTotal = true)(
-            _.richTaskWithCustomRenderer(_.`case`.richCase(authContext).domainMap(c => Some(c): Option[RichCase]))
+            _.richTaskWithCustomRenderer(
+              _.`case`.richCase(authContext).domainMap(c => Some(c): Option[RichCase])
+            )
           )
       }
     )
@@ -117,10 +119,19 @@ class PublicTask @Inject() (taskSrv: TaskSrv, organisationSrv: OrganisationSrv,
     FieldsParser[EntityIdOrName],
     (idOrName, graph, authContext) => taskSrv.get(idOrName)(graph).visible(authContext)
   )
-  override val outputQuery: Query = Query.output[RichTask, Traversal.V[Task]](_.richTask)
+  override val outputQuery: Query =
+    Query.outputWithContext[RichTask, Traversal.V[Task]]((taskSteps, authContext) => taskSteps.richTask)
   override val extraQueries: Seq[ParamQuery[_]] = Seq(
     Query.output[(RichTask, Option[RichCase])],
-    Query[Traversal.V[Task], Traversal.V[User]]("assignableUsers", (taskSteps, authContext) => taskSteps.assignableUsers(authContext))
+    Query[Traversal.V[Task], Traversal.V[User]]("assignableUsers", (taskSteps, authContext) => taskSteps.assignableUsers(authContext)),
+    Query.init[Traversal.V[Task]](
+      "waitingTask",
+      (graph, authContext) => taskSrv.startTraversal(graph).has(_.status, TaskStatus.Waiting).visible(authContext)
+    ),
+    Query[Traversal.V[Task], Traversal.V[Log]]("logs", (taskSteps, _) => taskSteps.logs),
+    Query[Traversal.V[Task], Traversal.V[Case]]("case", (taskSteps, _) => taskSteps.`case`),
+    Query[Traversal.V[Task], Traversal.V[CaseTemplate]]("caseTemplate", (taskSteps, _) => taskSteps.caseTemplate),
+    Query[Traversal.V[Task], Traversal.V[Organisation]]("organisations", (taskSteps, authContext) => taskSteps.organisations.visible(authContext))
   )
   override val publicProperties: PublicProperties = PublicPropertyListBuilder[Task]
     .property("title", UMapping.string)(_.field.updatable)
diff --git a/thehive/app/org/thp/thehive/controllers/v0/TheHiveQueryExecutor.scala b/thehive/app/org/thp/thehive/controllers/v0/TheHiveQueryExecutor.scala
index b55bbac9f1..efd82dcf72 100644
--- a/thehive/app/org/thp/thehive/controllers/v0/TheHiveQueryExecutor.scala
+++ b/thehive/app/org/thp/thehive/controllers/v0/TheHiveQueryExecutor.scala
@@ -10,9 +10,11 @@ import org.thp.scalligraph.traversal.Traversal
 import org.thp.scalligraph.traversal.TraversalOps._
 import org.thp.scalligraph.utils.RichType
 import org.thp.scalligraph.{BadRequestError, EntityIdOrName, GlobalQueryExecutor}
-import org.thp.thehive.models.{Case, Log, Observable, Task}
+import org.thp.thehive.models.{Alert, Case, CaseTemplate, Log, Observable, Task}
 import org.thp.thehive.services.CaseOps._
 import org.thp.thehive.services.LogOps._
+import org.thp.thehive.services.AlertOps._
+import org.thp.thehive.services.CaseTemplateOps._
 import org.thp.thehive.services.ObservableOps._
 import org.thp.thehive.services.TaskOps._
@@ -67,21 +69,25 @@ class TheHiveQueryExecutor @Inject() (
   override lazy val publicProperties: PublicProperties = publicDatas.foldLeft(metaProperties)(_ ++ _.publicProperties)
 
   val childTypes: PartialFunction[(ru.Type, String), ru.Type] = {
-    case (tpe, "case_task_log") if SubType(tpe, ru.typeOf[Traversal.V[Task]]) => ru.typeOf[Traversal.V[Log]]
-    case (tpe, "case_task") if SubType(tpe, ru.typeOf[Traversal.V[Case]]) => ru.typeOf[Traversal.V[Task]]
-    case (tpe, "case_artifact") if SubType(tpe, ru.typeOf[Traversal.V[Case]]) => ru.typeOf[Traversal.V[Observable]]
+    case (tpe, "case_task_log") if SubType(tpe, ru.typeOf[Traversal.V[Task]]) => ru.typeOf[Traversal.V[Log]]
+    case (tpe, "case_task") if SubType(tpe, ru.typeOf[Traversal.V[Case]]) => ru.typeOf[Traversal.V[Task]]
+    case (tpe, "case_artifact") if SubType(tpe, ru.typeOf[Traversal.V[Case]]) => ru.typeOf[Traversal.V[Observable]]
+    case (tpe, "alert_artifact") if SubType(tpe, ru.typeOf[Traversal.V[Alert]]) => ru.typeOf[Traversal.V[Observable]]
+    case (tpe, "caseTemplate_task") if SubType(tpe, ru.typeOf[Traversal.V[CaseTemplate]]) => ru.typeOf[Traversal.V[Task]]
   }
-  val parentTypes: PartialFunction[ru.Type, ru.Type] = {
-    case tpe if SubType(tpe, ru.typeOf[Traversal.V[Task]]) => ru.typeOf[Traversal.V[Case]]
-    case tpe if SubType(tpe, ru.typeOf[Traversal.V[Observable]]) => ru.typeOf[Traversal.V[Case]]
-    case tpe if SubType(tpe, ru.typeOf[Traversal.V[Log]]) => ru.typeOf[Traversal.V[Observable]]
+  val parentTypes: PartialFunction[(ru.Type, String), ru.Type] = {
+    case (tpe, "caseTemplate") if SubType(tpe, ru.typeOf[Traversal.V[Task]]) => ru.typeOf[Traversal.V[CaseTemplate]]
+    case (tpe, _) if SubType(tpe, ru.typeOf[Traversal.V[Task]]) => ru.typeOf[Traversal.V[Case]]
+    case (tpe, "alert") if SubType(tpe, ru.typeOf[Traversal.V[Observable]]) => ru.typeOf[Traversal.V[Alert]]
+    case (tpe, _) if SubType(tpe, ru.typeOf[Traversal.V[Observable]]) => ru.typeOf[Traversal.V[Case]]
+    case (tpe, _) if SubType(tpe, ru.typeOf[Traversal.V[Log]]) => ru.typeOf[Traversal.V[Task]]
   }
   override val customFilterQuery: FilterQuery = FilterQuery(db, publicProperties) { (tpe, globalParser) =>
-    FieldsParser.debug("parentChildFilter") {
-      case (_, FObjOne("_parent", ParentIdFilter(_, parentId))) if parentTypes.isDefinedAt(tpe) =>
-        Good(new ParentIdInputFilter(parentId))
-      case (path, FObjOne("_parent", ParentQueryFilter(_, parentFilterField))) if parentTypes.isDefinedAt(tpe) =>
-        globalParser(parentTypes(tpe)).apply(path, parentFilterField).map(query => new ParentQueryInputFilter(query))
+    FieldsParser("parentChildFilter") {
+      case (_, FObjOne("_parent", ParentIdFilter(parentType, parentId))) if parentTypes.isDefinedAt((tpe, parentType)) =>
+        Good(new ParentIdInputFilter(parentType, parentId))
+      case (path, FObjOne("_parent", ParentQueryFilter(parentType, parentFilterField))) if parentTypes.isDefinedAt((tpe, parentType)) =>
+        globalParser(parentTypes((tpe, parentType))).apply(path, parentFilterField).map(query => new ParentQueryInputFilter(parentType, query))
       case (path, FObjOne("_child", ChildQueryFilter(childType, childQueryField))) if childTypes.isDefinedAt((tpe, childType)) =>
         globalParser(childTypes((tpe, childType))).apply(path, childQueryField).map(query => new ChildQueryInputFilter(childType, query))
     }
@@ -107,7 +113,7 @@ object ParentIdFilter {
     .fold(Some(_), _ => None)
 }
 
-class ParentIdInputFilter(parentId: String) extends InputQuery[Traversal.Unk, Traversal.Unk] {
+class ParentIdInputFilter(parentType: String, parentId: String) extends InputQuery[Traversal.Unk, Traversal.Unk] {
   override def apply(
       db: Database,
       publicProperties: PublicProperties,
@@ -119,12 +125,31 @@ class ParentIdInputFilter(parentId: String) extends InputQuery[Traversal.Unk, Tr
       .getTypeArgs(traversalType, ru.typeOf[Traversal[_, _, _]])
       .headOption
       .collect {
+        case t if t <:< ru.typeOf[Task] && parentType == "caseTemplate" =>
+          traversal
+            .asInstanceOf[Traversal.V[Task]]
+            .filter(_.caseTemplate.get(EntityIdOrName(parentId)))
+            .asInstanceOf[Traversal.Unk]
         case t if t <:< ru.typeOf[Task] =>
-          traversal.asInstanceOf[Traversal.V[Task]].filter(_.`case`.get(EntityIdOrName(parentId))).asInstanceOf[Traversal.Unk]
+          traversal
+            .asInstanceOf[Traversal.V[Task]]
+            .filter(_.`case`.get(EntityIdOrName(parentId)))
+            .asInstanceOf[Traversal.Unk]
+        case t if t <:< ru.typeOf[Observable] && parentType == "alert" =>
+          traversal
+            .asInstanceOf[Traversal.V[Observable]]
+            .filter(_.alert.get(EntityIdOrName(parentId)))
+            .asInstanceOf[Traversal.Unk]
         case t if t <:< ru.typeOf[Observable] =>
-          traversal.asInstanceOf[Traversal.V[Observable]].filter(_.`case`.get(EntityIdOrName(parentId))).asInstanceOf[Traversal.Unk]
+          traversal
+            .asInstanceOf[Traversal.V[Observable]]
+            .filter(_.`case`.get(EntityIdOrName(parentId)))
+            .asInstanceOf[Traversal.Unk]
         case t if t <:< ru.typeOf[Log] =>
-          traversal.asInstanceOf[Traversal.V[Log]].filter(_.task.get(EntityIdOrName(parentId))).asInstanceOf[Traversal.Unk]
+          traversal
+            .asInstanceOf[Traversal.V[Log]]
+            .filter(_.task.get(EntityIdOrName(parentId)))
+            .asInstanceOf[Traversal.Unk]
       }
       .getOrElse(throw BadRequestError(s"$traversalType hasn't parent"))
 }
@@ -140,7 +165,8 @@ object ParentQueryFilter {
     .fold(Some(_), _ => None)
 }
 
-class ParentQueryInputFilter(parentFilter: InputQuery[Traversal.Unk, Traversal.Unk]) extends InputQuery[Traversal.Unk, Traversal.Unk] {
+class ParentQueryInputFilter(parentType: String, parentFilter: InputQuery[Traversal.Unk, Traversal.Unk])
+    extends InputQuery[Traversal.Unk, Traversal.Unk] {
   override def apply(
       db: Database,
       publicProperties: PublicProperties,
@@ -163,9 +189,11 @@ class ParentQueryInputFilter(parentFilter: InputQuery[Traversal.U
       .getTypeArgs(traversalType, ru.typeOf[Traversal[_, _, _]])
       .headOption
       .collect {
-        case t if t <:< ru.typeOf[Task] => filter[Task, Case](_.`case`)
-        case t if t <:< ru.typeOf[Observable] => filter[Observable, Case](_.`case`)
-        case t if t <:< ru.typeOf[Log] => filter[Log, Task](_.task)
+        case t if t <:< ru.typeOf[Task] && parentType == "caseTemplate" => filter[Task, CaseTemplate](_.caseTemplate)
+        case t if t <:< ru.typeOf[Task] => filter[Task, Case](_.`case`)
+        case t if t <:< ru.typeOf[Observable] && parentType == "alert" => filter[Observable, Alert](_.alert)
+        case t if t <:< ru.typeOf[Observable] => filter[Observable, Case](_.`case`)
+        case t if t <:< ru.typeOf[Log] => filter[Log, Task](_.task)
       }
       .getOrElse(throw BadRequestError(s"$traversalType hasn't parent"))
 }
@@ -205,9 +233,11 @@ class ChildQueryInputFilter(childType: String, childFilter: InputQuery[Traversal
       .getTypeArgs(traversalType, ru.typeOf[Traversal[_, _, _]])
       .headOption
       .collect {
-        case t if t <:< ru.typeOf[Case] && childType == "case_task" => filter[Case, Task](_.tasks(authContext))
-        case t if t <:< ru.typeOf[Case] && childType == "case_artifact" => filter[Case, Observable](_.observables(authContext))
-        case t if t <:< ru.typeOf[Task] && childType == "case_task_log" => filter[Task, Log](_.logs)
+        case t if t <:< ru.typeOf[Case] && childType == "case_task" => filter[Case, Task](_.tasks(authContext))
+        case t if t <:< ru.typeOf[Case] && childType == "case_artifact" => filter[Case, Observable](_.observables(authContext))
+        case t if t <:< ru.typeOf[Task] && childType == "case_task_log" => filter[Task, Log](_.logs)
+        case t if t <:< ru.typeOf[Alert] && childType == "alert_artifact" => filter[Alert, Observable](_.observables)
+        case t if t <:< ru.typeOf[CaseTemplate] && childType == "caseTemplate_task" => filter[CaseTemplate, Task](_.tasks)
       }
       .getOrElse(throw BadRequestError(s"$traversalType hasn't child $childType"))
 }
diff --git a/thehive/app/org/thp/thehive/controllers/v1/AlertRenderer.scala b/thehive/app/org/thp/thehive/controllers/v1/AlertRenderer.scala
index 23e0682d1b..ac257b06e7 100644
--- a/thehive/app/org/thp/thehive/controllers/v1/AlertRenderer.scala
+++ b/thehive/app/org/thp/thehive/controllers/v1/AlertRenderer.scala
@@ -10,7 +10,7 @@ import org.thp.thehive.models.{Alert, RichCase, SimilarStats}
 import org.thp.thehive.services.AlertOps._
 import play.api.libs.json._
 
-trait AlertRenderer {
+trait AlertRenderer extends BaseRenderer[Alert] {
   implicit val similarCaseWrites: Writes[(RichCase, SimilarStats)] = Writes[(RichCase, SimilarStats)] {
     case (richCase, similarStats) =>
       Json.obj(
@@ -39,32 +39,12 @@ trait AlertRenderer {
     _.similarCases(None).fold.domainMap(sc => JsArray(sc.sorted.map(Json.toJson(_))))
   }
 
-  def alertStatsRenderer[D, G, C <: Converter[D, G]](extraData: Set[String])(implicit
-      authContext: AuthContext
-  ): Traversal.V[Alert] => Traversal[JsObject, JMap[String, Any], Converter[JsObject, JMap[String, Any]]] = { traversal =>
-    def addData[T](
-        name: String
-    )(f: Traversal.V[Alert] => Traversal[JsValue, T, Converter[JsValue, T]]): Traversal[JsObject, JMap[String, Any], Converter[
-      JsObject,
-      JMap[String, Any]
-    ]] => Traversal[JsObject, JMap[String, Any], Converter[JsObject, JMap[String, Any]]] = { t =>
-      val dataTraversal = f(traversal.start)
-      t.onRawMap[JsObject, JMap[String, Any], Converter[JsObject, JMap[String, Any]]](_.by(dataTraversal.raw)) { jmap =>
-        t.converter(jmap) + (name -> dataTraversal.converter(jmap.get(name).asInstanceOf[T]))
-      }
-    }
-
-    if (extraData.isEmpty) traversal.constant2(JsObject.empty)
-    else {
-      val dataName = extraData.toSeq
-      dataName.foldLeft[Traversal[JsObject, JMap[String, Any], Converter[JsObject, JMap[String, Any]]]](
-        traversal.onRawMap[JsObject, JMap[String, Any], Converter[JsObject, JMap[String, Any]]](_.project(dataName.head, dataName.tail: _*))(_ =>
-          JsObject.empty
-        )
-      ) {
-        case (f, "similarCases") => addData("similarCases")(similarCasesStats)(f)
-        case (f, _) => f
-      }
-    }
+  def alertStatsRenderer(extraData: Set[String])(
+      implicit authContext: AuthContext
+  ): Traversal.V[Alert] => JsTraversal = { implicit traversal =>
+    baseRenderer(extraData, traversal, {
+      case (f, "similarCases") => addData("similarCases", f)(similarCasesStats)
+      case (f, _) => f
+    })
   }
 }
diff --git a/thehive/app/org/thp/thehive/controllers/v1/BaseRenderer.scala b/thehive/app/org/thp/thehive/controllers/v1/BaseRenderer.scala
new file mode 100644
index 0000000000..f671190351
--- /dev/null
+++ b/thehive/app/org/thp/thehive/controllers/v1/BaseRenderer.scala
@@ -0,0 +1,36 @@
+package org.thp.thehive.controllers.v1
+
+import java.util.{Map => JMap}
+
+import org.thp.scalligraph.traversal.TraversalOps._
+import org.thp.scalligraph.traversal.{Converter, Traversal}
+import play.api.libs.json.{JsObject, JsValue}
+
+trait BaseRenderer[A] {
+
+  type JsConverter = Converter[JsObject, JMap[String, Any]]
+  type JsTraversal = Traversal[JsObject, JMap[String, Any], JsConverter]
+  def baseRenderer(
+      extraData: Set[String],
+      traversal: Traversal.V[A],
+      mapping: (JsTraversal, String) => JsTraversal
+  ): JsTraversal = {
+    if (extraData.isEmpty) traversal.constant2[JsObject, JMap[String, Any]](JsObject.empty)
+    else {
+      val dataName = extraData.toSeq
+      dataName.foldLeft[JsTraversal](
+        traversal.onRawMap[JsObject, JMap[String, Any], JsConverter](_.project(dataName.head, dataName.tail: _*))(_ =>
+          JsObject.empty
+        )
+      )(mapping)
+    }
+  }
+
+  def addData[G](name: String, jsTraversal: JsTraversal)(f: Traversal.V[A] => Traversal[JsValue, G, Converter[JsValue, G]])(implicit traversal: Traversal.V[A]): JsTraversal = {
+    val dataTraversal = f(traversal.start)
+    jsTraversal.onRawMap[JsObject, JMap[String, Any], JsConverter](_.by(dataTraversal.raw)) { jmap =>
+      jsTraversal.converter(jmap) + (name -> dataTraversal.converter(jmap.get(name).asInstanceOf[G]))
+    }
+  }
+
+}
diff --git a/thehive/app/org/thp/thehive/controllers/v1/CaseRenderer.scala b/thehive/app/org/thp/thehive/controllers/v1/CaseRenderer.scala
index 87e052d53b..fbfeacdb1a 100644
--- a/thehive/app/org/thp/thehive/controllers/v1/CaseRenderer.scala
+++ b/thehive/app/org/thp/thehive/controllers/v1/CaseRenderer.scala
@@ -1,8 +1,5 @@
 package org.thp.thehive.controllers.v1
 
-import java.lang.{Long => JLong}
-import java.util.{Collection => JCollection, List => JList, Map => JMap}
-
 import org.apache.tinkerpop.gremlin.structure.Vertex
 import org.thp.scalligraph.auth.AuthContext
 import org.thp.scalligraph.traversal.TraversalOps._
@@ -14,7 +11,10 @@ import org.thp.thehive.services.ShareOps._
 import org.thp.thehive.services.TaskOps._
 import play.api.libs.json._
 
-trait CaseRenderer {
+import java.lang.{Long => JLong}
+import java.util.{Collection => JCollection, List => JList, Map => JMap}
+
+trait CaseRenderer extends BaseRenderer[Case] {
 
   def observableStats(implicit authContext: AuthContext): Traversal.V[Case] => Traversal[JsValue, JLong, Converter[JsValue, JLong]] =
     _.share
@@ -56,37 +56,21 @@ trait CaseRenderer {
   def permissions(implicit authContext: AuthContext): Traversal.V[Case] => Traversal[JsValue, Vertex, Converter[JsValue, Vertex]] =
     _.userPermissions.domainMap(permissions => Json.toJson(permissions))
 
-  def caseStatsRenderer(extraData: Set[String])(implicit
-      authContext: AuthContext
-  ): Traversal.V[Case] => Traversal[JsObject, JMap[String, Any], Converter[JsObject, JMap[String, Any]]] = { traversal =>
-    def addData[G](
-        name: String
-    )(f: Traversal.V[Case] => Traversal[JsValue, G, Converter[JsValue, G]]): Traversal[JsObject, JMap[String, Any], Converter[
-      JsObject,
-      JMap[String, Any]
-    ]] => Traversal[JsObject, JMap[String, Any], Converter[JsObject, JMap[String, Any]]] = { t =>
-      val dataTraversal = f(traversal.start)
-      t.onRawMap[JsObject, JMap[String, Any], Converter[JsObject, JMap[String, Any]]](_.by(dataTraversal.raw)) { jmap =>
-        t.converter(jmap) + (name -> dataTraversal.converter(jmap.get(name).asInstanceOf[G]))
-      }
-    }
+  def actionRequired(implicit authContext: AuthContext): Traversal.V[Case] => Traversal[JsValue, Boolean, Converter[JsValue, Boolean]] =
+    _.isActionRequired.domainMap(JsBoolean(_))
 
-    if (extraData.isEmpty) traversal.constant2[JsObject, JMap[String, Any]](JsObject.empty)
-    else {
-      val dataName = extraData.toSeq
-      dataName.foldLeft[Traversal[JsObject, JMap[String, Any], Converter[JsObject, JMap[String, Any]]]](
-        traversal.onRawMap[JsObject, JMap[String, Any], Converter[JsObject, JMap[String, Any]]](_.project(dataName.head, dataName.tail: _*))(_ =>
-          JsObject.empty
-        )
-      ) {
-        case (f, "observableStats") => addData("observableStats")(observableStats)(f)
-        case (f, "taskStats") => addData("taskStats")(taskStats)(f)
-        case (f, "alerts") => addData("alerts")(alertStats)(f)
-        case (f, "isOwner") => addData("isOwner")(isOwnerStats)(f)
-        case (f, "shareCount") => addData("shareCount")(shareCountStats)(f)
-        case (f, "permissions") => addData("permissions")(permissions)(f)
-        case (f, _) => f
-      }
-    }
+  def caseStatsRenderer(extraData: Set[String])(
+      implicit authContext: AuthContext
+  ): Traversal.V[Case] => JsTraversal = { implicit traversal =>
+    baseRenderer(extraData, traversal, {
+      case (f, "observableStats") => addData("observableStats", f)(observableStats)
+      case (f, "taskStats") => addData("taskStats", f)(taskStats)
+      case (f, "alerts") => addData("alerts", f)(alertStats)
+      case (f, "isOwner") => addData("isOwner", f)(isOwnerStats)
+      case (f, "shareCount") => addData("shareCount", f)(shareCountStats)
+      case (f, "permissions") => addData("permissions", f)(permissions)
+      case (f, "actionRequired") => addData("actionRequired", f)(actionRequired)
+      case (f, _) => f
+    })
   }
 }
diff --git a/thehive/app/org/thp/thehive/controllers/v1/CaseTemplateCtrl.scala b/thehive/app/org/thp/thehive/controllers/v1/CaseTemplateCtrl.scala
index 34b55c6403..ac31a52866 100644
--- a/thehive/app/org/thp/thehive/controllers/v1/CaseTemplateCtrl.scala
+++ b/thehive/app/org/thp/thehive/controllers/v1/CaseTemplateCtrl.scala
@@ -9,7 +9,7 @@ import org.thp.scalligraph.traversal.TraversalOps._
 import org.thp.scalligraph.traversal.{IteratorOutput, Traversal}
 import org.thp.thehive.controllers.v1.Conversion._
 import org.thp.thehive.dto.v1.InputCaseTemplate
-import org.thp.thehive.models.{CaseTemplate, Permissions, RichCaseTemplate}
+import org.thp.thehive.models.{CaseTemplate, Permissions, RichCaseTemplate, Task}
 import org.thp.thehive.services.CaseTemplateOps._
 import org.thp.thehive.services.OrganisationOps._
 import org.thp.thehive.services.{CaseTemplateSrv, OrganisationSrv}
@@ -41,8 +41,10 @@ class CaseTemplateCtrl @Inject() (
     FieldsParser[OutputParam],
     (range, caseTemplateSteps, _) => caseTemplateSteps.richPage(range.from, range.to, range.extraData.contains("total"))(_.richCaseTemplate)
   )
-  override val outputQuery: Query = Query.output[RichCaseTemplate, Traversal.V[CaseTemplate]](_.richCaseTemplate)
-  override val extraQueries: Seq[ParamQuery[_]] = Seq()
+  override val outputQuery: Query = Query.output[RichCaseTemplate, Traversal.V[CaseTemplate]](_.richCaseTemplate)
+  override val extraQueries: Seq[ParamQuery[_]] = Seq(
+    Query[Traversal.V[CaseTemplate], Traversal.V[Task]]("tasks", (caseTemplateSteps, _) => caseTemplateSteps.tasks)
+  )
 
   def create: Action[AnyContent] =
     entrypoint("create case template")
diff --git a/thehive/app/org/thp/thehive/controllers/v1/DescribeCtrl.scala b/thehive/app/org/thp/thehive/controllers/v1/DescribeCtrl.scala
index 33e3fae52e..ada61aef97 100644
--- a/thehive/app/org/thp/thehive/controllers/v1/DescribeCtrl.scala
+++ b/thehive/app/org/thp/thehive/controllers/v1/DescribeCtrl.scala
@@ -2,9 +2,8 @@ package org.thp.thehive.controllers.v1
 
 import java.lang.{Boolean => JBoolean}
 import java.util.Date
-
 import javax.inject.{Inject, Named, Singleton}
-import org.thp.scalligraph.NotFoundError
+import org.thp.scalligraph.{EntityId, NotFoundError}
 import org.thp.scalligraph.controllers.Entrypoint
 import org.thp.scalligraph.models.Database
 import org.thp.scalligraph.query.PublicProperty
@@ -212,6 +211,7 @@ class DescribeCtrl @Inject() (
       case c if c == classOf[Hash] => Seq(PropertyDescription(prop.propertyName, "string"))
       case c if classOf[Number].isAssignableFrom(c) => Seq(PropertyDescription(prop.propertyName, "number"))
       case c if c == classOf[String] => Seq(PropertyDescription(prop.propertyName, "string"))
+      case c if c == classOf[EntityId] => Seq(PropertyDescription(prop.propertyName, "string"))
       case _ =>
         logger.warn(s"Unrecognized property $prop. Add a custom description")
         Seq(PropertyDescription(prop.propertyName, "unknown"))
diff --git a/thehive/app/org/thp/thehive/controllers/v1/LogCtrl.scala b/thehive/app/org/thp/thehive/controllers/v1/LogCtrl.scala
index fcd7e2be74..2627780719 100644
--- a/thehive/app/org/thp/thehive/controllers/v1/LogCtrl.scala
+++ b/thehive/app/org/thp/thehive/controllers/v1/LogCtrl.scala
@@ -59,10 +59,8 @@ class LogCtrl @Inject() (
             .get(EntityIdOrName(taskId))
             .can(Permissions.manageTask)
             .getOrFail("Task")
-          createdLog <- logSrv.create(inputLog.toLog, task)
-          attachment <- inputLog.attachment.map(logSrv.addAttachment(createdLog, _)).flip
-          richLog = RichLog(createdLog, attachment.toList)
-        } yield Results.Created(richLog.toJson)
+          createdLog <- logSrv.create(inputLog.toLog, task, inputLog.attachment)
+        } yield Results.Created(createdLog.toJson)
     }
 
   def update(logId: String): Action[AnyContent] =
diff --git a/thehive/app/org/thp/thehive/controllers/v1/LogRenderer.scala b/thehive/app/org/thp/thehive/controllers/v1/LogRenderer.scala
index 6b160c1635..5d234c9523 100644
--- a/thehive/app/org/thp/thehive/controllers/v1/LogRenderer.scala
+++ b/thehive/app/org/thp/thehive/controllers/v1/LogRenderer.scala
@@ -14,7 +14,7 @@ import org.thp.thehive.services.LogOps._
 import org.thp.thehive.services.TaskOps._
 import play.api.libs.json._
 
-trait LogRenderer {
+trait LogRenderer extends BaseRenderer[Log] {
 
   def caseParent(implicit
       authContext: AuthContext
@@ -35,35 +35,15 @@ trait LogRenderer {
   def actionCount: Traversal.V[Log] => Traversal[JsValue, JLong, Converter[JsValue, JLong]] =
     _.in("ActionContext").count.domainMap(JsNumber(_))
 
-  def logStatsRenderer(extraData: Set[String])(implicit
-      authContext: AuthContext
-  ): Traversal.V[Log] => Traversal[JsObject, JMap[String, Any], Converter[JsObject, JMap[String, Any]]] = { traversal =>
-    def addData[G](
-        name: String
-    )(f: Traversal.V[Log] => Traversal[JsValue, G, Converter[JsValue, G]]): Traversal[JsObject, JMap[String, Any], Converter[
-      JsObject,
-      JMap[String, Any]
-    ]] => Traversal[JsObject, JMap[String, Any], Converter[JsObject, JMap[String, Any]]] = { t =>
-      val dataTraversal = f(traversal.start)
-      t.onRawMap[JsObject, JMap[String, Any], Converter[JsObject, JMap[String, Any]]](_.by(dataTraversal.raw)) { jmap =>
-        t.converter(jmap) + (name -> dataTraversal.converter(jmap.get(name).asInstanceOf[G]))
-      }
-    }
-
-    if (extraData.isEmpty) traversal.constant2[JsObject, JMap[String, Any]](JsObject.empty)
-    else {
-      val dataName = extraData.toSeq
-      dataName.foldLeft[Traversal[JsObject, JMap[String, Any], Converter[JsObject, JMap[String, Any]]]](
-        traversal.onRawMap[JsObject, JMap[String, Any], Converter[JsObject, JMap[String, Any]]](_.project(dataName.head, dataName.tail: _*))(_ =>
-          JsObject.empty
-        )
-      ) {
-        case (f, "case") => addData("case")(caseParent)(f)
-        case (f, "task") => addData("task")(taskParent)(f)
-        case (f, "taskId") => addData("taskId")(taskParentId)(f)
-        case (f, "actionCount") => addData("actionCount")(actionCount)(f)
-        case (f, _) => f
-      }
-    }
+  def logStatsRenderer(extraData: Set[String])(
+      implicit authContext: AuthContext
+  ): Traversal.V[Log] => JsTraversal = { implicit traversal =>
+    baseRenderer(extraData, traversal, {
+      case (f, "case") => addData("case", f)(caseParent)
+      case (f, "task") => addData("task", f)(taskParent)
+      case (f, "taskId") => addData("taskId", f)(taskParentId)
+      case (f, "actionCount") => addData("actionCount", f)(actionCount)
+      case (f, _) => f
+    })
   }
 }
diff --git a/thehive/app/org/thp/thehive/controllers/v1/ObservableCtrl.scala b/thehive/app/org/thp/thehive/controllers/v1/ObservableCtrl.scala
index f383a7a025..e41f8822f7 100644
--- a/thehive/app/org/thp/thehive/controllers/v1/ObservableCtrl.scala
+++ b/thehive/app/org/thp/thehive/controllers/v1/ObservableCtrl.scala
@@ -1,20 +1,16 @@
 package org.thp.thehive.controllers.v1
 
-import java.io.FilterInputStream
-import java.nio.file.Files
-
-import javax.inject.{Inject, Named, Singleton}
 import net.lingala.zip4j.ZipFile
 import net.lingala.zip4j.model.FileHeader
 import org.thp.scalligraph._
 import org.thp.scalligraph.auth.AuthContext
 import org.thp.scalligraph.controllers._
-import org.thp.scalligraph.models.Database
+import org.thp.scalligraph.models.{Database, Entity}
 import org.thp.scalligraph.query.{ParamQuery, PropertyUpdater, PublicProperties, Query}
 import org.thp.scalligraph.traversal.TraversalOps._
 import org.thp.scalligraph.traversal.{IteratorOutput, Traversal}
 import org.thp.thehive.controllers.v1.Conversion._
-import org.thp.thehive.dto.v1.InputObservable
+import org.thp.thehive.dto.v1.{InputAttachment, InputObservable}
 import org.thp.thehive.models._
 import org.thp.thehive.services.CaseOps._
 import org.thp.thehive.services.ObservableOps._
@@ -22,10 +18,15 @@ import org.thp.thehive.services.OrganisationOps._
 import org.thp.thehive.services.ShareOps._
 import org.thp.thehive.services._
 import play.api.libs.Files.DefaultTemporaryFileCreator
+import play.api.libs.json.{JsArray, JsValue, Json}
 import play.api.mvc.{Action, AnyContent, Results}
 import play.api.{Configuration, Logger}
 
+import java.io.FilterInputStream
+import java.nio.file.Files
+import javax.inject.{Inject, Named, Singleton}
 import scala.collection.JavaConverters._
+import scala.util.{Failure, Success}
 
 @Singleton
 class ObservableCtrl @Inject() (
@@ -36,6 +37,8 @@ class ObservableCtrl @Inject() (
     observableTypeSrv: ObservableTypeSrv,
     caseSrv: CaseSrv,
     organisationSrv: OrganisationSrv,
+    attachmentSrv: AttachmentSrv,
+    errorHandler: ErrorHandler,
     temporaryFileCreator: DefaultTemporaryFileCreator,
     configuration: Configuration
 ) extends QueryableCtrl
@@ -75,7 +78,8 @@ class ObservableCtrl @Inject() (
       "similar",
       (observableSteps, authContext) => observableSteps.filteredSimilar.visible(authContext)
     ),
-    Query[Traversal.V[Observable], Traversal.V[Case]]("case", (observableSteps, _) => observableSteps.`case`)
+    Query[Traversal.V[Observable], Traversal.V[Case]]("case", (observableSteps, _) => observableSteps.`case`),
+    Query[Traversal.V[Observable], Traversal.V[Alert]]("alert", (observableSteps, _) => observableSteps.alert)
   )
 
   def create(caseId: String): Action[AnyContent] =
@@ -83,35 +87,79 @@ class ObservableCtrl @Inject() (
       .extract("artifact", FieldsParser[InputObservable])
       .extract("isZip", FieldsParser.boolean.optional.on("isZip"))
      .extract("zipPassword", FieldsParser.string.optional.on("zipPassword"))
-      .authTransaction(db) { implicit request => implicit graph =>
+      .auth { implicit request =>
+        val inputObservable: InputObservable = request.body("artifact")
         val isZip: Option[Boolean] = request.body("isZip")
         val zipPassword: Option[String] = request.body("zipPassword")
-        val inputObservable: InputObservable = request.body("artifact")
         val inputAttachObs = if (isZip.contains(true)) getZipFiles(inputObservable, zipPassword) else Seq(inputObservable)
-        for {
-          case0 <-
-            caseSrv
-              .get(EntityIdOrName(caseId))
-              .can(Permissions.manageObservable)
-              .getOrFail("Case")
-          observableType <- observableTypeSrv.getOrFail(EntityName(inputObservable.dataType))
-          observablesWithData <-
-            inputObservable
-              .data
-              .toTry(d => observableSrv.create(inputObservable.toObservable, observableType, d, inputObservable.tags, Nil))
-          observableWithAttachment <- inputAttachObs.toTry(
-            _.attachment
-              .map(a => observableSrv.create(inputObservable.toObservable, observableType, a, inputObservable.tags, Nil))
-              .flip
-          )
-          createdObservables <- (observablesWithData ++ observableWithAttachment.flatten).toTry { richObservables =>
-            caseSrv
-              .addObservable(case0, richObservables)
-              .map(_ => richObservables)
+
+        db
+          .roTransaction { implicit graph =>
+            for {
+              case0 <-
+                caseSrv
+                  .get(EntityIdOrName(caseId))
+                  .can(Permissions.manageObservable)
+                  .orFail(AuthorizationError("Operation not permitted"))
+              observableType <- observableTypeSrv.getOrFail(EntityName(inputObservable.dataType))
+            } yield (case0, observableType)
+          }
+          .map {
+            case (case0, observableType) =>
+              val (successes, failures) = inputAttachObs
+                .flatMap { obs =>
+                  obs.attachment.map(createAttachmentObservable(case0, obs, observableType, _)) ++
+                    obs.data.map(createSimpleObservable(case0, obs, observableType, _))
+                }
+                .foldLeft[(Seq[JsValue], Seq[JsValue])]((Nil, Nil)) {
+                  case ((s, f), Right(o)) => (s :+ o, f)
+                  case ((s, f), Left(o)) => (s, f :+ o)
+                }
+              if (failures.isEmpty) Results.Created(JsArray(successes))
+              else Results.MultiStatus(Json.obj("success" -> successes, "failure" -> failures))
           }
-        } yield Results.Created(createdObservables.toJson)
       }
 
+  def createSimpleObservable(
+      `case`: Case with Entity,
+      inputObservable: InputObservable,
+      observableType: ObservableType with Entity,
+      data: String
+  )(implicit authContext: AuthContext): Either[JsValue, JsValue] =
+    db
+      .tryTransaction { implicit graph =>
+        observableSrv
+          .create(inputObservable.toObservable, observableType, data, inputObservable.tags, Nil)
+          .flatMap(o => caseSrv.addObservable(`case`, o).map(_ => o))
+      } match {
+      case Success(o) => Right(o.toJson)
+      case Failure(error) => Left(errorHandler.toErrorResult(error)._2 ++ Json.obj("object" -> Json.obj("data" -> data)))
+    }
+
+  def createAttachmentObservable(
+      `case`: Case with Entity,
+      inputObservable: InputObservable,
+      observableType: ObservableType with Entity,
+      fileOrAttachment: Either[FFile, InputAttachment]
+  )(implicit authContext: AuthContext): Either[JsValue, JsValue] =
+    db
+      .tryTransaction { implicit graph =>
+        val observable = fileOrAttachment match {
+          case Left(file) => observableSrv.create(inputObservable.toObservable, observableType, file, inputObservable.tags, Nil)
+          case Right(attachment) =>
+            for {
+              attach <- attachmentSrv.duplicate(attachment.name, attachment.contentType, attachment.id)
+              obs <- observableSrv.create(inputObservable.toObservable, observableType, attach, inputObservable.tags, Nil)
+            } yield obs
+        }
+        observable.flatMap(o => caseSrv.addObservable(`case`, o).map(_ => o))
+      } match {
+      case Success(o) => Right(o.toJson)
+      case _ =>
+        val filename = fileOrAttachment.fold(_.filename, _.name)
+        Left(Json.obj("object" -> Json.obj("data" -> s"file:$filename", "attachment" -> Json.obj("name" -> filename))))
+    }
+
   def get(observableId: String): Action[AnyContent] =
     entryPoint("get observable")
      .authRoTransaction(db) { _ => implicit graph =>
@@ -196,8 +244,8 @@ class ObservableCtrl @Inject() (
     }
   }
 
-  private def getZipFiles(observable: InputObservable, zipPassword: Option[String])(implicit authContext: AuthContext): Seq[InputObservable] =
-    observable.attachment.toSeq.flatMap { attachment =>
+  private def getZipFiles(observable: InputObservable, zipPassword: Option[String]): Seq[InputObservable] =
+    observable.attachment.flatMap(_.swap.toSeq).flatMap { attachment =>
       val zipFile = new ZipFile(attachment.filepath.toFile)
       val files: Seq[FileHeader] = zipFile.getFileHeaders.asScala.asInstanceOf[Seq[FileHeader]]
@@ -207,6 +255,6 @@ class ObservableCtrl @Inject() (
       files
         .filterNot(_.isDirectory)
         .flatMap(extractAndCheckSize(zipFile, _))
-        .map(ffile => observable.copy(attachment = Some(ffile)))
+        .map(ffile => observable.copy(attachment = Seq(Left(ffile))))
     }
 }
diff --git a/thehive/app/org/thp/thehive/controllers/v1/ObservableRenderer.scala b/thehive/app/org/thp/thehive/controllers/v1/ObservableRenderer.scala
index 8bef007b51..603670c0a0 100644
--- a/thehive/app/org/thp/thehive/controllers/v1/ObservableRenderer.scala
+++ b/thehive/app/org/thp/thehive/controllers/v1/ObservableRenderer.scala
@@ -15,7 +15,7 @@ import org.thp.thehive.services.ObservableOps._
 import org.thp.thehive.services.OrganisationOps._
 import play.api.libs.json._
 
-trait ObservableRenderer {
+trait ObservableRenderer extends BaseRenderer[Observable] {
 
   def seenStats(implicit
       authContext: AuthContext
@@ -52,38 +52,17 @@ trait ObservableRenderer {
   def permissions(implicit authContext: AuthContext): Traversal.V[Observable] => Traversal[JsValue, Vertex, Converter[JsValue, Vertex]] =
     _.userPermissions.domainMap(permissions => Json.toJson(permissions))
 
-  def observableStatsRenderer(
-      extraData: Set[String]
-  )(implicit authContext: AuthContext): Traversal.V[Observable] => Traversal[JsObject, JMap[String, Any], Converter[JsObject, JMap[String, Any]]] = {
-    traversal =>
-      def addData[G](
-          name: String
-      )(f: Traversal.V[Observable] => Traversal[JsValue, G, Converter[JsValue, G]]): Traversal[JsObject, JMap[String, Any], Converter[
-        JsObject,
-        JMap[String, Any]
-      ]] => Traversal[JsObject, JMap[String, Any], Converter[JsObject, JMap[String, Any]]] = { t =>
-        val dataTraversal = f(traversal.start)
-        t.onRawMap[JsObject, JMap[String, Any], Converter[JsObject, JMap[String, Any]]](_.by(dataTraversal.raw)) { jmap =>
-          t.converter(jmap) + (name -> dataTraversal.converter(jmap.get(name).asInstanceOf[G]))
-        }
-      }
-
-      if (extraData.isEmpty) traversal.constant2[JsObject, JMap[String, Any]](JsObject.empty)
-      else {
-        val dataName = extraData.toSeq
-        dataName.foldLeft[Traversal[JsObject, JMap[String, Any], Converter[JsObject, JMap[String, Any]]]](
-          traversal.onRawMap[JsObject, JMap[String, Any], Converter[JsObject, JMap[String, Any]]](_.project(dataName.head, dataName.tail: _*))(_ =>
-            JsObject.empty
-          )
-        ) {
-          case (f, "seen") => addData("seen")(seenStats)(f)
-          case (f, "shares") => addData("shares")(sharesStats)(f)
-          case (f, "links") => addData("links")(observableLinks)(f)
-          case (f, "permissions") => addData("permissions")(permissions)(f)
-          case (f, "isOwner") => addData("isOwner")(isOwner)(f)
-          case (f, "shareCount") => addData("shareCount")(shareCount)(f)
-          case (f, _) => f
-        }
-      }
+  def observableStatsRenderer(extraData: Set[String])(
+      implicit authContext: AuthContext
+  ): Traversal.V[Observable] => JsTraversal = { implicit traversal =>
+    baseRenderer(extraData, traversal, {
+      case (f, "seen") => addData("seen", f)(seenStats)
+      case (f, "shares") => addData("shares", f)(sharesStats)
+      case (f, "links") => addData("links", f)(observableLinks)
+      case (f, "permissions") => addData("permissions", f)(permissions)
+      case (f, "isOwner") => addData("isOwner", f)(isOwner)
+      case (f, "shareCount") => addData("shareCount", f)(shareCount)
+      case (f, _) => f
+    })
   }
 }
diff --git a/thehive/app/org/thp/thehive/controllers/v1/Properties.scala b/thehive/app/org/thp/thehive/controllers/v1/Properties.scala
index a41f6a537d..fae8188f4e 100644
--- a/thehive/app/org/thp/thehive/controllers/v1/Properties.scala
+++ b/thehive/app/org/thp/thehive/controllers/v1/Properties.scala
@@ -193,6 +193,10 @@ class Properties @Inject() (
     .property("pap", UMapping.int)(_.field.updatable)
     .property("status", UMapping.enum[CaseStatus.type])(_.field.updatable)
     .property("summary", UMapping.string.optional)(_.field.updatable)
+    .property("actionRequired", UMapping.boolean)(_
+      .authSelect((t, auth) => t.isActionRequired(auth))
+      .readonly
+    )
     .property("assignee", UMapping.string.optional)(_.select(_.user.value(_.login)).custom {
       (_, login, vertex, _, graph, authContext) =>
         for {
           c <- caseSrv.get(vertex)(graph).getOrFail("Case")
@@ -429,6 +433,12 @@ class Properties @Inject() (
           }
           .map(_ => Json.obj("assignee" -> value))
       })
+      .property("actionRequired", UMapping.boolean)(_
+        .authSelect((t, authContext) => {
+          t.actionRequired(authContext)
+        })
+        .readonly
+      )
       .build
 
   lazy val log: PublicProperties =
diff --git a/thehive/app/org/thp/thehive/controllers/v1/Router.scala b/thehive/app/org/thp/thehive/controllers/v1/Router.scala
index feffe865bb..b236bc4f8b 100644
--- a/thehive/app/org/thp/thehive/controllers/v1/Router.scala
+++ b/thehive/app/org/thp/thehive/controllers/v1/Router.scala
@@ -68,10 +68,13 @@ class Router @Inject() (
     //    case GET(p"/share/$shareId") => shareCtrl.get(shareId)
     //    case PATCH(p"/share/$shareId") => shareCtrl.update(shareId)
 
-    case GET(p"/task") => taskCtrl.list
-    case POST(p"/task") => taskCtrl.create
-    case GET(p"/task/$taskId") => taskCtrl.get(taskId)
-    case PATCH(p"/task/$taskId") => taskCtrl.update(taskId)
+    case GET(p"/task") => taskCtrl.list
+    case POST(p"/task") => taskCtrl.create
+    case GET(p"/task/$taskId") => taskCtrl.get(taskId)
+    case PATCH(p"/task/$taskId") => taskCtrl.update(taskId)
+    case GET(p"/task/$taskId/actionRequired") => taskCtrl.isActionRequired(taskId)
+    case PUT(p"/task/$taskId/actionRequired/$orgaId") => taskCtrl.actionRequired(taskId, orgaId, required = true)
+    case PUT(p"/task/$taskId/actionDone/$orgaId") => taskCtrl.actionRequired(taskId, orgaId, required = false)
 
     //  POST     /case/:caseId/task/_search          controllers.TaskCtrl.findInCase(caseId)
     //  POST     /case/task/_stats                   controllers.TaskCtrl.stats()
diff --git a/thehive/app/org/thp/thehive/controllers/v1/StatusCtrl.scala b/thehive/app/org/thp/thehive/controllers/v1/StatusCtrl.scala
index 3930124ecd..1289dc0e89 100644
--- a/thehive/app/org/thp/thehive/controllers/v1/StatusCtrl.scala
+++ b/thehive/app/org/thp/thehive/controllers/v1/StatusCtrl.scala
@@ -1,23 +1,50 @@
 package org.thp.thehive.controllers.v1
 
-import javax.inject.{Inject, Singleton}
+import akka.actor.ActorSystem
+import akka.cluster.ClusterEvent.CurrentClusterState
+import akka.cluster.{Cluster, Member}
 import org.thp.scalligraph.ScalligraphApplicationLoader
 import org.thp.scalligraph.auth.{AuthCapability, AuthSrv, MultiAuthSrv}
 import org.thp.scalligraph.controllers.Entrypoint
+import org.thp.scalligraph.services.config.ApplicationConfig.finiteDurationFormat
 import org.thp.scalligraph.services.config.{ApplicationConfig, ConfigItem}
 import org.thp.thehive.TheHiveModule
-import play.api.libs.json.{JsObject, JsString, Json}
+import play.api.libs.json.{JsObject, JsString, Json, Writes}
 import play.api.mvc.{AbstractController, Action, AnyContent, Results}
 
+import javax.inject.{Inject, Singleton}
+import scala.concurrent.duration.FiniteDuration
 import scala.util.Success
 
 @Singleton
-class StatusCtrl @Inject() (entrypoint: Entrypoint, appConfig: ApplicationConfig, authSrv: AuthSrv) {
+class StatusCtrl @Inject() (entrypoint: Entrypoint, appConfig: ApplicationConfig, authSrv: AuthSrv, system: ActorSystem) {
 
   private def getVersion(c: Class[_]): String = Option(c.getPackage.getImplementationVersion).getOrElse("SNAPSHOT")
 
   val passwordConfig: ConfigItem[String, String] = appConfig.item[String]("datastore.attachment.password", "Password used to protect attachment ZIP")
   def password: String = passwordConfig.get
+  val streamPollingDurationConfig: ConfigItem[FiniteDuration, FiniteDuration] =
+    appConfig.item[FiniteDuration]("stream.longPolling.pollingDuration", "amount of time the UI has to wait before polling the stream")
+  def streamPollingDuration: FiniteDuration = streamPollingDurationConfig.get
+  val cluster: Cluster = Cluster(system)
+
+  implicit val memberWrites: Writes[Member] = Writes[Member] { member =>
+    Json.obj(
+      "address" -> member.uniqueAddress.address.toString,
+      "status" -> member.status.toString,
+      "roles" -> member.roles
+    )
+  }
+  implicit val clusterStateWrites: Writes[CurrentClusterState] = Writes[CurrentClusterState] { state =>
+    Json.obj(
+      "members" -> state.members,
+      "unreachable" -> state.unreachable,
+      "seenBy" -> state.seenBy.map(_.toString),
+      "leader" -> state.leader.map(_.toString),
+      "unreachableDataCenters" -> state.unreachableDataCenters
+      //"roleLeaderMap" -> state.roleLeaderMap,
+    )
+  }
 
   def get: Action[AnyContent] =
     entrypoint("status") { _ =>
@@ -36,9 +63,11 @@ class StatusCtrl @Inject() (entrypoint: Entrypoint, appConfig: ApplicationConfig
             case multiAuthSrv: MultiAuthSrv => Json.toJson(multiAuthSrv.providerNames)
             case _ => JsString(authSrv.name)
           }),
-          "capabilities" -> authSrv.capabilities.map(c => JsString(c.toString)),
-          "ssoAutoLogin" -> authSrv.capabilities.contains(AuthCapability.sso)
-        )
+          "capabilities" -> authSrv.capabilities.map(c => JsString(c.toString)),
+          "ssoAutoLogin" -> authSrv.capabilities.contains(AuthCapability.sso),
+          "pollingDuration" -> streamPollingDuration.toMillis
+        ),
+        "cluster" -> cluster.state
       )
     )
   )
diff --git a/thehive/app/org/thp/thehive/controllers/v1/TaskCtrl.scala b/thehive/app/org/thp/thehive/controllers/v1/TaskCtrl.scala
index 6ffdbb1b81..55be07a869 100644
--- a/thehive/app/org/thp/thehive/controllers/v1/TaskCtrl.scala
+++ b/thehive/app/org/thp/thehive/controllers/v1/TaskCtrl.scala
@@ -1,6 +1,5 @@
 package org.thp.thehive.controllers.v1
 
-import javax.inject.{Inject, Named, Singleton}
 import org.thp.scalligraph.EntityIdOrName
 import org.thp.scalligraph.controllers.{Entrypoint, FieldsParser}
 import org.thp.scalligraph.models.Database
@@ -17,6 +16,7 @@ import org.thp.thehive.services.TaskOps._
 import org.thp.thehive.services.{CaseSrv, OrganisationSrv, ShareSrv, TaskSrv}
 import play.api.mvc.{Action, AnyContent, Results}
 
+import javax.inject.{Inject, Named, Singleton}
 import scala.util.Success
 
 @Singleton
@@ -48,7 +48,8 @@ class TaskCtrl @Inject() (
     FieldsParser[EntityIdOrName],
     (idOrName, graph, authContext) => taskSrv.get(idOrName)(graph).visible(authContext)
   )
-  override val outputQuery: Query = Query.output[RichTask, Traversal.V[Task]](_.richTask)
+  override val outputQuery: Query =
+    Query.outputWithContext[RichTask, Traversal.V[Task]]((taskSteps, _) => taskSteps.richTask)
   override val extraQueries: Seq[ParamQuery[_]] = Seq(
     Query.init[Traversal.V[Task]](
       "waitingTask",
@@ -57,6 +58,7 @@ class TaskCtrl @Inject() (
     Query[Traversal.V[Task], Traversal.V[User]]("assignableUsers", (taskSteps, authContext) => taskSteps.assignableUsers(authContext)),
     Query[Traversal.V[Task], Traversal.V[Log]]("logs", (taskSteps, _) => taskSteps.logs),
     Query[Traversal.V[Task], Traversal.V[Case]]("case", (taskSteps, _) => taskSteps.`case`),
+    Query[Traversal.V[Task], Traversal.V[CaseTemplate]]("caseTemplate", (taskSteps, _) => taskSteps.caseTemplate),
     Query[Traversal.V[Task], Traversal.V[Organisation]]("organisations", (taskSteps, authContext) => taskSteps.organisations.visible(authContext))
   )
@@ -110,4 +112,22 @@ class TaskCtrl @Inject() (
       )
       .map(_ => Results.NoContent)
   }
+
+  def isActionRequired(taskId: String): Action[AnyContent] =
+    entrypoint("is action required")
+      .authTransaction(db) { implicit request => implicit graph =>
+        val actionTraversal = taskSrv.get(EntityIdOrName(taskId)).visible.actionRequiredMap
+        Success(Results.Ok(actionTraversal.toSeq.toMap.toJson))
+      }
+
+  def actionRequired(taskId: String, orgaId: String, required: Boolean): Action[AnyContent] =
+    entrypoint("action required")
+      .authTransaction(db) { implicit request => implicit graph =>
+        for {
+          organisation <- organisationSrv.get(EntityIdOrName(orgaId)).visible.getOrFail("Organisation")
+          task <- taskSrv.get(EntityIdOrName(taskId)).visible.getOrFail("Task")
+          _ <- taskSrv.actionRequired(task, organisation, required)
+        } yield Results.NoContent
+      }
+
 }
diff --git a/thehive/app/org/thp/thehive/controllers/v1/TaskRenderer.scala b/thehive/app/org/thp/thehive/controllers/v1/TaskRenderer.scala
index da24a7ef58..14b3d37f0e 100644
--- a/thehive/app/org/thp/thehive/controllers/v1/TaskRenderer.scala
+++ b/thehive/app/org/thp/thehive/controllers/v1/TaskRenderer.scala
@@ -1,6 +1,6 @@
 package org.thp.thehive.controllers.v1
 
-import java.lang.{Long => JLong}
+import java.lang.{Long => JLong, Boolean => JBoolean}
 import java.util.{List => JList, Map => JMap}
 
 import org.apache.tinkerpop.gremlin.structure.Vertex
@@ -15,7 +15,7 @@ import org.thp.thehive.services.OrganisationOps._
 import org.thp.thehive.services.TaskOps._
 import play.api.libs.json._
 
-trait TaskRenderer {
+trait TaskRenderer extends BaseRenderer[Task] {
 
   def caseParent(implicit
       authContext: AuthContext
@@ -37,37 +37,26 @@ trait TaskRenderer {
   def isOwner(implicit authContext: AuthContext): Traversal.V[Task] => Traversal[JsValue, JList[Vertex], Converter[JsValue, JList[Vertex]]] =
     _.origin.get(authContext.organisation).fold.domainMap(l => JsBoolean(l.nonEmpty))
 
-  def taskStatsRenderer(extraData: Set[String])(implicit
-      authContext: AuthContext
-  ): Traversal.V[Task] => Traversal[JsObject, JMap[String, Any], Converter[JsObject, JMap[String, Any]]] = { traversal =>
-    def addData[G](
-        name: String
-    )(f: Traversal.V[Task] => Traversal[JsValue, G, Converter[JsValue, G]]): Traversal[JsObject, JMap[String, Any], Converter[
-      JsObject,
-      JMap[String, Any]
-    ]] => Traversal[JsObject, JMap[String, Any], Converter[JsObject, JMap[String, Any]]] = { t =>
-      val dataTraversal = f(traversal.start)
-      t.onRawMap[JsObject, JMap[String, Any], Converter[JsObject, JMap[String, Any]]](_.by(dataTraversal.raw)) { jmap =>
-        t.converter(jmap) + (name -> dataTraversal.converter(jmap.get(name).asInstanceOf[G]))
-      }
-    }
-
-    if (extraData.isEmpty) traversal.constant2[JsObject, JMap[String, Any]](JsObject.empty)
-    else {
-      val dataName = extraData.toSeq
-      dataName.foldLeft[Traversal[JsObject, JMap[String, Any], Converter[JsObject, JMap[String, Any]]]](
-        traversal.onRawMap[JsObject, JMap[String, Any], Converter[JsObject, JMap[String, Any]]](_.project(dataName.head, dataName.tail: _*))(_ =>
-          JsObject.empty
-        )
-      ) {
-        case (f, "case") => addData("case")(caseParent)(f)
-        case (f, "caseId") => addData("caseId")(caseParentId)(f)
-        case (f, "caseTemplate") => addData("caseTemplate")(caseTemplateParent)(f)
-        case (f, "caseTemplateId") => addData("caseTemplateId")(caseTemplateParentId)(f)
-        case (f, "isOwner") => addData("isOwner")(isOwner)(f)
-        case (f, "shareCount") => addData("shareCount")(shareCount)(f)
-        case (f, _) => f
-      }
-    }
+  def actionRequired(implicit authContext: AuthContext): Traversal.V[Task] => Traversal[JsValue, JBoolean, Converter[JsValue, JBoolean]] =
+    _.actionRequired.domainMap(JsBoolean(_))
+
+  def actionRequiredMap(implicit authContext: AuthContext):
+      Traversal.V[Task] => Traversal[JsValue, JList[JMap[String, Any]], Converter[JsValue, JList[JMap[String, Any]]]] =
+    _.actionRequiredMap.fold.domainMap(_.toMap.toJson)
+
+  def taskStatsRenderer(extraData: Set[String])(
+      implicit authContext: AuthContext
+  ): Traversal.V[Task] => JsTraversal = { implicit traversal =>
+    baseRenderer(extraData, traversal, {
+      case (f, "case") => addData("case", f)(caseParent)
+      case (f, "caseId") => addData("caseId", f)(caseParentId)
+      case (f, "caseTemplate") => addData("caseTemplate", f)(caseTemplateParent)
+      case (f, "caseTemplateId") => addData("caseTemplateId", f)(caseTemplateParentId)
+      case (f, "isOwner") => addData("isOwner", f)(isOwner)
+      case (f, "shareCount") => addData("shareCount", f)(shareCount)
+      case (f, "actionRequired") => addData("actionRequired", f)(actionRequired)
+      case (f, "actionRequiredMap") => addData("actionRequiredMap", f)(actionRequiredMap)
+      case (f, _) => f
+    })
   }
 }
diff --git a/thehive/app/org/thp/thehive/models/SchemaUpdaterActor.scala b/thehive/app/org/thp/thehive/models/SchemaUpdaterActor.scala
index 1823d030ae..f9edf6ea0f 100644
--- a/thehive/app/org/thp/thehive/models/SchemaUpdaterActor.scala
+++ b/thehive/app/org/thp/thehive/models/SchemaUpdaterActor.scala
@@ -4,16 +4,15 @@ import akka.actor.{Actor, ActorRef, ActorSystem, PoisonPill, Props}
 import akka.cluster.singleton.{ClusterSingletonManager, ClusterSingletonManagerSettings, ClusterSingletonProxy, ClusterSingletonProxySettings}
 import akka.pattern.ask
 import akka.util.Timeout
-import javax.inject.{Inject, Provider, Singleton}
 import org.thp.scalligraph.janus.JanusDatabase
 import org.thp.scalligraph.models.Database
 import org.thp.thehive.ClusterSetup
 import org.thp.thehive.services.LocalUserSrv
 import play.api.{Configuration, Logger}
 
+import javax.inject.{Inject, Provider, Singleton}
 import scala.concurrent.duration.{DurationInt, FiniteDuration}
 import scala.concurrent.{Await, ExecutionContext}
-import scala.util.{Failure, Try}
 
 @Singleton
 class DatabaseProvider @Inject() (
@@ -23,7 +22,6 @@ class DatabaseProvider @Inject() (
     actorSystem: ActorSystem,
     clusterSetup: ClusterSetup // this dependency is here to ensure that cluster setup is finished
 ) extends Provider[Database] {
-  import SchemaUpdaterActor._
 
   lazy val dbInitialisationTimeout: FiniteDuration = configuration.get[FiniteDuration]("db.initialisationTimeout")
   lazy val schemaUpdaterActor: ActorRef = {
@@ -54,27 +52,25 @@ class DatabaseProvider @Inject() (
 
   override def get(): Database = {
     implicit val timeout: Timeout = Timeout(dbInitialisationTimeout)
-    Await.result(schemaUpdaterActor ? RequestDBStatus(databaseInstance), timeout.duration) match {
-      case DBStatus(status) =>
-        status.get // if the status is a failure, throw an exception.
-        database.asInstanceOf[Database]
+    Await.result(schemaUpdaterActor ? RequestDB(databaseInstance), timeout.duration) match {
+      case DBReady => database.asInstanceOf[Database]
     }
   }
 }
 
-object SchemaUpdaterActor {
-  case class RequestDBStatus(databaseInstanceId: String)
-  case class DBStatus(status: Try[Unit])
-}
+sealed trait SchemaUpdaterMessage
+case class RequestDB(databaseInstanceId: String) extends SchemaUpdaterMessage
+case object DBReady extends SchemaUpdaterMessage
+case object Update extends SchemaUpdaterMessage
 
 class SchemaUpdaterActor @Inject() (theHiveSchema: TheHiveSchemaDefinition, database: Database) extends Actor {
-  import SchemaUpdaterActor._
+
   lazy val logger: Logger = Logger(getClass)
-  final case object Update
+
   implicit val ec: ExecutionContext = context.dispatcher
 
   var originalConnectionIds: Set[String] = Set.empty
 
-  def update(): Try[Unit] = {
+  def update(): Unit = {
     theHiveSchema
       .update(database)(LocalUserSrv.getSystemAuthContext)
       .map(_ => logger.info("Database is up-to-date"))
@@ -82,7 +78,10 @@ class SchemaUpdaterActor @Inject() (theHiveSchema: TheHiveSchemaDefinition, data
         case error => logger.error(s"Database with TheHiveSchema schema update failure", error)
       }
     logger.info("Install eventual missing indexes")
-    database.addSchemaIndexes(theHiveSchema)
+    database.addSchemaIndexes(theHiveSchema).recover {
+      case error => logger.error(s"Database with TheHiveSchema index update failure", error)
+    }
+    ()
   }
 
   override def preStart(): Unit = {
@@ -94,6 +93,7 @@ class SchemaUpdaterActor @Inject() (theHiveSchema: TheHiveSchemaDefinition, data
   }
 
   def hasUnknownConnections(instanceIds: Set[String]): Boolean = (originalConnectionIds -- instanceIds).nonEmpty
+
   def dropUnknownConnections(instanceIds: Set[String]): Unit =
     database match {
       case jdb: JanusDatabase => jdb.dropConnections((originalConnectionIds -- instanceIds).toSeq)
@@ -101,38 +101,30 @@ class SchemaUpdaterActor @Inject() (theHiveSchema: TheHiveSchemaDefinition, data
 
   override def receive: Receive = {
-    case RequestDBStatus(instanceId) =>
+    case RequestDB(instanceId) =>
       val instanceIds = Set(instanceId)
       if (hasUnknownConnections(instanceIds)) {
        logger.info("Database has
unknown connections, wait 5 seconds for full cluster initialisation") context.system.scheduler.scheduleOnce(5.seconds, self, Update) - context.become(receive(Failure(new Exception("Update delayed")), instanceIds, Seq(sender))) + context.become(receive(instanceIds, Seq(sender))) } else { logger.info("Database is ready to be updated") - val status = update() - sender ! DBStatus(status) - context.become(receive(status, instanceIds, Nil)) + update() + sender ! DBReady + context.become(receive(instanceIds, Nil)) } - } - def receive(status: Try[Unit], instanceIds: Set[String], waitingClients: Seq[ActorRef]): Receive = { - case RequestDBStatus(instanceId) if waitingClients.nonEmpty => - context.become(receive(status, instanceIds + instanceId, waitingClients :+ sender)) - case RequestDBStatus(_) => - status.fold( - { _ => - logger.info("Retry to update database") - val newStatus = update() - sender ! DBStatus(newStatus) - context.become(receive(newStatus, instanceIds, waitingClients)) - }, - _ => sender ! DBStatus(status) - ) - case Update => - logger.info("Drop unknown connections and update the database") - dropUnknownConnections(instanceIds) - val newStatus = update() - waitingClients.foreach(_ ! DBStatus(newStatus)) - context.become(receive(newStatus, instanceIds, Nil)) + def receive(instanceIds: Set[String], waitingClients: Seq[ActorRef]): Receive = { + case RequestDB(instanceId) if waitingClients.nonEmpty => + context.become(receive(instanceIds + instanceId, waitingClients :+ sender)) + case RequestDB(_) => + sender ! DBReady + case Update => + logger.info("Drop unknown connections and update the database") + dropUnknownConnections(instanceIds) + update() + waitingClients.foreach(_ ! DBReady) + context.become(receive(instanceIds, Nil)) + } } } diff --git a/thehive/app/org/thp/thehive/models/SchemaUpdaterSerializer.scala b/thehive/app/org/thp/thehive/models/SchemaUpdaterSerializer.scala new file mode 100644 index 0000000000..8a0ba3ea82 --- /dev/null +++ b/thehive/app/org/thp/thehive/models/SchemaUpdaterSerializer.scala @@ -0,0 +1,27 @@ +package org.thp.thehive.models + +import akka.serialization.Serializer + +import java.io.NotSerializableException + +class SchemaUpdaterSerializer extends Serializer { + override def identifier: Int = 272437668 + + override def includeManifest: Boolean = false + + override def toBinary(o: AnyRef): Array[Byte] = + o match { + case RequestDB(instanceId) => 0.toByte +: instanceId.getBytes + case DBReady => Array(1) + case Update => Array(2) + case _ => throw new NotSerializableException + } + + override def fromBinary(bytes: Array[Byte], manifest: Option[Class[_]]): AnyRef = + bytes(0) match { + case 0 => RequestDB(new String(bytes.tail)) + case 1 => DBReady + case 2 => Update + case _ => throw new NotSerializableException + } +} diff --git a/thehive/app/org/thp/thehive/models/Share.scala b/thehive/app/org/thp/thehive/models/Share.scala index 0b5fc13646..fb54294efa 100644 --- a/thehive/app/org/thp/thehive/models/Share.scala +++ b/thehive/app/org/thp/thehive/models/Share.scala @@ -15,7 +15,7 @@ case class ShareCase() case class ShareObservable() @BuildEdgeEntity[Share, Task] -case class ShareTask() +case class ShareTask(actionRequired: Boolean = false) @BuildEdgeEntity[Share, Profile] case class ShareProfile() @@ -28,8 +28,3 @@ case class RichShare(share: Share with Entity, caseId: EntityId, organisationNam def _updatedAt: Option[Date] = share._updatedAt def owner: Boolean = share.owner } - -//object RichShare { -// def apply(`case`: Case with Entity, 
organisation: Organisation with Entity, profile: Profile with Entity): RichShare = -// RichShare(`case`._id, organisation.name, profile.permissions) -//} diff --git a/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala b/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala index eeab7f15fd..da0fe21586 100644 --- a/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala +++ b/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala @@ -31,7 +31,7 @@ class TheHiveSchemaDefinition @Inject() extends Schema with UpdatableSchema { val operations: Operations = Operations(name) .addProperty[Option[Boolean]]("Observable", "seen") .updateGraph("Add manageConfig permission to org-admin profile", "Profile") { traversal => - Try(traversal.unsafeHas("name", "org-admin").raw.property("permissions", "manageConfig").iterate()) + traversal.unsafeHas("name", "org-admin").raw.property("permissions", "manageConfig").iterate() Success(()) } .updateGraph("Remove duplicate custom fields", "CustomField") { traversal => @@ -84,6 +84,11 @@ class TheHiveSchemaDefinition @Inject() extends Schema with UpdatableSchema { .iterate() Success(()) } + .addProperty[Boolean]("ShareTask", "actionRequired") + .updateGraph("Add actionRequired property", "Share") { traversal => + traversal.outE[ShareTask].raw.property("actionRequired", false).iterate() + Success(()) + } val reflectionClasses = new Reflections( new ConfigurationBuilder() diff --git a/thehive/app/org/thp/thehive/services/AlertSrv.scala b/thehive/app/org/thp/thehive/services/AlertSrv.scala index 49c91f328d..0d7de34c4b 100644 --- a/thehive/app/org/thp/thehive/services/AlertSrv.scala +++ b/thehive/app/org/thp/thehive/services/AlertSrv.scala @@ -1,9 +1,5 @@ package org.thp.thehive.services -import java.lang.{Long => JLong} -import java.util.{Date, List => JList, Map => JMap} - -import javax.inject.{Inject, Named, Singleton} import org.apache.tinkerpop.gremlin.process.traversal.P import org.apache.tinkerpop.gremlin.structure.Graph import org.thp.scalligraph.auth.{AuthContext, Permission} @@ -24,6 +20,9 @@ import org.thp.thehive.services.ObservableOps._ import org.thp.thehive.services.OrganisationOps._ import play.api.libs.json.{JsObject, Json} +import java.lang.{Long => JLong} +import java.util.{Date, List => JList, Map => JMap} +import javax.inject.{Inject, Named, Singleton} import scala.util.{Failure, Success, Try} @Singleton @@ -101,7 +100,10 @@ class AlertSrv @Inject() ( .flatMap(auditSrv.alert.update(_, updatedFields)) } - def updateTags(alert: Alert with Entity, tags: Set[Tag with Entity])(implicit graph: Graph, authContext: AuthContext): Try[Unit] = { + def updateTags(alert: Alert with Entity, tags: Set[Tag with Entity])(implicit + graph: Graph, + authContext: AuthContext + ): Try[(Set[Tag with Entity], Set[Tag with Entity])] = { val (tagsToAdd, tagsToRemove) = get(alert) .tags .toIterator @@ -114,11 +116,14 @@ class AlertSrv @Inject() ( _ <- tagsToAdd.toTry(alertTagSrv.create(AlertTag(), alert, _)) _ = get(alert).removeTags(tagsToRemove) _ <- auditSrv.alert.update(alert, Json.obj("tags" -> tags.map(_.toString))) - } yield () + } yield (tagsToAdd, tagsToRemove) } - def updateTagNames(alert: Alert with Entity, tags: Set[String])(implicit graph: Graph, authContext: AuthContext): Try[Unit] = + def updateTagNames(alert: Alert with Entity, tags: Set[String])(implicit + graph: Graph, + authContext: AuthContext + ): Try[(Set[Tag with Entity], Set[Tag with Entity])] = tags.toTry(tagSrv.getOrCreate).flatMap(t => updateTags(alert,
t.toSet)) def addTags(alert: Alert with Entity, tags: Set[String])(implicit graph: Graph, authContext: AuthContext): Try[Unit] = { @@ -237,34 +242,36 @@ class AlertSrv @Inject() ( graph: Graph, authContext: AuthContext ): Try[RichCase] = - get(alert.alert).`case`.richCase.getOrFail("Case").orElse { - for { - caseTemplate <- - alert - .caseTemplate - .map(ct => caseTemplateSrv.get(EntityIdOrName(ct)).richCaseTemplate.getOrFail("CaseTemplate")) - .flip - customField = alert.customFields.map(f => InputCustomFieldValue(f.name, f.value, f.order)) - case0 = Case( - number = 0, - title = caseTemplate.flatMap(_.titlePrefix).getOrElse("") + alert.title, - description = alert.description, - severity = alert.severity, - startDate = new Date, - endDate = None, - flag = false, - tlp = alert.tlp, - pap = alert.pap, - status = CaseStatus.Open, - summary = None - ) + auditSrv.mergeAudits { + get(alert.alert).`case`.richCase.getOrFail("Case").orElse { + for { + caseTemplate <- + alert + .caseTemplate + .map(ct => caseTemplateSrv.get(EntityIdOrName(ct)).richCaseTemplate.getOrFail("CaseTemplate")) + .flip + customField = alert.customFields.map(f => InputCustomFieldValue(f.name, f.value, f.order)) + case0 = Case( + number = 0, + title = caseTemplate.flatMap(_.titlePrefix).getOrElse("") + alert.title, + description = alert.description, + severity = alert.severity, + startDate = new Date, + endDate = None, + flag = false, + tlp = alert.tlp, + pap = alert.pap, + status = CaseStatus.Open, + summary = None + ) - createdCase <- caseSrv.create(case0, user, organisation, alert.tags.toSet, customField, caseTemplate, Nil) - _ <- importObservables(alert.alert, createdCase.`case`) - _ <- alertCaseSrv.create(AlertCase(), alert.alert, createdCase.`case`) - _ <- markAsRead(alert._id) - } yield createdCase - } + createdCase <- caseSrv.create(case0, user, organisation, alert.tags.toSet, customField, caseTemplate, Nil) + _ <- importObservables(alert.alert, createdCase.`case`) + _ <- alertCaseSrv.create(AlertCase(), alert.alert, createdCase.`case`) + _ <- markAsRead(alert._id) + } yield createdCase + } + }(richCase => auditSrv.`case`.create(richCase.`case`, richCase.toJson)) def mergeInCase(alertId: EntityIdOrName, caseId: EntityIdOrName)(implicit graph: Graph, authContext: AuthContext): Try[Case with Entity] = for { diff --git a/thehive/app/org/thp/thehive/services/AttachmentSrv.scala b/thehive/app/org/thp/thehive/services/AttachmentSrv.scala index cc3165c5a3..77e39fcd62 100644 --- a/thehive/app/org/thp/thehive/services/AttachmentSrv.scala +++ b/thehive/app/org/thp/thehive/services/AttachmentSrv.scala @@ -1,14 +1,11 @@ package org.thp.thehive.services -import java.io.InputStream -import java.nio.file.Files - import akka.NotUsed import akka.stream.scaladsl.{Source, StreamConverters} import akka.stream.{IOResult, Materializer} import akka.util.ByteString -import javax.inject.{Inject, Named, Singleton} import org.apache.tinkerpop.gremlin.structure.Graph +import org.thp.scalligraph.NotFoundError import org.thp.scalligraph.auth.AuthContext import org.thp.scalligraph.controllers.FFile import org.thp.scalligraph.models.{Database, Entity} @@ -20,6 +17,9 @@ import org.thp.thehive.models.Attachment import org.thp.thehive.services.AttachmentOps._ import play.api.Configuration +import java.io.InputStream +import java.nio.file.Files +import javax.inject.{Inject, Named, Singleton} import scala.concurrent.Future import scala.util.Try @@ -61,6 +61,20 @@ class AttachmentSrv @Inject() (configuration: Configuration, storageSrv: Storage 
storageSrv.saveBinary("attachment", id, data).flatMap(_ => createEntity(Attachment(filename, size, contentType, hs, id))) } + def duplicate(filename: String, contentType: String, attachmentId: String)(implicit + graph: Graph, + authContext: AuthContext + ): Try[Attachment with Entity] = { + val (size, hashes) = getByName(attachmentId).headOption match { + case Some(a) => (a.size, a.hashes) + case None => + val s = storageSrv.getSize("attachment", attachmentId).getOrElse(throw NotFoundError(s"Attachment $attachmentId not found")) + val hs = hashers.fromInputStream(storageSrv.loadBinary("attachment", attachmentId)) + (s, hs) + } + createEntity(Attachment(filename, size, contentType, hashes, attachmentId)) + } + override def getByName(name: String)(implicit graph: Graph): Traversal.V[Attachment] = startTraversal.getByAttachmentId(name) @@ -81,7 +95,7 @@ object AttachmentOps { implicit class AttachmentOpsDefs(traversal: Traversal.V[Attachment]) { def getByAttachmentId(attachmentId: String): Traversal.V[Attachment] = traversal.has(_.attachmentId, attachmentId) - def visible(implicit authContext: AuthContext): Traversal.V[Attachment] = traversal // TODO + def visible: Traversal.V[Attachment] = traversal // TODO } } diff --git a/thehive/app/org/thp/thehive/services/CaseSrv.scala b/thehive/app/org/thp/thehive/services/CaseSrv.scala index d2e2e0c88f..291a1147af 100644 --- a/thehive/app/org/thp/thehive/services/CaseSrv.scala +++ b/thehive/app/org/thp/thehive/services/CaseSrv.scala @@ -1,8 +1,8 @@ package org.thp.thehive.services import java.util.{Map => JMap} - import akka.actor.ActorRef + import javax.inject.{Inject, Named, Singleton} import org.apache.tinkerpop.gremlin.process.traversal.{Order, P} import org.apache.tinkerpop.gremlin.structure.{Graph, Vertex} @@ -54,7 +54,7 @@ class CaseSrv @Inject() ( override def createEntity(e: Case)(implicit graph: Graph, authContext: AuthContext): Try[Case with Entity] = super.createEntity(e).map { `case` => - integrityCheckActor ! IntegrityCheckActor.EntityAdded("Case") + integrityCheckActor ! 
EntityAdded("Case") `case` } @@ -176,18 +176,8 @@ class CaseSrv @Inject() ( authContext: AuthContext ): Try[Unit] = { val alreadyExistInThatCase = richObservable - .dataOrAttachment - .fold( - _ => - observableSrv - .get(richObservable.observable) - .filteredSimilar - .visible - .`case` - .hasId(`case`._id) - .exists, - attachment => get(`case`).share.observables.attachments.has(_.attachmentId, attachment.attachmentId).exists - ) || get(`case`).observables.filter(_.hasId(richObservable.observable._id)).exists + .data + .fold(false)(data => get(`case`).observables.data.has(_.data, data.data).exists) if (alreadyExistInThatCase) Failure(CreateError("Observable already exists")) @@ -563,6 +553,10 @@ object CaseOps { .dedup def alert: Traversal.V[Alert] = traversal.in[AlertCase].v[Alert] + + def isActionRequired(implicit authContext: AuthContext): Traversal[Boolean, Boolean, Converter.Identity[Boolean]] = + traversal.choose(_.share(authContext).outE[ShareTask].has(_.actionRequired, true), true, false) + } // implicit class CaseCustomFieldsOpsDefs(traversal: Traversal.E[CaseCustomField]) extends CustomFieldValueOpsDefs(traversal) diff --git a/thehive/app/org/thp/thehive/services/CaseTemplateSrv.scala b/thehive/app/org/thp/thehive/services/CaseTemplateSrv.scala index 22e4b17ad5..00e12667c3 100644 --- a/thehive/app/org/thp/thehive/services/CaseTemplateSrv.scala +++ b/thehive/app/org/thp/thehive/services/CaseTemplateSrv.scala @@ -42,7 +42,7 @@ class CaseTemplateSrv @Inject() ( startTraversal.getByName(name) override def createEntity(e: CaseTemplate)(implicit graph: Graph, authContext: AuthContext): Try[CaseTemplate with Entity] = { - integrityCheckActor ! IntegrityCheckActor.EntityAdded("CaseTemplate") + integrityCheckActor ! EntityAdded("CaseTemplate") super.createEntity(e) } @@ -202,7 +202,7 @@ object CaseTemplateOps { _.by .by(_.organisation.value(_.name)) .by(_.tags.fold) - .by(_.tasks.richTask.fold) + .by(_.tasks.richTaskWithoutActionRequired.fold) .by( _.outE[CaseTemplateCustomField] .as(caseTemplateCustomFieldLabel) diff --git a/thehive/app/org/thp/thehive/services/CustomFieldSrv.scala b/thehive/app/org/thp/thehive/services/CustomFieldSrv.scala index 7a1bffbd75..51be2623bc 100644 --- a/thehive/app/org/thp/thehive/services/CustomFieldSrv.scala +++ b/thehive/app/org/thp/thehive/services/CustomFieldSrv.scala @@ -25,7 +25,7 @@ class CustomFieldSrv @Inject() (auditSrv: AuditSrv, organisationSrv: Organisatio ) extends VertexSrv[CustomField] { override def createEntity(e: CustomField)(implicit graph: Graph, authContext: AuthContext): Try[CustomField with Entity] = { - integrityCheckActor ! IntegrityCheckActor.EntityAdded("CustomField") + integrityCheckActor ! EntityAdded("CustomField") super.createEntity(e) } diff --git a/thehive/app/org/thp/thehive/services/DataSrv.scala b/thehive/app/org/thp/thehive/services/DataSrv.scala index b1a75c3a4d..1471129e20 100644 --- a/thehive/app/org/thp/thehive/services/DataSrv.scala +++ b/thehive/app/org/thp/thehive/services/DataSrv.scala @@ -21,7 +21,7 @@ class DataSrv @Inject() (@Named("integrity-check-actor") integrityCheckActor: Ac extends VertexSrv[Data] { override def createEntity(e: Data)(implicit graph: Graph, authContext: AuthContext): Try[Data with Entity] = super.createEntity(e).map { data => - integrityCheckActor ! IntegrityCheckActor.EntityAdded("Data") + integrityCheckActor ! 
EntityAdded("Data") data } diff --git a/thehive/app/org/thp/thehive/services/FlowActor.scala b/thehive/app/org/thp/thehive/services/FlowActor.scala index 5c3411498a..b92fa907ab 100644 --- a/thehive/app/org/thp/thehive/services/FlowActor.scala +++ b/thehive/app/org/thp/thehive/services/FlowActor.scala @@ -21,15 +21,13 @@ import play.api.cache.SyncCacheApi import scala.concurrent.duration.FiniteDuration -object FlowActor { - case class FlowId(organisation: EntityIdOrName, caseId: Option[EntityIdOrName]) { - override def toString: String = s"$organisation;${caseId.getOrElse("-")}" - } - case class AuditIds(ids: Seq[EntityId]) +sealed trait FlowMessage +case class FlowId(organisation: EntityIdOrName, caseId: Option[EntityIdOrName]) extends FlowMessage { + override def toString: String = s"$organisation;${caseId.getOrElse("-")}" } +case class AuditIds(ids: Seq[EntityId]) extends FlowMessage class FlowActor extends Actor { - import FlowActor._ lazy val injector: Injector = GuiceAkkaExtension(context.system).injector lazy val cache: SyncCacheApi = injector.getInstance(classOf[SyncCacheApi]) @@ -43,6 +41,7 @@ class FlowActor extends Actor { lazy val eventSrv: EventSrv = injector.getInstance(classOf[EventSrv]) override def preStart(): Unit = eventSrv.subscribe(StreamTopic(), self) + override def postStop(): Unit = eventSrv.unsubscribe(StreamTopic(), self) override def receive: Receive = { case flowId @ FlowId(organisation, caseId) => val auditIds = cache.getOrElseUpdate(flowId.toString) { diff --git a/thehive/app/org/thp/thehive/services/FlowSerializer.scala b/thehive/app/org/thp/thehive/services/FlowSerializer.scala new file mode 100644 index 0000000000..9cff3137d6 --- /dev/null +++ b/thehive/app/org/thp/thehive/services/FlowSerializer.scala @@ -0,0 +1,32 @@ +package org.thp.thehive.services + +import akka.serialization.Serializer +import org.thp.scalligraph.{EntityId, EntityIdOrName} + +import java.io.NotSerializableException + +class FlowSerializer extends Serializer { + override def identifier: Int = -1165729876 + + override def includeManifest: Boolean = false + + override def toBinary(o: AnyRef): Array[Byte] = + o match { + case FlowId(organisation, None) => 0.toByte +: organisation.toString.getBytes + case FlowId(organisation, Some(caseId)) => 1.toByte +: s"$organisation|$caseId".getBytes + case AuditIds(ids) => 2.toByte +: ids.map(_.value).mkString("|").getBytes + case _ => throw new NotSerializableException + } + + override def fromBinary(bytes: Array[Byte], manifest: Option[Class[_]]): AnyRef = + bytes(0) match { + case 0 => FlowId(EntityIdOrName(new String(bytes.tail)), None) + case 1 => + new String(bytes.tail).split('|') match { + case Array(organisation, caseId) => FlowId(EntityIdOrName(organisation), Some(EntityIdOrName(caseId))) + case _ => throw new NotSerializableException + } + case 2 => AuditIds(new String(bytes.tail).split('|').toSeq.map(EntityId.apply)) + case _ => throw new NotSerializableException + } +} diff --git a/thehive/app/org/thp/thehive/services/ImpactStatusSrv.scala b/thehive/app/org/thp/thehive/services/ImpactStatusSrv.scala index 490ad61f6b..43b9ab1a33 100644 --- a/thehive/app/org/thp/thehive/services/ImpactStatusSrv.scala +++ b/thehive/app/org/thp/thehive/services/ImpactStatusSrv.scala @@ -23,7 +23,7 @@ class ImpactStatusSrv @Inject() (@Named("integrity-check-actor") integrityCheckA startTraversal.getByName(name) override def createEntity(e: ImpactStatus)(implicit graph: Graph, authContext: AuthContext): Try[ImpactStatus with Entity] = { - integrityCheckActor ! 
IntegrityCheckActor.EntityAdded("ImpactStatus") + integrityCheckActor ! EntityAdded("ImpactStatus") super.createEntity(e) } diff --git a/thehive/app/org/thp/thehive/services/IntegrityCheckActor.scala b/thehive/app/org/thp/thehive/services/IntegrityCheckActor.scala index 208c9db491..e864d75cd2 100644 --- a/thehive/app/org/thp/thehive/services/IntegrityCheckActor.scala +++ b/thehive/app/org/thp/thehive/services/IntegrityCheckActor.scala @@ -18,14 +18,12 @@ import scala.collection.immutable import scala.concurrent.duration.{Duration, FiniteDuration} import scala.util.Success -object IntegrityCheckActor { - case class EntityAdded(name: String) -} +sealed trait IntegrityCheckMessage +case class EntityAdded(name: String) extends IntegrityCheckMessage +case class NeedCheck(name: String) extends IntegrityCheckMessage +case class Check(name: String) extends IntegrityCheckMessage class IntegrityCheckActor() extends Actor { - case class NeedCheck(name: String) - case class Check(name: String) - import IntegrityCheckActor._ lazy val logger: Logger = Logger(getClass) lazy val injector: Injector = GuiceAkkaExtension(context.system).injector @@ -44,10 +42,8 @@ class IntegrityCheckActor() extends Actor { def interval(name: String): FiniteDuration = configuration.getOptional[FiniteDuration](s"integrityCheck.$name.interval").getOrElse(defaultInitalDelay) - lazy val integrityCheckMap: Map[String, IntegrityCheckOps[_]] = { - + lazy val integrityCheckMap: Map[String, IntegrityCheckOps[_]] = integrityCheckOps.map(d => d.name -> d).toMap - } def check(name: String): Unit = integrityCheckMap.get(name).foreach(_.check()) override def preStart(): Unit = { diff --git a/thehive/app/org/thp/thehive/services/IntegrityCheckSerializer.scala b/thehive/app/org/thp/thehive/services/IntegrityCheckSerializer.scala new file mode 100644 index 0000000000..4ab8dc9650 --- /dev/null +++ b/thehive/app/org/thp/thehive/services/IntegrityCheckSerializer.scala @@ -0,0 +1,27 @@ +package org.thp.thehive.services + +import akka.serialization.Serializer + +import java.io.NotSerializableException + +class IntegrityCheckSerializer extends Serializer { + override def identifier: Int = -604584588 + + override def includeManifest: Boolean = false + + override def toBinary(o: AnyRef): Array[Byte] = + o match { + case EntityAdded(name) => 0.toByte +: name.getBytes + case NeedCheck(name) => 1.toByte +: name.getBytes + case Check(name) => 2.toByte +: name.getBytes + case _ => throw new NotSerializableException + } + + override def fromBinary(bytes: Array[Byte], manifest: Option[Class[_]]): AnyRef = + bytes(0) match { + case 0 => EntityAdded(new String(bytes.tail)) + case 1 => NeedCheck(new String(bytes.tail)) + case 2 => Check(new String(bytes.tail)) + case _ => throw new NotSerializableException + } +} diff --git a/thehive/app/org/thp/thehive/services/LogSrv.scala b/thehive/app/org/thp/thehive/services/LogSrv.scala index 9e0aa4f5b5..801f2c82a0 100644 --- a/thehive/app/org/thp/thehive/services/LogSrv.scala +++ b/thehive/app/org/thp/thehive/services/LogSrv.scala @@ -1,8 +1,5 @@ package org.thp.thehive.services -import java.util -import scala.util.Success -import javax.inject.{Inject, Named, Singleton} import org.apache.tinkerpop.gremlin.structure.Graph import org.thp.scalligraph.EntityIdOrName import org.thp.scalligraph.auth.{AuthContext, Permission} @@ -16,9 +13,11 @@ import org.thp.thehive.controllers.v1.Conversion._ import org.thp.thehive.models._ import org.thp.thehive.services.LogOps._ import org.thp.thehive.services.TaskOps._ -import 
play.api.libs.json.{JsObject, Json} +import play.api.libs.json.JsObject -import scala.util.Try +import java.util +import javax.inject.{Inject, Named, Singleton} +import scala.util.{Success, Try} @Singleton class LogSrv @Inject() (attachmentSrv: AttachmentSrv, auditSrv: AuditSrv, taskSrv: TaskSrv, userSrv: UserSrv)(implicit @@ -27,32 +26,17 @@ class LogSrv @Inject() (attachmentSrv: AttachmentSrv, auditSrv: AuditSrv, taskSr val taskLogSrv = new EdgeSrv[TaskLog, Task, Log] val logAttachmentSrv = new EdgeSrv[LogAttachment, Log, Attachment] - def create(log: Log, task: Task with Entity)(implicit graph: Graph, authContext: AuthContext): Try[Log with Entity] = + def create(log: Log, task: Task with Entity, file: Option[FFile])(implicit graph: Graph, authContext: AuthContext): Try[RichLog] = for { createdLog <- createEntity(log) _ <- taskLogSrv.create(TaskLog(), task, createdLog) user <- userSrv.current.getOrFail("User") // user is used only if task status is waiting but the code is cleaner _ <- if (task.status == TaskStatus.Waiting) taskSrv.updateStatus(task, user, TaskStatus.InProgress) else Success(()) - _ <- auditSrv.log.create(createdLog, task, RichLog(createdLog, Nil).toJson) - } yield createdLog - - def addAttachment(log: Log with Entity, file: FFile)(implicit graph: Graph, authContext: AuthContext): Try[Attachment with Entity] = - for { - task <- get(log).task.getOrFail("Task") - attachment <- attachmentSrv.create(file) - _ <- addAttachment(log, attachment) - _ <- auditSrv.log.update(log, task, Json.obj("attachment" -> attachment.name)) - } yield attachment - - def addAttachment( - log: Log with Entity, - attachment: Attachment with Entity - )(implicit graph: Graph, authContext: AuthContext): Try[Attachment with Entity] = - for { - _ <- logAttachmentSrv.create(LogAttachment(), log, attachment) - task <- get(log).task.getOrFail("Task") - _ <- auditSrv.log.update(log, task, Json.obj("attachment" -> attachment.name)) - } yield attachment + attachment <- file.map(attachmentSrv.create).flip + _ <- attachment.map(logAttachmentSrv.create(LogAttachment(), createdLog, _)).flip + richLog = RichLog(createdLog, Nil) + _ <- auditSrv.log.create(createdLog, task, richLog.toJson) + } yield richLog def cascadeRemove(log: Log with Entity)(implicit graph: Graph, authContext: AuthContext): Try[Unit] = for { diff --git a/thehive/app/org/thp/thehive/services/ObservableSrv.scala b/thehive/app/org/thp/thehive/services/ObservableSrv.scala index dc47f08504..b95f40f7aa 100644 --- a/thehive/app/org/thp/thehive/services/ObservableSrv.scala +++ b/thehive/app/org/thp/thehive/services/ObservableSrv.scala @@ -235,7 +235,8 @@ object ObservableOps { .profile .domainMap(profile => profile.permissions & authContext.permissions) - def organisations: Traversal.V[Organisation] = traversal.in[ShareObservable].in[OrganisationShare].v[Organisation] + def organisations: Traversal.V[Organisation] = + traversal.coalesceIdent(_.in[ShareObservable].in[OrganisationShare], _.in[AlertObservable].out[AlertOrganisation]).v[Organisation] def origin: Traversal.V[Organisation] = shares.has(_.owner, true).organisation diff --git a/thehive/app/org/thp/thehive/services/ObservableTypeSrv.scala b/thehive/app/org/thp/thehive/services/ObservableTypeSrv.scala index 6b7fafc470..1a3c2fff26 100644 --- a/thehive/app/org/thp/thehive/services/ObservableTypeSrv.scala +++ b/thehive/app/org/thp/thehive/services/ObservableTypeSrv.scala @@ -27,7 +27,7 @@ class ObservableTypeSrv @Inject() (@Named("integrity-check-actor") integrityChec override def exists(e: 
ObservableType)(implicit graph: Graph): Boolean = startTraversal.getByName(e.name).exists override def createEntity(e: ObservableType)(implicit graph: Graph, authContext: AuthContext): Try[ObservableType with Entity] = { - integrityCheckActor ! IntegrityCheckActor.EntityAdded("ObservableType") + integrityCheckActor ! EntityAdded("ObservableType") super.createEntity(e) } diff --git a/thehive/app/org/thp/thehive/services/OrganisationSrv.scala b/thehive/app/org/thp/thehive/services/OrganisationSrv.scala index 69af5f84de..6d4115acd8 100644 --- a/thehive/app/org/thp/thehive/services/OrganisationSrv.scala +++ b/thehive/app/org/thp/thehive/services/OrganisationSrv.scala @@ -36,7 +36,7 @@ class OrganisationSrv @Inject() ( val organisationShareSrv = new EdgeSrv[OrganisationShare, Organisation, Share] override def createEntity(e: Organisation)(implicit graph: Graph, authContext: AuthContext): Try[Organisation with Entity] = { - integrityCheckActor ! IntegrityCheckActor.EntityAdded("Organisation") + integrityCheckActor ! EntityAdded("Organisation") super.createEntity(e) } @@ -162,7 +162,7 @@ object OrganisationOps { if (authContext.isPermitted(Permissions.manageOrganisation)) traversal else - traversal.filter(_.visibleOrganisationsTo.users.current) + traversal.filter(_.visibleOrganisationsTo.current) def richOrganisation: Traversal[RichOrganisation, JMap[String, Any], Converter[RichOrganisation, JMap[String, Any]]] = traversal diff --git a/thehive/app/org/thp/thehive/services/ProfileSrv.scala b/thehive/app/org/thp/thehive/services/ProfileSrv.scala index 0a066028b6..e0941b7581 100644 --- a/thehive/app/org/thp/thehive/services/ProfileSrv.scala +++ b/thehive/app/org/thp/thehive/services/ProfileSrv.scala @@ -29,7 +29,7 @@ class ProfileSrv @Inject() ( lazy val orgAdmin: Profile with Entity = db.roTransaction(graph => getOrFail(EntityName(Profile.orgAdmin.name))(graph)).get override def createEntity(e: Profile)(implicit graph: Graph, authContext: AuthContext): Try[Profile with Entity] = { - integrityCheckActor ! IntegrityCheckActor.EntityAdded("Profile") + integrityCheckActor ! EntityAdded("Profile") super.createEntity(e) } diff --git a/thehive/app/org/thp/thehive/services/ResolutionStatusSrv.scala b/thehive/app/org/thp/thehive/services/ResolutionStatusSrv.scala index d66d6e1ce4..02e8bbc873 100644 --- a/thehive/app/org/thp/thehive/services/ResolutionStatusSrv.scala +++ b/thehive/app/org/thp/thehive/services/ResolutionStatusSrv.scala @@ -23,7 +23,7 @@ class ResolutionStatusSrv @Inject() (@Named("integrity-check-actor") integrityCh startTraversal.getByName(name) override def createEntity(e: ResolutionStatus)(implicit graph: Graph, authContext: AuthContext): Try[ResolutionStatus with Entity] = { - integrityCheckActor ! IntegrityCheckActor.EntityAdded("Resolution") + integrityCheckActor ! 
EntityAdded("Resolution") super.createEntity(e) } diff --git a/thehive/app/org/thp/thehive/services/ShareSrv.scala b/thehive/app/org/thp/thehive/services/ShareSrv.scala index e0d1ae4c7d..a385687ed3 100644 --- a/thehive/app/org/thp/thehive/services/ShareSrv.scala +++ b/thehive/app/org/thp/thehive/services/ShareSrv.scala @@ -149,7 +149,7 @@ class ShareSrv @Inject() ( get(share) .`case` .tasks - .filterNot(_.shares.hasId(share._id)) + .filterNot(_.taskToShares.hasId(share._id)) .toIterator .toTry(shareTaskSrv.create(ShareTask(), share, _)) @@ -213,7 +213,7 @@ class ShareSrv @Inject() ( )(implicit graph: Graph, authContext: AuthContext): Try[Unit] = { val (orgsToAdd, orgsToRemove) = taskSrv .get(task) - .shares + .taskToShares .organisation .toIterator .foldLeft((organisations.toSet, Set.empty[Organisation with Entity])) { @@ -239,7 +239,7 @@ class ShareSrv @Inject() ( )(implicit graph: Graph, authContext: AuthContext): Try[Unit] = { val existingOrgs = taskSrv .get(task) - .shares + .taskToShares .organisation .toSeq diff --git a/thehive/app/org/thp/thehive/services/TagSrv.scala b/thehive/app/org/thp/thehive/services/TagSrv.scala index a035558eaa..9620a7b3d0 100644 --- a/thehive/app/org/thp/thehive/services/TagSrv.scala +++ b/thehive/app/org/thp/thehive/services/TagSrv.scala @@ -53,7 +53,7 @@ class TagSrv @Inject() (appConfig: ApplicationConfig, @Named("integrity-check-ac } override def createEntity(e: Tag)(implicit graph: Graph, authContext: AuthContext): Try[Tag with Entity] = { - integrityCheckActor ! IntegrityCheckActor.EntityAdded("Tag") + integrityCheckActor ! EntityAdded("Tag") super.createEntity(e) } diff --git a/thehive/app/org/thp/thehive/services/TaskSrv.scala b/thehive/app/org/thp/thehive/services/TaskSrv.scala index 574d074392..5602d2e68a 100644 --- a/thehive/app/org/thp/thehive/services/TaskSrv.scala +++ b/thehive/app/org/thp/thehive/services/TaskSrv.scala @@ -1,9 +1,5 @@ package org.thp.thehive.services -import java.util -import java.util.Date - -import javax.inject.{Inject, Named, Provider, Singleton} import org.apache.tinkerpop.gremlin.structure.Graph import org.thp.scalligraph.EntityIdOrName import org.thp.scalligraph.auth.{AuthContext, Permission} @@ -18,10 +14,13 @@ import org.thp.thehive.services.ShareOps._ import org.thp.thehive.services.TaskOps._ import play.api.libs.json.{JsNull, JsObject, Json} +import java.lang.{Boolean => JBoolean} +import java.util.{Date, Map => JMap} +import javax.inject.{Inject, Named, Provider, Singleton} import scala.util.{Failure, Success, Try} @Singleton -class TaskSrv @Inject() (caseSrvProvider: Provider[CaseSrv], auditSrv: AuditSrv)(implicit +class TaskSrv @Inject() (caseSrvProvider: Provider[CaseSrv], auditSrv: AuditSrv, organisationSrv: OrganisationSrv)(implicit @Named("with-thehive-schema") db: Database ) extends VertexSrv[Task] { @@ -48,7 +47,7 @@ class TaskSrv @Inject() (caseSrvProvider: Provider[CaseSrv], auditSrv: AuditSrv) get(task).caseTemplate.headOption match { case None => get(task) - .shares + .taskToShares .toIterator .toTry { share => auditSrv @@ -114,6 +113,23 @@ class TaskSrv @Inject() (caseSrvProvider: Provider[CaseSrv], auditSrv: AuditSrv) _ <- auditSrv.task.update(task, Json.obj("assignee" -> user.login)) } yield () } + + def actionRequired( + task: Task with Entity, + organisation: Organisation with Entity, + actionRequired: Boolean + )(implicit graph: Graph, authContext: AuthContext): Try[Unit] = { + val details = Json.obj(s"actionRequired.${organisation.name}" -> actionRequired) + auditSrv.task.update(task, 
details).map { _ => + organisationSrv + .get(organisation) + .out[OrganisationShare] + .outE[ShareTask] + .filter(_.inV.v[Task].hasId(task._id)) + .update(_.actionRequired, actionRequired) + .iterate() + } + } } object TaskOps { @@ -129,7 +145,7 @@ object TaskOps { def can(permission: Permission)(implicit authContext: AuthContext): Traversal.V[Task] = if (authContext.permissions.contains(permission)) - traversal.filter(_.shares.filter(_.profile.has(_.permissions, permission)).organisation.current) + traversal.filter(_.taskToShares.filter(_.profile.has(_.permissions, permission)).organisation.current) else traversal.limit(0) @@ -148,10 +164,11 @@ object TaskOps { def unassigned: Traversal.V[Task] = traversal.filterNot(_.outE[TaskUser]) def organisations: Traversal.V[Organisation] = traversal.in[ShareTask].in[OrganisationShare].v[Organisation] + def organisations(permission: Permission): Traversal.V[Organisation] = - shares.filter(_.profile.has(_.permissions, permission)).organisation + taskToShares.filter(_.profile.has(_.permissions, permission)).organisation - def origin: Traversal.V[Organisation] = shares.has(_.owner, true).organisation + def origin: Traversal.V[Organisation] = taskToShares.has(_.owner, true).organisation def assignableUsers(implicit authContext: AuthContext): Traversal.V[User] = organisations(Permissions.manageTask) @@ -159,7 +176,31 @@ object TaskOps { .users(Permissions.manageTask) .dedup - def richTask: Traversal[RichTask, util.Map[String, Any], Converter[RichTask, util.Map[String, Any]]] = + def actionRequired(implicit authContext: AuthContext): Traversal[Boolean, JBoolean, Converter[Boolean, JBoolean]] = + traversal.inE[ShareTask].filter(_.outV.v[Share].organisation.current).value(_.actionRequired) + + def actionRequiredMap(implicit + authContext: AuthContext + ): Traversal[(String, Boolean), JMap[String, Any], Converter[(String, Boolean), JMap[String, Any]]] = + traversal + .inE[ShareTask] + .filter(_.outV.v[Share].organisation.visible) + .project( + _.by(_.outV.v[Share].organisation.value(_.name)) + .byValue(_.actionRequired) + ) + + def richTask: Traversal[RichTask, JMap[String, Any], Converter[RichTask, JMap[String, Any]]] = + traversal + .project( + _.by + .by(_.out[TaskUser].v[User].fold) + ) + .domainMap { + case (task, user) => RichTask(task, user.headOption) + } + + def richTaskWithoutActionRequired: Traversal[RichTask, JMap[String, Any], Converter[RichTask, JMap[String, Any]]] = traversal .project( _.by @@ -171,7 +212,7 @@ object TaskOps { def richTaskWithCustomRenderer[D, G, C <: Converter[D, G]]( entityRenderer: Traversal.V[Task] => Traversal[D, G, C] - ): Traversal[(RichTask, D), util.Map[String, Any], Converter[(RichTask, D), util.Map[String, Any]]] = + ): Traversal[(RichTask, D), JMap[String, Any], Converter[(RichTask, D), JMap[String, Any]]] = traversal .project( _.by @@ -185,7 +226,7 @@ object TaskOps { def unassign(): Unit = traversal.outE[TaskUser].remove() - def shares: Traversal.V[Share] = traversal.in[ShareTask].v[Share] + def taskToShares: Traversal.V[Share] = traversal.in[ShareTask].v[Share] def share(implicit authContext: AuthContext): Traversal.V[Share] = share(authContext.organisation) diff --git a/thehive/app/org/thp/thehive/services/UserSrv.scala b/thehive/app/org/thp/thehive/services/UserSrv.scala index 0fb1f1e743..b236698813 100644 --- a/thehive/app/org/thp/thehive/services/UserSrv.scala +++ b/thehive/app/org/thp/thehive/services/UserSrv.scala @@ -42,7 +42,7 @@ class UserSrv @Inject() ( val userAttachmentSrv = new 
EdgeSrv[UserAttachment, User, Attachment] override def createEntity(e: User)(implicit graph: Graph, authContext: AuthContext): Try[User with Entity] = { - integrityCheckActor ! IntegrityCheckActor.EntityAdded("User") + integrityCheckActor ! EntityAdded("User") super.createEntity(e) } diff --git a/thehive/app/org/thp/thehive/services/notification/NotificationSerializer.scala b/thehive/app/org/thp/thehive/services/notification/NotificationSerializer.scala index 5f93113634..6a6a578708 100644 --- a/thehive/app/org/thp/thehive/services/notification/NotificationSerializer.scala +++ b/thehive/app/org/thp/thehive/services/notification/NotificationSerializer.scala @@ -15,9 +15,9 @@ class NotificationSerializer extends Serializer { */ def toBinary(o: AnyRef): Array[Byte] = o match { - case m: NotificationExecution => Json.toBytes(Json.toJson(m)) - case m: AuditNotificationMessage => Json.toBytes(Json.toJson(m)) - case _ => Array.empty[Byte] // Not serializable + case m: NotificationExecution => 0.toByte +: Json.toBytes(Json.toJson(m)) + case m: AuditNotificationMessage => 1.toByte +: Json.toBytes(Json.toJson(m)) + case _ => throw new NotSerializableException } /** @@ -26,11 +26,9 @@ */ @throws(classOf[NotSerializableException]) def fromBinary(bytes: Array[Byte], manifest: Option[Class[_]]): AnyRef = - manifest - .flatMap { - case c if c == classOf[NotificationExecution] => Json.parse(bytes).asOpt[NotificationExecution] - case c if c == classOf[AuditNotificationMessage] => Json.parse(bytes).asOpt[AuditNotificationMessage] - case _ => None - } - .getOrElse(throw new NotSerializableException) + bytes(0) match { + case 0 => Json.parse(bytes.tail).asOpt[NotificationExecution].getOrElse(throw new NotSerializableException) + case 1 => Json.parse(bytes.tail).asOpt[AuditNotificationMessage].getOrElse(throw new NotSerializableException) + case _ => throw new NotSerializableException + } } diff --git a/thehive/app/org/thp/thehive/services/notification/notifiers/Webhook.scala b/thehive/app/org/thp/thehive/services/notification/notifiers/Webhook.scala index 20bec8981e..024d84b732 100644 --- a/thehive/app/org/thp/thehive/services/notification/notifiers/Webhook.scala +++ b/thehive/app/org/thp/thehive/services/notification/notifiers/Webhook.scala @@ -93,7 +93,7 @@ class Webhook( def taskToJson: Traversal.V[Task] => Traversal[JsObject, JMap[String, Any], Converter[JsObject, JMap[String, Any]]] = _.project( - _.by(_.richTask.domainMap(_.toJson)) + _.by(_.richTaskWithoutActionRequired.domainMap(_.toJson)) .by(t => caseToJson(t.`case`)) ).domainMap { case (task, case0) => task.as[JsObject] + ("case" -> case0) diff --git a/thehive/conf/play/reference-overrides.conf b/thehive/conf/play/reference-overrides.conf index 4581bd5806..afb748cb43 100644 --- a/thehive/conf/play/reference-overrides.conf +++ b/thehive/conf/play/reference-overrides.conf @@ -22,10 +22,16 @@ akka.actor { serializers { stream = "org.thp.thehive.services.StreamSerializer" notification = "org.thp.thehive.services.notification.NotificationSerializer" + thehive-schema-updater = "org.thp.thehive.models.SchemaUpdaterSerializer" + flow = "org.thp.thehive.services.FlowSerializer" + integrity = "org.thp.thehive.services.IntegrityCheckSerializer" } serialization-bindings { "org.thp.thehive.services.StreamMessage" = stream "org.thp.thehive.services.notification.NotificationMessage" = notification + "org.thp.thehive.models.SchemaUpdaterMessage" = thehive-schema-updater + "org.thp.thehive.services.FlowMessage" = flow + "org.thp.thehive.services.IntegrityCheckMessage" = integrity } } diff --git
a/thehive/conf/reference.conf b/thehive/conf/reference.conf index f9f5bf54d3..403e23198c 100644 --- a/thehive/conf/reference.conf +++ b/thehive/conf/reference.conf @@ -41,6 +41,7 @@ stream.longPolling { maxWait: 1 second graceDuration: 100 milliseconds keepAlive: 5 minutes + pollingDuration: 1 second } tags { diff --git a/thehive/test/org/thp/thehive/DevStart.scala b/thehive/test/org/thp/thehive/DevStart.scala new file mode 100644 index 0000000000..21ab8291bc --- /dev/null +++ b/thehive/test/org/thp/thehive/DevStart.scala @@ -0,0 +1,63 @@ +package org.thp.thehive + +import java.io.File + +import play.api._ +import play.core.server.{RealServerProcess, ServerConfig, ServerProcess, ServerProvider} + +object DevStart extends App { + val process = new RealServerProcess(args) + val config = readConfig(process) + + val application: Application = { + val environment = Environment(config.rootDir, process.classLoader, Mode.Dev) + val context = ApplicationLoader.Context.create(environment) + val loader = ApplicationLoader(context) + loader.load(context) + } + Play.start(application) + + // Start the server + val serverProvider = ServerProvider.fromConfiguration(process.classLoader, config.configuration) + val server = serverProvider.createServer(config, application) + + process.addShutdownHook { + if (application.coordinatedShutdown.shutdownReason().isEmpty) + server.stop() + } + + def readConfig(process: ServerProcess) = { + val configuration: Configuration = { + val rootDirArg = process.args.headOption.map(new File(_)) + val rootDirConfig = rootDirArg.fold(Map.empty[String, String])(ServerConfig.rootDirConfig) + Configuration.load(process.classLoader, process.properties, rootDirConfig, allowMissingApplicationConf = true) + } + val rootDir: File = { + val path = configuration + .getOptional[String]("play.server.dir") + .getOrElse(sys.error("No root server path supplied")) + val file = new File(path) + if (!file.isDirectory) + sys.error(s"Bad root server path: $path") + file + } + + def parsePort(portType: String): Option[Int] = + configuration.getOptional[String](s"play.server.$portType.port").filter(_ != "disabled").map { str => + try Integer.parseInt(str) + catch { + case _: NumberFormatException => + sys.error(s"Invalid ${portType.toUpperCase} port: $str") + } + } + + val httpPort = parsePort("http") + val httpsPort = parsePort("https") + val address = configuration.getOptional[String]("play.server.http.address").getOrElse("0.0.0.0") + + if (httpPort.orElse(httpsPort).isEmpty) + sys.error("Must provide either an HTTP or HTTPS port") + + ServerConfig(rootDir, httpPort, httpsPort, address, Mode.Dev, process.properties, configuration) + } +} diff --git a/thehive/test/org/thp/thehive/controllers/v0/CaseCtrlTest.scala b/thehive/test/org/thp/thehive/controllers/v0/CaseCtrlTest.scala index f677c55520..edbc06388d 100644 --- a/thehive/test/org/thp/thehive/controllers/v0/CaseCtrlTest.scala +++ b/thehive/test/org/thp/thehive/controllers/v0/CaseCtrlTest.scala @@ -327,7 +327,7 @@ class CaseCtrlTest extends PlaySpecification with TestAppBuilder { status(result) must_=== 200 val resultCase = contentAsJson(result) - (resultCase \ "count").asOpt[Int] must beSome(2) + (resultCase \ "count").asOpt[Int] must beSome(3) (resultCase \ "testNamespace:testPredicate=\"t1\"" \ "count").asOpt[Int] must beSome(2) (resultCase \ "testNamespace:testPredicate=\"t2\"" \ "count").asOpt[Int] must beSome(1) (resultCase \ "testNamespace:testPredicate=\"t3\"" \ "count").asOpt[Int] must beSome(1) diff --git 
a/thehive/test/org/thp/thehive/controllers/v0/CaseTemplateCtrlTest.scala b/thehive/test/org/thp/thehive/controllers/v0/CaseTemplateCtrlTest.scala index 31c50cec90..27abdee9de 100644 --- a/thehive/test/org/thp/thehive/controllers/v0/CaseTemplateCtrlTest.scala +++ b/thehive/test/org/thp/thehive/controllers/v0/CaseTemplateCtrlTest.scala @@ -11,6 +11,8 @@ import play.api.libs.json.Json import play.api.test.{FakeRequest, PlaySpecification} class CaseTemplateCtrlTest extends PlaySpecification with TestAppBuilder { +// TODO: decide what to do with this unused test + // val dummyUserSrv = DummyUserSrv(userId = "admin@thehive.local", permissions = Permissions.all) // def getAndTestCaseTemplate(name: String, description: String)(body: OutputCaseTemplate => MatchResult[Any]) = { diff --git a/thehive/test/org/thp/thehive/controllers/v0/StatusCtrlTest.scala b/thehive/test/org/thp/thehive/controllers/v0/StatusCtrlTest.scala index d26f20a8d6..eb1921fae6 100644 --- a/thehive/test/org/thp/thehive/controllers/v0/StatusCtrlTest.scala +++ b/thehive/test/org/thp/thehive/controllers/v0/StatusCtrlTest.scala @@ -66,7 +66,8 @@ class StatusCtrlTest extends PlaySpecification with TestAppBuilder { "protectDownloadsWith" -> config.get[String]("datastore.attachment.password"), "authType" -> Seq("local", "key", "header"), "capabilities" -> Seq("changePassword", "setPassword", "authByKey"), - "ssoAutoLogin" -> config.get[Boolean]("user.autoCreateOnSso") + "ssoAutoLogin" -> config.get[Boolean]("user.autoCreateOnSso"), + "pollingDuration" -> 1000 ) ) diff --git a/thehive/test/org/thp/thehive/controllers/v0/TaskCtrlTest.scala b/thehive/test/org/thp/thehive/controllers/v0/TaskCtrlTest.scala index 42e78d7cdb..960137fc6a 100644 --- a/thehive/test/org/thp/thehive/controllers/v0/TaskCtrlTest.scala +++ b/thehive/test/org/thp/thehive/controllers/v0/TaskCtrlTest.scala @@ -1,7 +1,5 @@ package org.thp.thehive.controllers.v0 -import java.util.Date - import akka.stream.Materializer import io.scalaland.chimney.dsl._ import org.thp.scalligraph.models.Database @@ -15,6 +13,8 @@ import org.thp.thehive.services.{CaseSrv, TaskSrv} import play.api.libs.json.Json import play.api.test.{FakeRequest, PlaySpecification} +import java.util.Date + case class TestTask( title: String, group: Option[String] = None, diff --git a/thehive/test/org/thp/thehive/services/CaseSrvTest.scala b/thehive/test/org/thp/thehive/services/CaseSrvTest.scala index 258c2c38cc..fcf58ac2f8 100644 --- a/thehive/test/org/thp/thehive/services/CaseSrvTest.scala +++ b/thehive/test/org/thp/thehive/services/CaseSrvTest.scala @@ -207,7 +207,7 @@ class CaseSrvTest extends PlaySpecification with TestAppBuilder { "get correct next case number" in testApp { app => app[Database].roTransaction { implicit graph => - app[CaseSrv].nextCaseNumber shouldEqual 4 + app[CaseSrv].nextCaseNumber shouldEqual 6 } } diff --git a/thehive/test/org/thp/thehive/services/TaskSrvTest.scala b/thehive/test/org/thp/thehive/services/TaskSrvTest.scala new file mode 100644 index 0000000000..986fc6a507 --- /dev/null +++ b/thehive/test/org/thp/thehive/services/TaskSrvTest.scala @@ -0,0 +1,53 @@ +package org.thp.thehive.services + +import org.thp.scalligraph.auth.AuthContext +import org.thp.scalligraph.models.{Database, DummyUserSrv} +import org.thp.scalligraph.traversal.TraversalOps.TraversalOpsDefs +import org.thp.thehive.TestAppBuilder +import org.thp.thehive.services.TaskOps._ +import play.api.test.PlaySpecification + +class TaskSrvTest extends PlaySpecification with TestAppBuilder { + "task service" should { + + "set 
actionRequired for an organisation" in testApp { app => + app[Database].transaction { implicit graph => + implicit val authContext: AuthContext = DummyUserSrv(organisation = "cert").authContext + + val task = app[TaskSrv].startTraversal.has(_.title, "taskActionRequired1").getOrFail("Task").get + val cert = app[OrganisationSrv].startTraversal.has(_.name, "cert").getOrFail("Organisation").get + def getActionRequired = app[TaskSrv].get(task).actionRequired.toSeq.head + def getActionRequiredMap = app[TaskSrv].get(task).actionRequiredMap.toSeq.toMap + + getActionRequired must beEqualTo(false) + getActionRequiredMap must beEqualTo(Map("soc" -> false, "cert" -> false)) + + app[TaskSrv].actionRequired(task, cert, actionRequired = true) + + getActionRequired must beEqualTo(true) + getActionRequiredMap must beEqualTo(Map("soc" -> false, "cert" -> true)) + } + } + + "unset actionRequired for an organisation" in testApp { app => + app[Database].transaction { implicit graph => + implicit val authContext: AuthContext = DummyUserSrv(organisation = "cert").authContext + + val task = app[TaskSrv].startTraversal.has(_.title, "taskActionRequired2").getOrFail("Task").get + val cert = app[OrganisationSrv].startTraversal.has(_.name, "cert").getOrFail("Organisation").get + def getActionRequired = app[TaskSrv].get(task).actionRequired.toSeq.head + def getActionRequiredMap = app[TaskSrv].get(task).actionRequiredMap.toSeq.toMap + + getActionRequired must beEqualTo(true) + getActionRequiredMap must beEqualTo(Map("soc" -> false, "cert" -> true)) + + app[TaskSrv].actionRequired(task, cert, actionRequired = false) + + getActionRequired must beEqualTo(false) + getActionRequiredMap must beEqualTo(Map("soc" -> false, "cert" -> false)) + } + } + + } + +} diff --git a/thehive/test/resources/data/Case.json b/thehive/test/resources/data/Case.json index e7e28530e8..15a632b69a 100644 --- a/thehive/test/resources/data/Case.json +++ b/thehive/test/resources/data/Case.json @@ -34,5 +34,29 @@ "tlp": 2, "pap": 2, "status": "Open" + }, + { + "id": "caseActionRequired1", + "number": 4, + "title": "case#10", + "description": "description of case #10", + "severity": 3, + "startDate": 1531667370000, + "flag": false, + "tlp": 2, + "pap": 2, + "status": "Open" + }, + { + "id": "caseActionRequired2", + "number": 5, + "title": "case#11", + "description": "description of case #11", + "severity": 3, + "startDate": 1531667370000, + "flag": false, + "tlp": 2, + "pap": 2, + "status": "Open" } ] \ No newline at end of file diff --git a/thehive/test/resources/data/OrganisationShare.json b/thehive/test/resources/data/OrganisationShare.json index 36fb4352f3..4f715d24fa 100644 --- a/thehive/test/resources/data/OrganisationShare.json +++ b/thehive/test/resources/data/OrganisationShare.json @@ -2,5 +2,7 @@ {"from": "cert", "to": "case1-cert"}, {"from": "cert", "to": "case2-cert"}, {"from": "soc", "to": "case2-soc"}, - {"from": "soc", "to": "case3-soc"} + {"from": "soc", "to": "case3-soc"}, + {"from": "soc", "to": "case-actionRequired-soc"}, + {"from": "cert", "to": "case-actionRequired-cert"} ] \ No newline at end of file diff --git a/thehive/test/resources/data/Share.json b/thehive/test/resources/data/Share.json index f10a76f4e2..9f3e4d9714 100644 --- a/thehive/test/resources/data/Share.json +++ b/thehive/test/resources/data/Share.json @@ -2,5 +2,7 @@ {"id": "case1-cert", "owner": true}, {"id": "case2-cert", "owner": true}, {"id": "case2-soc", "owner": false}, - {"id": "case3-soc", "owner": true} + {"id": "case3-soc", "owner": true}, + {"id": 
"case-actionRequired-soc", "owner": true}, + {"id": "case-actionRequired-cert", "owner": true} ] \ No newline at end of file diff --git a/thehive/test/resources/data/ShareCase.json b/thehive/test/resources/data/ShareCase.json index f3e1b02f94..6a4a54d2fa 100644 --- a/thehive/test/resources/data/ShareCase.json +++ b/thehive/test/resources/data/ShareCase.json @@ -2,5 +2,7 @@ {"from": "case1-cert", "to": "case1"}, {"from": "case2-cert", "to": "case2"}, {"from": "case2-soc", "to": "case2"}, - {"from": "case3-soc", "to": "case3"} + {"from": "case3-soc", "to": "case3"}, + {"from": "case-actionRequired-soc", "to": "caseActionRequired1"}, + {"from": "case-actionRequired-cert", "to": "caseActionRequired2"} ] \ No newline at end of file diff --git a/thehive/test/resources/data/ShareTask.json b/thehive/test/resources/data/ShareTask.json index 075623d6ef..3d0b13d34e 100644 --- a/thehive/test/resources/data/ShareTask.json +++ b/thehive/test/resources/data/ShareTask.json @@ -1,7 +1,11 @@ [ - {"from": "case1-cert", "to": "task1"}, - {"from": "case1-cert", "to": "task2"}, - {"from": "case2-soc", "to": "task3"}, - {"from": "case2-cert", "to": "task4"}, - {"from": "case3-soc", "to": "task5"} + {"from": "case1-cert", "to": "task1", "actionRequired": false}, + {"from": "case1-cert", "to": "task2", "actionRequired": false}, + {"from": "case2-soc", "to": "task3", "actionRequired": false}, + {"from": "case2-cert", "to": "task4", "actionRequired": false}, + {"from": "case3-soc", "to": "task5", "actionRequired": false}, + {"from": "case-actionRequired-soc", "to": "taskActionRequired1", "actionRequired": false}, + {"from": "case-actionRequired-cert", "to": "taskActionRequired1", "actionRequired": false}, + {"from": "case-actionRequired-soc", "to": "taskActionRequired2", "actionRequired": false}, + {"from": "case-actionRequired-cert", "to": "taskActionRequired2", "actionRequired": true} ] \ No newline at end of file diff --git a/thehive/test/resources/data/Task.json b/thehive/test/resources/data/Task.json index 06db6dc2b3..b285d22a9b 100644 --- a/thehive/test/resources/data/Task.json +++ b/thehive/test/resources/data/Task.json @@ -43,5 +43,23 @@ "status": "Waiting", "flag": true, "order": 0 + }, + { + "id": "taskActionRequired1", + "title": "taskActionRequired1", + "group": "groupActionRequired", + "description": "description task Required", + "status": "Waiting", + "flag": true, + "order": 0 + }, + { + "id": "taskActionRequired2", + "title": "taskActionRequired2", + "group": "groupActionRequired", + "description": "description task Required", + "status": "Waiting", + "flag": true, + "order": 0 } ] \ No newline at end of file