diff --git a/.drone.yml b/.drone.yml index 16354f6ca6..2b2ebfffeb 100644 --- a/.drone.yml +++ b/.drone.yml @@ -8,7 +8,7 @@ steps: - name: submodules image: alpine/git commands: - - git submodule update --recursive --init --remote + - git submodule update --recursive --init # Restore cache of downloaded dependencies - name: restore-cache diff --git a/CHANGELOG.md b/CHANGELOG.md index b5b000c62a..c0058adc08 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,34 @@ # Change Log +## [4.0.5](https://github.com/TheHive-Project/TheHive/milestone/68) (2021-02-08) + +**Implemented enhancements:** + +- Support for using asterisks by tag-filtering [\#933](https://github.com/TheHive-Project/TheHive/issues/933) +- "Close tasks and case" deletes tasks instead of closing them [\#1755](https://github.com/TheHive-Project/TheHive/issues/1755) +- [Enhancement] Add schema update status in status API [\#1782](https://github.com/TheHive-Project/TheHive/issues/1782) + +**Closed issues:** + +- Running TheHive 4.0.1-1 it appears that application.log is no longer rotated. 
[\#1746](https://github.com/TheHive-Project/TheHive/issues/1746) + +**Fixed bugs:** + +- [Bug] RPM package does not create secret.conf file [\#1248](https://github.com/TheHive-Project/TheHive/issues/1248) +- [Bug] More webhooks or more detailed webhook events [\#1739](https://github.com/TheHive-Project/TheHive/issues/1739) +- [Bug] Webhooks opening infinite amount of files [\#1743](https://github.com/TheHive-Project/TheHive/issues/1743) +- [Bug] Dashboards are always created as private [\#1754](https://github.com/TheHive-Project/TheHive/issues/1754) +- [Bug]/Unable to get MISP organisation [\#1758](https://github.com/TheHive-Project/TheHive/issues/1758) +- [Bug] TheHive 4 Cluster and Haproxy with roundrobin [\#1760](https://github.com/TheHive-Project/TheHive/issues/1760) +- [Bug] TheHive -> MISP works. MISP -> TheHive not. [\#1761](https://github.com/TheHive-Project/TheHive/issues/1761) +- [Bug] TheHive 4.0.4 cannot show tasks created in previous versions [\#1763](https://github.com/TheHive-Project/TheHive/issues/1763) +- [Bug] `Imported` property in Alerts not taken into account [\#1769](https://github.com/TheHive-Project/TheHive/issues/1769) +- [Bug] Sort field list in dashboard widget filters [\#1771](https://github.com/TheHive-Project/TheHive/issues/1771) +- [Bug] Dashboard on organisation (and other) doesn't work [\#1772](https://github.com/TheHive-Project/TheHive/issues/1772) +- [BUG] Cannot link multiple organisations together [\#1773](https://github.com/TheHive-Project/TheHive/issues/1773) +- [Bug] Fix pivoting from donuts to search pages on custom fields based widgets [\#1777](https://github.com/TheHive-Project/TheHive/issues/1777) +- [Bug] Fix custom field filters in v0 APIs 
[\#1779](https://github.com/TheHive-Project/TheHive/issues/1779) + ## [4.0.4](https://github.com/TheHive-Project/TheHive/milestone/67) (2021-01-12) **Implemented enhancements:** @@ -269,7 +298,6 @@ - SearchSrv.NotFoundError [\#1242](https://github.com/TheHive-Project/TheHive/issues/1242) - Assignee is not changeable [\#1243](https://github.com/TheHive-Project/TheHive/issues/1243) - [Bug] In TheHive, a user is a member of one or more organisations. One user has a profile for each organisation and can have different profiles for different organisations. [\#1247](https://github.com/TheHive-Project/TheHive/issues/1247) -- [Bug] RPM package does not create secret.conf file [\#1248](https://github.com/TheHive-Project/TheHive/issues/1248) - [Bug] Unable to save new or imported dashboards in 4.0-RC1 [\#1250](https://github.com/TheHive-Project/TheHive/issues/1250) - [Bug] Header Variable authentication does not work [\#1251](https://github.com/TheHive-Project/TheHive/issues/1251) - Filtering by custom fields returns no results [\#1252](https://github.com/TheHive-Project/TheHive/issues/1252) diff --git a/ScalliGraph b/ScalliGraph index 33fcd753fa..213e4478d3 160000 --- a/ScalliGraph +++ b/ScalliGraph @@ -1 +1 @@ -Subproject commit 33fcd753fa102062ab54411fef169c847f1501db +Subproject commit 213e4478d349afeb3e9978c39042458fde6a61b9 diff --git a/build.sbt b/build.sbt index b91c194ce2..4de4641c4a 100644 --- a/build.sbt +++ b/build.sbt @@ -2,7 +2,7 @@ import Dependencies._ import com.typesafe.sbt.packager.Keys.bashScriptDefines import org.thp.ghcl.Milestone -val thehiveVersion = "4.0.4-1" +val thehiveVersion = "4.0.5-1" val scala212 = "2.12.12" val scala213 = "2.13.1" val supportedScalaVersions = List(scala212, scala213) diff --git a/conf/migration-logback.xml b/conf/migration-logback.xml index 
ff8293acf6..b003c354ff 100644 --- a/conf/migration-logback.xml +++ b/conf/migration-logback.xml @@ -5,7 +5,7 @@ converterClass="play.api.libs.logback.ColoredLevel"/> - ./logs/migration.log + ${application.home:-.}/logs/migration.log ${application.home:-.}/logs/application.%i.log.zip 1 diff --git a/cortex/connector/src/main/scala/org/thp/thehive/connector/cortex/models/CortexSchemaDefinition.scala b/cortex/connector/src/main/scala/org/thp/thehive/connector/cortex/models/CortexSchemaDefinition.scala index ef457c42e8..1727dc9526 100644 --- a/cortex/connector/src/main/scala/org/thp/thehive/connector/cortex/models/CortexSchemaDefinition.scala +++ b/cortex/connector/src/main/scala/org/thp/thehive/connector/cortex/models/CortexSchemaDefinition.scala @@ -14,8 +14,7 @@ import scala.reflect.runtime.{universe => ru} class CortexSchemaDefinition @Inject() () extends Schema with UpdatableSchema { lazy val logger: Logger = Logger(getClass) - val name: String = "thehive-cortex" - val operations: Operations = Operations(name) + val operations: Operations = Operations("thehive-cortex") lazy val reflectionClasses = new Reflections( new ConfigurationBuilder() diff --git a/cortex/connector/src/main/scala/org/thp/thehive/connector/cortex/services/Connector.scala b/cortex/connector/src/main/scala/org/thp/thehive/connector/cortex/services/Connector.scala index 99435c545d..5bdb6d70e1 100644 --- a/cortex/connector/src/main/scala/org/thp/thehive/connector/cortex/services/Connector.scala +++ b/cortex/connector/src/main/scala/org/thp/thehive/connector/cortex/services/Connector.scala @@ -2,10 +2,13 @@ package org.thp.thehive.connector.cortex.services import akka.actor.ActorSystem import akka.stream.Materializer + import javax.inject.{Inject, Singleton} import org.thp.cortex.client.{CortexClient, CortexClientConfig} +import org.thp.scalligraph.models.SchemaStatus import org.thp.scalligraph.services.config.ApplicationConfig.finiteDurationFormat import 
org.thp.scalligraph.services.config.{ApplicationConfig, ConfigItem} +import org.thp.thehive.connector.cortex.models.CortexSchemaDefinition import org.thp.thehive.models.HealthStatus import org.thp.thehive.services.{Connector => TheHiveConnector} import play.api.libs.json.{JsObject, Json} @@ -17,6 +20,7 @@ import scala.util.{Failure, Success} @Singleton class Connector @Inject() ( appConfig: ApplicationConfig, + schemaDefinition: CortexSchemaDefinition, mat: Materializer, implicit val system: ActorSystem, implicit val ec: ExecutionContext @@ -44,10 +48,11 @@ class Connector @Inject() ( .traverse(clients)(_.getHealth) .foreach { healthStatus => val distinctStatus = healthStatus.toSet.map(HealthStatus.withName) - cachedHealth = if (distinctStatus.contains(HealthStatus.Ok)) { - if (distinctStatus.size > 1) HealthStatus.Warning else HealthStatus.Ok - } else if (distinctStatus.contains(HealthStatus.Error)) HealthStatus.Error - else HealthStatus.Warning + cachedHealth = + if (distinctStatus.contains(HealthStatus.Ok)) + if (distinctStatus.size > 1) HealthStatus.Warning else HealthStatus.Ok + else if (distinctStatus.contains(HealthStatus.Error)) HealthStatus.Error + else HealthStatus.Warning system.scheduler.scheduleOnce(statusCheckInterval)(updateHealth()) } @@ -67,9 +72,10 @@ class Connector @Inject() ( } .foreach { statusDetails => val distinctStatus = statusDetails.map(_._3).toSet - val healthStatus = if (distinctStatus.contains("OK")) { - if (distinctStatus.size > 1) "WARNING" else "OK" - } else "ERROR" + val healthStatus = + if (distinctStatus.contains("OK")) + if (distinctStatus.size > 1) "WARNING" else "OK" + else "ERROR" cachedStatus = Json.obj( "enabled" -> true, @@ -83,4 +89,5 @@ class Connector @Inject() ( } updateStatus() + override def schemaStatus: Option[SchemaStatus] = schemaDefinition.schemaStatus } diff --git a/cortex/connector/src/test/scala/org/thp/thehive/connector/cortex/services/TestConnector.scala 
b/cortex/connector/src/test/scala/org/thp/thehive/connector/cortex/services/TestConnector.scala index 3cda6c562a..b6dc5cd410 100644 --- a/cortex/connector/src/test/scala/org/thp/thehive/connector/cortex/services/TestConnector.scala +++ b/cortex/connector/src/test/scala/org/thp/thehive/connector/cortex/services/TestConnector.scala @@ -2,15 +2,23 @@ package org.thp.thehive.connector.cortex.services import akka.actor.ActorSystem import akka.stream.Materializer + import javax.inject.{Inject, Singleton} import org.thp.cortex.client.CortexClient import org.thp.scalligraph.services.config.ApplicationConfig +import org.thp.thehive.connector.cortex.models.CortexSchemaDefinition import scala.concurrent.ExecutionContext @Singleton -class TestConnector @Inject() (client: CortexClient, appConfig: ApplicationConfig, mat: Materializer, system: ActorSystem, ec: ExecutionContext) - extends Connector(appConfig, mat, system, ec) { +class TestConnector @Inject() ( + client: CortexClient, + appConfig: ApplicationConfig, + schemaDefinition: CortexSchemaDefinition, + mat: Materializer, + system: ActorSystem, + ec: ExecutionContext +) extends Connector(appConfig, schemaDefinition, mat, system, ec) { override def clients: Seq[CortexClient] = Seq(client) override protected def updateHealth(): Unit = () diff --git a/frontend/app/scripts/services/api/DashboardSrv.js b/frontend/app/scripts/services/api/DashboardSrv.js index 8acf253e95..ba5394b896 100644 --- a/frontend/app/scripts/services/api/DashboardSrv.js +++ b/frontend/app/scripts/services/api/DashboardSrv.js @@ -233,6 +233,7 @@ _.each(metadata.entities, function(entity) { metadata[entity] = _.omit(data[entity], 'attributes'); metadata[entity].attributes = self._objectifyBy(data[entity].attributes, 'name'); + metadata[entity].attributeKeys = _.keys(metadata[entity].attributes).sort(); }); self.metadata[version] = metadata; diff --git a/frontend/app/scripts/services/common/QueryBuilderSrv.js 
b/frontend/app/scripts/services/common/QueryBuilderSrv.js index 9574a44460..93cf6b3ce3 100644 --- a/frontend/app/scripts/services/common/QueryBuilderSrv.js +++ b/frontend/app/scripts/services/common/QueryBuilderSrv.js @@ -54,11 +54,47 @@ if(values.length > 0) { var criterions = _.map(values, function(val) { - var v = {_like: {}}; + return {_like: { + _field: filter.field, + _value: val + }}; + }); - v._like[filter.field] = val; + var criteria = {}; + switch(operator) { + case 'all': + criteria = criterions.length === 1 ? criterions[0] : { _and: criterions }; + break; + case 'none': + criteria = { + _not: criterions.length === 1 ? criterions[0] : { _or: criterions } + }; + break; + default: + criteria = criterions.length === 1 ? criterions[0] : { _or: criterions }; + } - return v; + return criteria; + } + + return null; + }; + + this._buildQueryFromTagsFilter = function(fieldDef, filter) { + if (!filter || !filter.value) { + return null; + } + var operator = filter.value.operator || 'any'; + var values = _.pluck(filter.value.list, 'text'); + + if(values.length > 0) { + var criterions = _.map(values, function(val) { + return { + _like: { + _field: filter.field, + _value: val + } + }; }); var criteria = {}; @@ -71,6 +107,7 @@ _not: criterions.length === 1 ? criterions[0] : { _or: criterions } }; break; + //case 'any': default: criteria = criterions.length === 1 ? 
criterions[0] : { _or: criterions }; } @@ -167,6 +204,8 @@ return this._buildQueryFromDateFilter(fieldDef, filter); } else if(filter.type === 'boolean') { return this._buildQueryFromBooleanFilter(fieldDef, filter); + } else if(filter.field === 'tags') { + return this._buildQueryFromTagsFilter(fieldDef, filter); } else if(filter.type === 'user' || filter.field === 'tags' || filter.type === 'enumeration') { return this._buildQueryFromListFilter(fieldDef, filter); } else if(filter.type === 'string' && fieldDef.values.length === 0) { diff --git a/frontend/app/scripts/services/ui/GlobalSearchSrv.js b/frontend/app/scripts/services/ui/GlobalSearchSrv.js index d836197ddd..33b964690f 100644 --- a/frontend/app/scripts/services/ui/GlobalSearchSrv.js +++ b/frontend/app/scripts/services/ui/GlobalSearchSrv.js @@ -55,11 +55,22 @@ }; this.buildDefaultFilterValue = function(fieldDef, value) { - if(fieldDef.name === 'tags' || fieldDef.type === 'user' || fieldDef.values.length > 0) { + + var valueId = value.id; + var valueName = value.name; + + if(valueId.startsWith('"') && valueId.endsWith('"')) { + valueId = valueId.slice (1, valueId.length-1); + } + if(valueName.startsWith('"') && valueName.endsWith('"')) { + valueName = valueName.slice (1, valueName.length-1); + } + + if(fieldDef.type === 'string' || fieldDef.name === 'tags' || fieldDef.type === 'user' || fieldDef.values.length > 0) { return { operator: 'any', list: [{ - text: (fieldDef.type === 'number' || fieldDef.type === 'integer') ? Number.parseInt(value.id) : value.id, label:value.name + text: (fieldDef.type === 'number' || fieldDef.type === 'integer') ? 
Number.parseInt(valueId) : valueId, label:valueName }] }; } else { @@ -67,14 +78,14 @@ case 'number': case 'integer': return { - value: Number.parseInt(value.id) + value: Number.parseInt(valueId) }; case 'boolean': - return value.id === 'true'; + return valueId === 'true'; default: - return value.id; + return valueId; } - return value.id; + return valueId; } }; diff --git a/frontend/app/views/directives/dashboard/filters.html b/frontend/app/views/directives/dashboard/filters.html index 8143f917d1..192d2f1041 100644 --- a/frontend/app/views/directives/dashboard/filters.html +++ b/frontend/app/views/directives/dashboard/filters.html @@ -10,7 +10,7 @@ diff --git a/frontend/app/views/directives/dashboard/multiline/serie.filters.html b/frontend/app/views/directives/dashboard/multiline/serie.filters.html index 60c98c79ba..7851464da7 100644 --- a/frontend/app/views/directives/dashboard/multiline/serie.filters.html +++ b/frontend/app/views/directives/dashboard/multiline/serie.filters.html @@ -10,7 +10,7 @@ diff --git a/frontend/app/views/directives/dashboard/serie.filters.html b/frontend/app/views/directives/dashboard/serie.filters.html index 1026953b8c..b9e8f5ff80 100644 --- a/frontend/app/views/directives/dashboard/serie.filters.html +++ b/frontend/app/views/directives/dashboard/serie.filters.html @@ -10,7 +10,7 @@ diff --git a/frontend/app/views/directives/dashboard/text/serie.filters.html b/frontend/app/views/directives/dashboard/text/serie.filters.html index ff38f8c3b6..817c11ee34 100644 --- a/frontend/app/views/directives/dashboard/text/serie.filters.html +++ b/frontend/app/views/directives/dashboard/text/serie.filters.html @@ -10,7 +10,7 @@ diff --git a/frontend/app/views/partials/case/case.close.html b/frontend/app/views/partials/case/case.close.html index 5d45dbbe83..1abb66a9c8 100644 --- a/frontend/app/views/partials/case/case.close.html +++ b/frontend/app/views/partials/case/case.close.html @@ -6,7 +6,9 @@
- This case contains the following open or unassigned tasks + This case contains the following open or unassigned tasks. Closing the case will permanently remove the unassigned ones. +
+ This action cannot be undone.
diff --git a/frontend/bower.json b/frontend/bower.json index 6a6988cc69..4531f61020 100644 --- a/frontend/bower.json +++ b/frontend/bower.json @@ -1,6 +1,6 @@ { "name": "thehive", - "version": "4.0.4-1", + "version": "4.0.5-1", "license": "AGPL-3.0", "dependencies": { "jquery": "^3.4.1", diff --git a/frontend/package.json b/frontend/package.json index 05da8f241d..1e768df322 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -1,6 +1,6 @@ { "name": "thehive", - "version": "4.0.4-1", + "version": "4.0.5-1", "license": "AGPL-3.0", "repository": { "type": "git", diff --git a/misp/client/src/main/scala/org/thp/misp/dto/Organisation.scala b/misp/client/src/main/scala/org/thp/misp/dto/Organisation.scala index 1552d83b01..f8a6486110 100644 --- a/misp/client/src/main/scala/org/thp/misp/dto/Organisation.scala +++ b/misp/client/src/main/scala/org/thp/misp/dto/Organisation.scala @@ -4,7 +4,7 @@ import java.util.UUID import play.api.libs.json.{Json, Reads} -case class Organisation(id: String, name: String, description: String, uuid: UUID) +case class Organisation(id: String, name: String, description: Option[String], uuid: UUID) object Organisation { implicit val reads: Reads[Organisation] = Json.reads[Organisation] diff --git a/misp/connector/src/main/scala/org/thp/thehive/connector/misp/services/MispActor.scala b/misp/connector/src/main/scala/org/thp/thehive/connector/misp/services/MispActor.scala index cc9e0b8236..46a24bce0f 100644 --- a/misp/connector/src/main/scala/org/thp/thehive/connector/misp/services/MispActor.scala +++ b/misp/connector/src/main/scala/org/thp/thehive/connector/misp/services/MispActor.scala @@ -12,8 +12,7 @@ case object Synchro extends MispMessage class MispActor @Inject() ( connector: Connector, - mispImportSrv: MispImportSrv, - userSrv: UserSrv + mispImportSrv: MispImportSrv ) extends Actor { import context.dispatcher @@ -34,7 +33,7 @@ class MispActor @Inject() ( scheduledSynchronisation.cancel() logger.info(s"Synchronising MISP events 
for ${connector.clients.map(_.name).mkString(",")}") connector.clients.filter(_.canImport).foreach { mispClient => - mispImportSrv.syncMispEvents(mispClient)(userSrv.getSystemAuthContext) + mispImportSrv.syncMispEvents(mispClient) } logger.info("MISP synchronisation is complete") context.become(receive(context.system.scheduler.scheduleOnce(connector.syncInterval, self, Synchro))) diff --git a/misp/connector/src/main/scala/org/thp/thehive/connector/misp/services/MispImportSrv.scala b/misp/connector/src/main/scala/org/thp/thehive/connector/misp/services/MispImportSrv.scala index f4a5990d2b..39f8ebb1ca 100644 --- a/misp/connector/src/main/scala/org/thp/thehive/connector/misp/services/MispImportSrv.scala +++ b/misp/connector/src/main/scala/org/thp/thehive/connector/misp/services/MispImportSrv.scala @@ -6,7 +6,7 @@ import akka.util.ByteString import org.apache.tinkerpop.gremlin.process.traversal.P import org.apache.tinkerpop.gremlin.structure.Graph import org.thp.misp.dto.{Attribute, Event, Tag => MispTag} -import org.thp.scalligraph.auth.AuthContext +import org.thp.scalligraph.auth.{AuthContext, UserSrv} import org.thp.scalligraph.controllers.FFile import org.thp.scalligraph.models._ import org.thp.scalligraph.traversal.TraversalOps._ @@ -17,7 +17,7 @@ import org.thp.thehive.models._ import org.thp.thehive.services.AlertOps._ import org.thp.thehive.services.ObservableOps._ import org.thp.thehive.services.OrganisationOps._ -import org.thp.thehive.services._ +import org.thp.thehive.services.{UserSrv => _, _} import play.api.Logger import play.api.libs.json._ @@ -38,6 +38,7 @@ class MispImportSrv @Inject() ( attachmentSrv: AttachmentSrv, caseTemplateSrv: CaseTemplateSrv, auditSrv: AuditSrv, + userSrv: UserSrv, @Named("with-thehive-schema") db: Database, implicit val ec: ExecutionContext, implicit val mat: Materializer @@ -378,13 +379,12 @@ class MispImportSrv @Inject() ( } } - def syncMispEvents(client: TheHiveMispClient)(implicit authContext: AuthContext): Unit = + def 
syncMispEvents(client: TheHiveMispClient): Unit = client .currentOrganisationName .fold( error => logger.error("Unable to get MISP organisation", error), mispOrganisation => { - val caseTemplate = client.caseTemplate.flatMap { caseTemplateName => db.roTransaction { implicit graph => caseTemplateSrv.get(EntityName(caseTemplateName)).headOption @@ -406,6 +406,7 @@ class MispImportSrv @Inject() ( QueueIterator(queue).foreach { event => logger.debug(s"Importing event ${client.name}#${event.id} in organisation(s): ${organisations.mkString(",")}") organisations.foreach { organisation => + implicit val authContext: AuthContext = userSrv.getSystemAuthContext.changeOrganisation(organisation._id, Profile.admin.permissions) db.tryTransaction { implicit graph => auditSrv.mergeAudits { updateOrCreateAlert(client, organisation, mispOrganisation, event, caseTemplate) diff --git a/misp/connector/src/test/scala/org/thp/thehive/connector/misp/services/MispImportSrvTest.scala b/misp/connector/src/test/scala/org/thp/thehive/connector/misp/services/MispImportSrvTest.scala index 97042b73c9..46d2746f1c 100644 --- a/misp/connector/src/test/scala/org/thp/thehive/connector/misp/services/MispImportSrvTest.scala +++ b/misp/connector/src/test/scala/org/thp/thehive/connector/misp/services/MispImportSrvTest.scala @@ -1,24 +1,17 @@ package org.thp.thehive.connector.misp.services -import java.util.{Date, UUID} - import akka.stream.Materializer import akka.stream.scaladsl.Sink import org.thp.misp.dto.{Event, Organisation, Tag, User} +import org.thp.scalligraph.AppBuilder import org.thp.scalligraph.auth.AuthContext -import org.thp.scalligraph.models.{Database, DummyUserSrv} -import org.thp.scalligraph.traversal.TraversalOps._ -import org.thp.scalligraph.{AppBuilder, EntityName} +import org.thp.scalligraph.models.DummyUserSrv import org.thp.thehive.TestAppBuilder -import org.thp.thehive.models.{Alert, Permissions} -import org.thp.thehive.services.AlertOps._ -import 
org.thp.thehive.services.ObservableOps._ -import org.thp.thehive.services.OrganisationOps._ -import org.thp.thehive.services.{AlertSrv, OrganisationSrv} +import org.thp.thehive.models.Permissions import play.api.test.PlaySpecification +import java.util.{Date, UUID} import scala.concurrent.ExecutionContext -import scala.concurrent.duration.DurationInt class MispImportSrvTest(implicit ec: ExecutionContext) extends PlaySpecification with TestAppBuilder { sequential @@ -37,7 +30,7 @@ class MispImportSrvTest(implicit ec: ExecutionContext) extends PlaySpecification "get organisation" in testApp { app => await(app[TheHiveMispClient].getOrganisation("1")) must beEqualTo( - Organisation("1", "ORGNAME", "Automatically generated admin organisation", UUID.fromString("5d5d066f-cfa4-49da-995c-6d5b68257ab4")) + Organisation("1", "ORGNAME", Some("Automatically generated admin organisation"), UUID.fromString("5d5d066f-cfa4-49da-995c-6d5b68257ab4")) ) } diff --git a/package/debian/postinst b/package/debian/postinst index 4f0d036668..05daa43d5a 100755 --- a/package/debian/postinst +++ b/package/debian/postinst @@ -81,7 +81,7 @@ case "$1" in addUser thehive "" thehive "thehive daemon-user" "/bin/false" # Generate secret key if ! 
test -e /etc/thehive/secret.conf; then - key=$(tr -dc 'a-zA-Z0-9' < /dev/urandom | fold -w 64 | head -n 1) + key=$(dd if=/dev/urandom bs=1024 count=1 | tr -dc 'a-zA-Z0-9' | fold -w 64 | head -n 1) echo "play.http.secret.key=\"$key\"" > /etc/thehive/secret.conf fi @@ -99,4 +99,4 @@ case "$1" in echo "postinst called with unknown argument \`$1'" >&2 exit 1 ;; -esac \ No newline at end of file +esac diff --git a/package/docker/entrypoint b/package/docker/entrypoint index cbf58c58ff..2f355f0668 100755 --- a/package/docker/entrypoint +++ b/package/docker/entrypoint @@ -81,7 +81,7 @@ then then if test -z "${SECRET}" then - SECRET=$(cat /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w 64 | head -n 1) + SECRET=$(dd if=/dev/urandom bs=1024 count=1 | tr -dc 'a-zA-Z0-9' | fold -w 64 | head -n 1) test "${SHOW_SECRET}" = 1 && echo Using secret: ${SECRET} fi echo "play.http.secret.key = \"${SECRET}\"" >> ${CONFIG_FILE} diff --git a/package/logback.xml b/package/logback.xml index dca626919e..4b0ef2a674 100644 --- a/package/logback.xml +++ b/package/logback.xml @@ -7,7 +7,7 @@ /var/log/thehive/application.log - /var/log/logs/application.%i.log.zip + /var/log/thehive/application.%i.log.zip 1 10 diff --git a/thehive/app/org/thp/thehive/controllers/v0/AlertCtrl.scala b/thehive/app/org/thp/thehive/controllers/v0/AlertCtrl.scala index 9f1638998d..dd13a7bea7 100644 --- a/thehive/app/org/thp/thehive/controllers/v0/AlertCtrl.scala +++ b/thehive/app/org/thp/thehive/controllers/v0/AlertCtrl.scala @@ -21,10 +21,10 @@ import org.thp.thehive.services.OrganisationOps._ import org.thp.thehive.services.TagOps._ import org.thp.thehive.services.UserOps._ import org.thp.thehive.services._ -import play.api.libs.json.{JsArray, JsObject, Json} +import play.api.libs.json.{JsArray, JsBoolean, JsNumber, JsObject, JsString, JsValue, Json} import play.api.mvc.{Action, AnyContent, Results} -import java.util.{Base64, List => JList, Map => JMap} +import java.util.{Base64, Date, List => JList, Map => JMap} import 
javax.inject.{Inject, Named, Singleton} import scala.util.{Failure, Success, Try} @@ -430,24 +430,64 @@ class PublicAlert @Inject() ( .property("summary", UMapping.string.optional)(_.field.updatable) .property("user", UMapping.string)(_.field.updatable) .property("customFields", UMapping.jsonNative)(_.subSelect { - case (FPathElem(_, FPathElem(name, _)), alertSteps) => - alertSteps.customFields(EntityIdOrName(name)).jsonValue - case (_, alertSteps) => alertSteps.customFields.nameJsonValue.fold.domainMap(JsObject(_)) - }.custom { - case (FPathElem(_, FPathElem(name, _)), value, vertex, _, graph, authContext) => - for { - c <- alertSrv.getByIds(EntityId(vertex.id))(graph).getOrFail("Alert") - _ <- alertSrv.setOrCreateCustomField(c, InputCustomFieldValue(name, Some(value), None))(graph, authContext) - } yield Json.obj(s"customField.$name" -> value) - case (FPathElem(_, FPathEmpty), values: JsObject, vertex, _, graph, authContext) => - for { - c <- alertSrv.get(vertex)(graph).getOrFail("Alert") - cfv <- values.fields.toTry { case (n, v) => customFieldSrv.getOrFail(EntityIdOrName(n))(graph).map(_ -> v) } - _ <- alertSrv.updateCustomField(c, cfv)(graph, authContext) - } yield Json.obj("customFields" -> values) + case (FPathElem(_, FPathElem(name, _)), alerts) => + db + .roTransaction(implicit graph => customFieldSrv.get(EntityIdOrName(name)).value(_.`type`).getOrFail("CustomField")) + .map { + case CustomFieldType.boolean => alerts.customFields(EntityIdOrName(name)).value(_.booleanValue).domainMap(v => JsBoolean(v)) + case CustomFieldType.date => alerts.customFields(EntityIdOrName(name)).value(_.dateValue).domainMap(v => JsNumber(v.getTime)) + case CustomFieldType.float => alerts.customFields(EntityIdOrName(name)).value(_.floatValue).domainMap(v => JsNumber(v)) + case CustomFieldType.integer => alerts.customFields(EntityIdOrName(name)).value(_.integerValue).domainMap(v => JsNumber(v)) + case CustomFieldType.string => 
alerts.customFields(EntityIdOrName(name)).value(_.stringValue).domainMap(v => JsString(v)) + } + .getOrElse(alerts.constant2(null)) + case (_, caseSteps) => caseSteps.customFields.nameJsonValue.fold.domainMap(JsObject(_)) + } + .filter { + case (FPathElem(_, FPathElem(idOrName, _)), alerts) => + db + .roTransaction(implicit graph => customFieldSrv.get(EntityIdOrName(idOrName)).value(_.`type`).getOrFail("CustomField")) + .map { + case CustomFieldType.boolean => alerts.customFields(EntityIdOrName(idOrName)).value(_.booleanValue) + case CustomFieldType.date => alerts.customFields(EntityIdOrName(idOrName)).value(_.dateValue) + case CustomFieldType.float => alerts.customFields(EntityIdOrName(idOrName)).value(_.floatValue) + case CustomFieldType.integer => alerts.customFields(EntityIdOrName(idOrName)).value(_.integerValue) + case CustomFieldType.string => alerts.customFields(EntityIdOrName(idOrName)).value(_.stringValue) + } + .getOrElse(alerts.constant2(null)) + case (_, alerts) => alerts.constant2(null) + } + .converter { + case FPathElem(_, FPathElem(idOrName, _)) => + db + .roTransaction { implicit graph => + customFieldSrv.get(EntityIdOrName(idOrName)).value(_.`type`).getOrFail("CustomField") + } + .map { + case CustomFieldType.boolean => new Converter[Any, JsValue] { def apply(x: JsValue): Any = x.as[Boolean] } + case CustomFieldType.date => new Converter[Any, JsValue] { def apply(x: JsValue): Any = x.as[Date] } + case CustomFieldType.float => new Converter[Any, JsValue] { def apply(x: JsValue): Any = x.as[Double] } + case CustomFieldType.integer => new Converter[Any, JsValue] { def apply(x: JsValue): Any = x.as[Long] } + case CustomFieldType.string => new Converter[Any, JsValue] { def apply(x: JsValue): Any = x.as[String] } + } + .getOrElse((x: JsValue) => x) + case _ => (x: JsValue) => x + } + .custom { + case (FPathElem(_, FPathElem(name, _)), value, vertex, _, graph, authContext) => + for { + c <- alertSrv.getByIds(EntityId(vertex.id))(graph).getOrFail("Alert") 
+ _ <- alertSrv.setOrCreateCustomField(c, InputCustomFieldValue(name, Some(value), None))(graph, authContext) + } yield Json.obj(s"customField.$name" -> value) + case (FPathElem(_, FPathEmpty), values: JsObject, vertex, _, graph, authContext) => + for { + c <- alertSrv.get(vertex)(graph).getOrFail("Alert") + cfv <- values.fields.toTry { case (n, v) => customFieldSrv.getOrFail(EntityIdOrName(n))(graph).map(_ -> v) } + _ <- alertSrv.updateCustomField(c, cfv)(graph, authContext) + } yield Json.obj("customFields" -> values) - case _ => Failure(BadRequestError("Invalid custom fields format")) - }) + case _ => Failure(BadRequestError("Invalid custom fields format")) + }) .property("case", db.idMapping)(_.select(_.`case`._id).readonly) .property("imported", UMapping.boolean)(_.select(_.imported).readonly) .property("importDate", UMapping.date.optional)(_.select(_.importDate).readonly) diff --git a/thehive/app/org/thp/thehive/controllers/v0/CaseCtrl.scala b/thehive/app/org/thp/thehive/controllers/v0/CaseCtrl.scala index 3e6782c2ff..8ffe733918 100644 --- a/thehive/app/org/thp/thehive/controllers/v0/CaseCtrl.scala +++ b/thehive/app/org/thp/thehive/controllers/v0/CaseCtrl.scala @@ -1,9 +1,5 @@ package org.thp.thehive.controllers.v0 -import java.lang.{Long => JLong} -import java.util.Date - -import javax.inject.{Inject, Named, Singleton} import org.thp.scalligraph.controllers.{Entrypoint, FPathElem, FPathEmpty, FieldsParser} import org.thp.scalligraph.models.{Database, UMapping} import org.thp.scalligraph.query._ @@ -24,6 +20,8 @@ import org.thp.thehive.services._ import play.api.libs.json._ import play.api.mvc.{Action, AnyContent, Results} +import java.util.Date +import javax.inject.{Inject, Named, Singleton} import scala.util.{Failure, Success} @Singleton @@ -328,7 +326,7 @@ class PublicCase @Inject() ( case CustomFieldType.integer => new Converter[Any, JsValue] { def apply(x: JsValue): Any = x.as[Long] } case CustomFieldType.string => new Converter[Any, JsValue] { def 
apply(x: JsValue): Any = x.as[String] } } - .getOrElse(new Converter[Any, JsValue] { def apply(x: JsValue): Any = x }) + .getOrElse((x: JsValue) => x) case _ => (x: JsValue) => x } .custom { diff --git a/thehive/app/org/thp/thehive/controllers/v0/DashboardCtrl.scala b/thehive/app/org/thp/thehive/controllers/v0/DashboardCtrl.scala index 309afaaf5f..60ae86a1d4 100644 --- a/thehive/app/org/thp/thehive/controllers/v0/DashboardCtrl.scala +++ b/thehive/app/org/thp/thehive/controllers/v0/DashboardCtrl.scala @@ -1,6 +1,5 @@ package org.thp.thehive.controllers.v0 -import javax.inject.{Inject, Named, Singleton} import org.thp.scalligraph.controllers.{Entrypoint, FString, FieldsParser} import org.thp.scalligraph.models.{Database, UMapping} import org.thp.scalligraph.query._ @@ -17,7 +16,8 @@ import org.thp.thehive.services.{DashboardSrv, OrganisationSrv, UserSrv} import play.api.libs.json.Json import play.api.mvc.{Action, AnyContent, Results} -import scala.util.Failure +import javax.inject.{Inject, Named, Singleton} +import scala.util.{Failure, Success} @Singleton class DashboardCtrl @Inject() ( @@ -33,7 +33,16 @@ class DashboardCtrl @Inject() ( .extract("dashboard", FieldsParser[InputDashboard]) .authTransaction(db) { implicit request => implicit graph => val dashboard: InputDashboard = request.body("dashboard") - dashboardSrv.create(dashboard.toDashboard).map(d => Results.Created(d.toJson)) + dashboardSrv + .create(dashboard.toDashboard) + .flatMap { + case richDashboard if dashboard.status == "Shared" => + dashboardSrv + .share(richDashboard.dashboard, request.organisation, writable = false) + .flatMap(_ => dashboardSrv.get(richDashboard.dashboard).richDashboard.getOrFail("Dashboard")) + case richDashboard => Success(richDashboard) + } + .map(richDashboard => Results.Created(richDashboard.toJson)) } def get(dashboardId: String): Action[AnyContent] = diff --git a/thehive/app/org/thp/thehive/controllers/v0/DescribeCtrl.scala 
b/thehive/app/org/thp/thehive/controllers/v0/DescribeCtrl.scala index e7bcf77a20..1fbfc4b9ec 100644 --- a/thehive/app/org/thp/thehive/controllers/v0/DescribeCtrl.scala +++ b/thehive/app/org/thp/thehive/controllers/v0/DescribeCtrl.scala @@ -35,7 +35,7 @@ class DescribeCtrl @Inject() ( observableCtrl: ObservableCtrl, observableTypeCtrl: ObservableTypeCtrl, organisationCtrl: OrganisationCtrl, - pageCtrl: PageCtrl, +// pageCtrl: PageCtrl, profileCtrl: ProfileCtrl, taskCtrl: TaskCtrl, userCtrl: UserCtrl, @@ -118,8 +118,8 @@ class DescribeCtrl @Inject() ( organisationCtrl.publicData.publicProperties.list.flatMap(propertyToJson("organisation", _)) ), EntityDescription("profile", "/profile", profileCtrl.publicData.publicProperties.list.flatMap(propertyToJson("profile", _))), - EntityDescription("dashboard", "/dashboard", dashboardCtrl.publicData.publicProperties.list.flatMap(propertyToJson("dashboard", _))), - EntityDescription("page", "/page", pageCtrl.publicData.publicProperties.list.flatMap(propertyToJson("page", _))) + EntityDescription("dashboard", "/dashboard", dashboardCtrl.publicData.publicProperties.list.flatMap(propertyToJson("dashboard", _))) +// EntityDescription("page", "/page", pageCtrl.publicData.publicProperties.list.flatMap(propertyToJson("page", _))) ) ++ describeCortexEntity("case_artifact_job", "/connector/cortex/job", "JobCtrl") ++ describeCortexEntity("action", "/connector/cortex/action", "ActionCtrl") } diff --git a/thehive/app/org/thp/thehive/controllers/v0/QueryCtrl.scala b/thehive/app/org/thp/thehive/controllers/v0/QueryCtrl.scala index 3577df52f3..b6c1028f17 100644 --- a/thehive/app/org/thp/thehive/controllers/v0/QueryCtrl.scala +++ b/thehive/app/org/thp/thehive/controllers/v0/QueryCtrl.scala @@ -41,7 +41,7 @@ trait QueryCtrl { .paramParser(queryType) val aggregationParser: FieldsParser[Aggregation] = - TH3Aggregation.fieldsParser + TH3Aggregation.fieldsParser(inputFilterParser) val sortParser: FieldsParser[InputSort] = FieldsParser("sort") { 
case (_, FAny(s)) => Good(s.flatMap(_.split(','))) diff --git a/thehive/app/org/thp/thehive/controllers/v0/Router.scala b/thehive/app/org/thp/thehive/controllers/v0/Router.scala index e29481df34..56093f66d8 100644 --- a/thehive/app/org/thp/thehive/controllers/v0/Router.scala +++ b/thehive/app/org/thp/thehive/controllers/v0/Router.scala @@ -7,193 +7,192 @@ import play.api.routing.sird._ @Singleton class Router @Inject() ( - statsCtrl: StatsCtrl, + alertCtrl: AlertCtrl, + attachmentCtrl: AttachmentCtrl, + auditCtrl: AuditCtrl, + authenticationCtrl: AuthenticationCtrl, caseCtrl: CaseCtrl, caseTemplateCtrl: CaseTemplateCtrl, - userCtrl: UserCtrl, - organisationCtrl: OrganisationCtrl, - taskCtrl: TaskCtrl, - logCtrl: LogCtrl, - observableCtrl: ObservableCtrl, + configCtrl: ConfigCtrl, customFieldCtrl: CustomFieldCtrl, - alertCtrl: AlertCtrl, - auditCtrl: AuditCtrl, - statusCtrl: StatusCtrl, dashboardCtrl: DashboardCtrl, - authenticationCtrl: AuthenticationCtrl, - listCtrl: ListCtrl, - streamCtrl: StreamCtrl, - attachmentCtrl: AttachmentCtrl, describeCtrl: DescribeCtrl, - configCtrl: ConfigCtrl, + listCtrl: ListCtrl, + logCtrl: LogCtrl, + observableCtrl: ObservableCtrl, + observableTypeCtrl: ObservableTypeCtrl, + organisationCtrl: OrganisationCtrl, + pageCtrl: PageCtrl, + permissionCtrl: PermissionCtrl, profileCtrl: ProfileCtrl, shareCtrl: ShareCtrl, + statsCtrl: StatsCtrl, + statusCtrl: StatusCtrl, tagCtrl: TagCtrl, - pageCtrl: PageCtrl, - permissionCtrl: PermissionCtrl, - observableTypeCtrl: ObservableTypeCtrl + taskCtrl: TaskCtrl, + streamCtrl: StreamCtrl, + userCtrl: UserCtrl ) extends SimpleRouter { override def routes: Routes = { - - case POST(p"/_stats") => statsCtrl.stats - - case GET(p"/status") => statusCtrl.get - case GET(p"/health") => statusCtrl.health - case GET(p"/logout") => authenticationCtrl.logout - case POST(p"/logout") => authenticationCtrl.logout - case POST(p"/login") => authenticationCtrl.login -// case POST(p"/ssoLogin") => 
authenticationCtrl.ssoLogin - - case DELETE(p"/case/shares") => shareCtrl.removeShares() case POST(p"/case/$caseId/shares") => shareCtrl.shareCase(caseId) - case DELETE(p"/case/$caseId/shares") => shareCtrl.removeShares(caseId) - case DELETE(p"/task/$taskId/shares") => shareCtrl.removeTaskShares(taskId) - case DELETE(p"/observable/$observableId/shares") => shareCtrl.removeObservableShares(observableId) case GET(p"/case/$caseId/shares") => shareCtrl.listShareCases(caseId) + case DELETE(p"/case/$caseId/shares") => shareCtrl.removeShares(caseId) + case DELETE(p"/case/shares") => shareCtrl.removeShares() + case POST(p"/case/task/$taskId/shares") => shareCtrl.shareTask(taskId) case GET(p"/case/$caseId/task/$taskId/shares") => shareCtrl.listShareTasks(caseId, taskId) + case DELETE(p"/task/$taskId/shares") => shareCtrl.removeTaskShares(taskId) + case POST(p"/case/artifact/$observableId/shares") => shareCtrl.shareObservable(observableId) case GET(p"/case/$caseId/observable/$observableId/shares") => shareCtrl.listShareObservables(caseId, observableId) - case POST(p"/case/task/$taskId/shares") => shareCtrl.shareTask(taskId) + case DELETE(p"/observable/$observableId/shares") => shareCtrl.removeObservableShares(observableId) case DELETE(p"/case/share/$shareId") => shareCtrl.removeShare(shareId) case PATCH(p"/case/share/$shareId") => shareCtrl.updateShare(shareId) - case GET(p"/case/task") => taskCtrl.search - case POST(p"/case/$caseId/task") => taskCtrl.create(caseId) // Audit ok - case GET(p"/case/task/$taskId") => taskCtrl.get(taskId) - case PATCH(p"/case/task/$taskId") => taskCtrl.update(taskId) // Audit ok - case POST(p"/case/task/_search") => taskCtrl.search - case POST(p"/case/task/_stats") => taskCtrl.stats - case POST(p"/case/$caseId/task/_search") => taskCtrl.searchInCase(caseId) + case GET(p"/alert") => alertCtrl.search + case POST(p"/alert/_search") => alertCtrl.search + case POST(p"/alert/_stats") => alertCtrl.stats + case POST(p"/alert") => alertCtrl.create + case 
GET(p"/alert/$alertId") => alertCtrl.get(alertId) + case PATCH(p"/alert/$alertId") => alertCtrl.update(alertId) + case POST(p"/alert/delete/_bulk") => alertCtrl.bulkDelete + case DELETE(p"/alert/$alertId") => alertCtrl.delete(alertId) + case POST(p"/alert/merge/_bulk") => alertCtrl.bulkMergeWithCase + case POST(p"/alert/$alertId/merge/$caseId") => alertCtrl.mergeWithCase(alertId, caseId) + case POST(p"/alert/$alertId/markAsRead") => alertCtrl.markAsRead(alertId) + case POST(p"/alert/$alertId/markAsUnread") => alertCtrl.markAsUnread(alertId) + case POST(p"/alert/$alertId/follow") => alertCtrl.followAlert(alertId) + case POST(p"/alert/$alertId/unfollow") => alertCtrl.unfollowAlert(alertId) + case POST(p"/alert/$alertId/createCase") => alertCtrl.createCase(alertId) + // PATCH /alert/_bulk controllers.AlertCtrl.bulkUpdate + + case GET(p"/datastore/$id" ? q_o"name=$name") => attachmentCtrl.download(id, name) + case GET(p"/datastorezip/$id" ? q_o"name=$name") => attachmentCtrl.downloadZip(id, name) + + case GET(p"/audit") => auditCtrl.flow(None) + case POST(p"/audit/_search") => auditCtrl.search + case POST(p"/audit/_stats") => auditCtrl.stats + case GET(p"/audit") => auditCtrl.search + case GET(p"/flow" ? 
q_o"rootId=$rootId") => auditCtrl.flow(rootId) + + case POST(p"/login") => authenticationCtrl.login + case GET(p"/logout") => authenticationCtrl.logout + case POST(p"/logout") => authenticationCtrl.logout + + case GET(p"/case/template") => caseTemplateCtrl.search + case POST(p"/case/template/_search") => caseTemplateCtrl.search + case POST(p"/case/template/_stats") => caseTemplateCtrl.stats + case POST(p"/case/template") => caseTemplateCtrl.create + case GET(p"/case/template/$caseTemplateId") => caseTemplateCtrl.get(caseTemplateId) + case PATCH(p"/case/template/$caseTemplateId") => caseTemplateCtrl.update(caseTemplateId) + case DELETE(p"/case/template/$caseTemplateId") => caseTemplateCtrl.delete(caseTemplateId) - //case GET(p"/case/task/$taskId/log") => logCtrl.findInTask(taskId) - //case POST(p"/case/task/$taskId/log/_search") => logCtrl.findInTask(taskId) case POST(p"/case/task/log/_search") => logCtrl.search case POST(p"/case/task/log/_stats") => logCtrl.stats - case POST(p"/case/task/$taskId/log") => logCtrl.create(taskId) // Audit ok - case PATCH(p"/case/task/log/$logId") => logCtrl.update(logId) // Audit ok - case DELETE(p"/case/task/log/$logId") => logCtrl.delete(logId) // Audit ok, weird logs/silent errors though (stream related) + case POST(p"/case/task/$taskId/log") => logCtrl.create(taskId) + case PATCH(p"/case/task/log/$logId") => logCtrl.update(logId) + case DELETE(p"/case/task/log/$logId") => logCtrl.delete(logId) + //case GET(p"/case/task/$taskId/log") => logCtrl.findInTask(taskId) + //case POST(p"/case/task/$taskId/log/_search") => logCtrl.findInTask(taskId) // case GET(p"/case/task/log/$logId") => logCtrl.get(logId) - case POST(p"/case/artifact/_search") => observableCtrl.search - // case POST(p"/case/:caseId/artifact/_search") => observableCtrl.findInCase(caseId) + case GET(p"/case/task") => taskCtrl.search + case POST(p"/case/task/_search") => taskCtrl.search + case POST(p"/case/task/_stats") => taskCtrl.stats + case POST(p"/case/$caseId/task") 
=> taskCtrl.create(caseId) + case GET(p"/case/task/$taskId") => taskCtrl.get(taskId) + case PATCH(p"/case/task/$taskId") => taskCtrl.update(taskId) + case POST(p"/case/$caseId/task/_search") => taskCtrl.searchInCase(caseId) + + case POST(p"/case/artifact/_search") => observableCtrl.search case POST(p"/case/artifact/_stats") => observableCtrl.stats - case POST(p"/case/$caseId/artifact") => observableCtrl.createInCase(caseId) // Audit ok + case POST(p"/case/$caseId/artifact") => observableCtrl.createInCase(caseId) case GET(p"/case/artifact/$observableId") => observableCtrl.get(observableId) - case DELETE(p"/case/artifact/$observableId") => observableCtrl.delete(observableId) // Audit ok - case PATCH(p"/case/artifact/_bulk") => observableCtrl.bulkUpdate // Audit ok - case PATCH(p"/case/artifact/$observableId") => observableCtrl.update(observableId) // Audit ok + case PATCH(p"/case/artifact/_bulk") => observableCtrl.bulkUpdate + case PATCH(p"/case/artifact/$observableId") => observableCtrl.update(observableId) + case DELETE(p"/case/artifact/$observableId") => observableCtrl.delete(observableId) + case POST(p"/alert/$alertId/artifact") => observableCtrl.createInAlert(alertId) + case GET(p"/alert/artifact/$observableId") => observableCtrl.get(observableId) + case PATCH(p"/alert/artifact/_bulk") => observableCtrl.bulkUpdate + case PATCH(p"/alert/artifact/$observableId") => observableCtrl.update(observableId) + case DELETE(p"/alert/artifact/$observableId") => observableCtrl.delete(observableId) case GET(p"/case/artifact/$observableId/similar") => observableCtrl.findSimilar(observableId) - case POST(p"/case/artifact/$observableId/shares") => shareCtrl.shareObservable(observableId) - case POST(p"/alert/$alertId/artifact") => observableCtrl.createInAlert(alertId) // Audit ok - case PATCH(p"/alert/artifact/$observableId") => observableCtrl.update(observableId) // Audit ok - case PATCH(p"/alert/artifact/_bulk") => observableCtrl.bulkUpdate // Audit ok - case 
DELETE(p"/alert/artifact/$observableId") => observableCtrl.delete(observableId) // Audit ok + // case POST(p"/case/:caseId/artifact/_search") => observableCtrl.findInCase(caseId) case GET(p"/case") => caseCtrl.search - case POST(p"/case") => caseCtrl.create // Audit ok - case GET(p"/case/$caseId") => caseCtrl.get(caseId) - case PATCH(p"/case/_bulk") => caseCtrl.bulkUpdate // Not used by the frontend - case PATCH(p"/case/$caseId") => caseCtrl.update(caseId) // Audit ok - case POST(p"/case/_merge/$caseIds") => caseCtrl.merge(caseIds) // Not implemented in backend and not used by frontend case POST(p"/case/_search") => caseCtrl.search case POST(p"/case/_stats") => caseCtrl.stats + case POST(p"/case") => caseCtrl.create + case GET(p"/case/$caseId") => caseCtrl.get(caseId) + case PATCH(p"/case/_bulk") => caseCtrl.bulkUpdate // Not used by the frontend + case PATCH(p"/case/$caseId") => caseCtrl.update(caseId) case DELETE(p"/case/$caseId") => caseCtrl.delete(caseId) // Not used by the frontend - case DELETE(p"/case/$caseId/force") => caseCtrl.delete(caseId) // Audit ok + case DELETE(p"/case/$caseId/force") => caseCtrl.delete(caseId) + case POST(p"/case/_merge/$caseIds") => caseCtrl.merge(caseIds) // Not implemented in backend and not used by frontend case GET(p"/case/$caseId/links") => caseCtrl.linkedCases(caseId) - case GET(p"/case/template") => caseTemplateCtrl.search - case POST(p"/case/template") => caseTemplateCtrl.create // Audit ok - case GET(p"/case/template/$caseTemplateId") => caseTemplateCtrl.get(caseTemplateId) - case PATCH(p"/case/template/$caseTemplateId") => caseTemplateCtrl.update(caseTemplateId) // Audit ok - case POST(p"/case/template/_search") => caseTemplateCtrl.search - case DELETE(p"/case/template/$caseTemplateId") => caseTemplateCtrl.delete(caseTemplateId) // Audit ok + case GET(p"/config/user") => configCtrl.userList + case GET(p"/config/user/$path") => configCtrl.userGet(path) + case PUT(p"/config/user/$path") => configCtrl.userSet(path) + case 
GET(p"/config/organisation") => configCtrl.organisationList + case GET(p"/config/organisation/$path") => configCtrl.organisationGet(path) + case PUT(p"/config/organisation/$path") => configCtrl.organisationSet(path) + case GET(p"/config") => configCtrl.list + case GET(p"/config/$path") => configCtrl.get(path) + case PUT(p"/config/$path") => configCtrl.set(path) - case GET(p"/user") => userCtrl.search - case POST(p"/user") => userCtrl.create // Audit ok - case GET(p"/user/current") => userCtrl.current - case GET(p"/user/$userId") => userCtrl.get(userId) - case PATCH(p"/user/$userId") => userCtrl.update(userId) // Audit ok - case DELETE(p"/user/$userId") => userCtrl.lock(userId) // Audit ok - case DELETE(p"/user/$userId/force") => userCtrl.delete(userId) // Audit ok - case POST(p"/user/$userId/password/set") => userCtrl.setPassword(userId) // Audit ok - case POST(p"/user/$userId/password/change") => userCtrl.changePassword(userId) // Audit ok - case GET(p"/user/$userId/key") => userCtrl.getKey(userId) - case DELETE(p"/user/$userId/key") => userCtrl.removeKey(userId) // Audit ok - case POST(p"/user/$userId/key/renew") => userCtrl.renewKey(userId) // Audit ok - case POST(p"/user/_search") => userCtrl.search + case GET(p"/customField") => customFieldCtrl.list + case POST(p"/customFields/_search") => customFieldCtrl.search + case POST(p"/customFields/_stats") => customFieldCtrl.stats + case POST(p"/customField") => customFieldCtrl.create + case GET(p"/customField/$id") => customFieldCtrl.get(id) + case PATCH(p"/customField/$id") => customFieldCtrl.update(id) + case DELETE(p"/customField/$id") => customFieldCtrl.delete(id) + case GET(p"/customFields/$id/use") => customFieldCtrl.useCount(id) + + case GET(p"/dashboard") => dashboardCtrl.search + case POST(p"/dashboard/_search") => dashboardCtrl.search + case POST(p"/dashboard/_stats") => dashboardCtrl.stats + case POST(p"/dashboard") => dashboardCtrl.create + case GET(p"/dashboard/$dashboardId") => 
dashboardCtrl.get(dashboardId) + case PATCH(p"/dashboard/$dashboardId") => dashboardCtrl.update(dashboardId) + case DELETE(p"/dashboard/$dashboardId") => dashboardCtrl.delete(dashboardId) + + case GET(p"/describe/_all") => describeCtrl.describeAll + case GET(p"/describe/$modelName") => describeCtrl.describe(modelName) case GET(p"/list") => listCtrl.list - case DELETE(p"/list/$itemId") => listCtrl.deleteItem(itemId) - case PATCH(p"/list/$itemId") => listCtrl.updateItem(itemId) case POST(p"/list/$listName") => listCtrl.addItem(listName) case GET(p"/list/$listName") => listCtrl.listItems(listName) + case PATCH(p"/list/$itemId") => listCtrl.updateItem(itemId) + case DELETE(p"/list/$itemId") => listCtrl.deleteItem(itemId) case POST(p"/list/$listName/_exists") => listCtrl.itemExists(listName) + case GET(p"/observable/type") => observableTypeCtrl.search + case POST(p"/observable/type/_search") => observableTypeCtrl.search + case POST(p"/observable/type/_stats") => observableTypeCtrl.stats + case POST(p"/observable/type") => observableTypeCtrl.create + case GET(p"/observable/type/$idOrName") => observableTypeCtrl.get(idOrName) + case DELETE(p"/observable/type/$idOrName") => observableTypeCtrl.delete(idOrName) + case GET(p"/organisation") => organisationCtrl.list - case POST(p"/organisation") => organisationCtrl.create // Audit ok + case POST(p"/organisation/_search") => organisationCtrl.search + case POST(p"/organisation/_stats") => organisationCtrl.stats + case POST(p"/organisation") => organisationCtrl.create case GET(p"/organisation/$organisationId") => organisationCtrl.get(organisationId) - case GET(p"/organisation/$organisationId/links") => organisationCtrl.listLinks(organisationId) - case PATCH(p"/organisation/$organisationId") => organisationCtrl.update(organisationId) // Audit ok + case PATCH(p"/organisation/$organisationId") => organisationCtrl.update(organisationId) case PUT(p"/organisation/$organisationId1/link/$organisationId2") => 
organisationCtrl.link(organisationId1, organisationId2) - case PUT(p"/organisation/$organisationId1/links") => organisationCtrl.bulkLink(organisationId1) + case PUT(p"/organisation/$organisationId/links") => organisationCtrl.bulkLink(organisationId) + case GET(p"/organisation/$organisationId/links") => organisationCtrl.listLinks(organisationId) case DELETE(p"/organisation/$organisationId1/link/$organisationId2") => organisationCtrl.unlink(organisationId1, organisationId2) - case GET(p"/customField") => customFieldCtrl.list - case POST(p"/customField") => customFieldCtrl.create - case GET(p"/customField/$id") => customFieldCtrl.get(id) - case DELETE(p"/customField/$id") => customFieldCtrl.delete(id) - case PATCH(p"/customField/$id") => customFieldCtrl.update(id) - case GET(p"/customFields/$id/use") => customFieldCtrl.useCount(id) - - case GET(p"/alert") => alertCtrl.search - case POST(p"/alert") => alertCtrl.create // Audit ok - case GET(p"/alert/$alertId") => alertCtrl.get(alertId) - case PATCH(p"/alert/$alertId") => alertCtrl.update(alertId) // Audit ok - case POST(p"/alert/$alertId/markAsRead") => alertCtrl.markAsRead(alertId) // Audit ok - case POST(p"/alert/$alertId/markAsUnread") => alertCtrl.markAsUnread(alertId) // Audit ok - case POST(p"/alert/$alertId/follow") => alertCtrl.followAlert(alertId) // Audit ok - case POST(p"/alert/$alertId/unfollow") => alertCtrl.unfollowAlert(alertId) // Audit ok - case POST(p"/alert/$alertId/createCase") => alertCtrl.createCase(alertId) // Audit ok - case POST(p"/alert/_search") => alertCtrl.search - // PATCH /alert/_bulk controllers.AlertCtrl.bulkUpdate - case POST(p"/alert/delete/_bulk") => alertCtrl.bulkDelete - case POST(p"/alert/_stats") => alertCtrl.stats - case DELETE(p"/alert/$alertId") => alertCtrl.delete(alertId) // Audit ok - case POST(p"/alert/$alertId/merge/$caseId") => alertCtrl.mergeWithCase(alertId, caseId) // Audit ok - case POST(p"/alert/merge/_bulk") => alertCtrl.bulkMergeWithCase - - case 
GET(p"/dashboard") => dashboardCtrl.search - case POST(p"/dashboard/_search") => dashboardCtrl.search - case POST(p"/dashboard/_stats") => dashboardCtrl.stats - case POST(p"/dashboard") => dashboardCtrl.create // Audit ok - case GET(p"/dashboard/$dashboardId") => dashboardCtrl.get(dashboardId) - case PATCH(p"/dashboard/$dashboardId") => dashboardCtrl.update(dashboardId) // Audit ok - case DELETE(p"/dashboard/$dashboardId") => dashboardCtrl.delete(dashboardId) // Audit ok - - case GET(p"/audit") => auditCtrl.flow(None) - case GET(p"/flow" ? q_o"rootId=$rootId") => auditCtrl.flow(rootId) - case GET(p"/audit") => auditCtrl.search - case POST(p"/audit/_search") => auditCtrl.search - case POST(p"/audit/_stats") => auditCtrl.stats - - case POST(p"/stream") => streamCtrl.create - case GET(p"/stream/status") => streamCtrl.status - case GET(p"/stream/$streamId") => streamCtrl.get(streamId) - - case GET(p"/datastore/$id" ? q_o"name=$name") => attachmentCtrl.download(id, name) - case GET(p"/datastorezip/$id" ? 
q_o"name=$name") => attachmentCtrl.downloadZip(id, name) - case GET(p"/describe/_all") => describeCtrl.describeAll - case GET(p"/describe/$modelName") => describeCtrl.describe(modelName) + case POST(p"/page/_search") => pageCtrl.search + case POST(p"/page/_stats") => pageCtrl.stats + case POST(p"/page") => pageCtrl.create + case GET(p"/page/$idOrTitle") => pageCtrl.get(idOrTitle) + case PATCH(p"/page/$idOrTitle") => pageCtrl.update(idOrTitle) + case DELETE(p"/page/$idOrTitle") => pageCtrl.delete(idOrTitle) - case GET(p"/config/user") => configCtrl.userList - case GET(p"/config/user/$path") => configCtrl.userGet(path) - case PUT(p"/config/user/$path") => configCtrl.userSet(path) - case GET(p"/config/organisation") => configCtrl.organisationList - case GET(p"/config/organisation/$path") => configCtrl.organisationGet(path) - case PUT(p"/config/organisation/$path") => configCtrl.organisationSet(path) - case GET(p"/config") => configCtrl.list - case GET(p"/config/$path") => configCtrl.get(path) - case PUT(p"/config/$path") => configCtrl.set(path) + case GET(p"/permission") => permissionCtrl.list case GET(p"/profile") => profileCtrl.search case POST(p"/profile/_search") => profileCtrl.search @@ -203,23 +202,34 @@ class Router @Inject() ( case PATCH(p"/profile/$profileId") => profileCtrl.update(profileId) case DELETE(p"/profile/$profileId") => profileCtrl.delete(profileId) + case POST(p"/_stats") => statsCtrl.stats + + case GET(p"/status") => statusCtrl.get + case GET(p"/health") => statusCtrl.health + + case POST(p"/stream") => streamCtrl.create + case GET(p"/stream/status") => streamCtrl.status + case GET(p"/stream/$streamId") => streamCtrl.get(streamId) + + case GET(p"/tag") => tagCtrl.search case POST(p"/tag/_search") => tagCtrl.search case POST(p"/tag/_stats") => tagCtrl.stats case POST(p"/tag/_import") => tagCtrl.importTaxonomy case GET(p"/tag/$id") => tagCtrl.get(id) - case POST(p"/page/_search") => pageCtrl.search - case POST(p"/page/_stats") => pageCtrl.stats - 
case GET(p"/page/$idOrTitle") => pageCtrl.get(idOrTitle) - case POST(p"/page") => pageCtrl.create - case PATCH(p"/page/$idOrTitle") => pageCtrl.update(idOrTitle) - case DELETE(p"/page/$idOrTitle") => pageCtrl.delete(idOrTitle) - - case GET(p"/permission") => permissionCtrl.list - - case GET(p"/observable/type") => observableTypeCtrl.search - case GET(p"/observable/type/$idOrName") => observableTypeCtrl.get(idOrName) - case POST(p"/observable/type") => observableTypeCtrl.create - case DELETE(p"/observable/type/$idOrName") => observableTypeCtrl.delete(idOrName) + case GET(p"/user") => userCtrl.search + case POST(p"/user/_search") => userCtrl.search + case POST(p"/user/_stats") => userCtrl.stats + case POST(p"/user") => userCtrl.create + case GET(p"/user/current") => userCtrl.current + case GET(p"/user/$userId") => userCtrl.get(userId) + case PATCH(p"/user/$userId") => userCtrl.update(userId) + case DELETE(p"/user/$userId") => userCtrl.lock(userId) + case DELETE(p"/user/$userId/force") => userCtrl.delete(userId) + case POST(p"/user/$userId/password/set") => userCtrl.setPassword(userId) + case POST(p"/user/$userId/password/change") => userCtrl.changePassword(userId) + case GET(p"/user/$userId/key") => userCtrl.getKey(userId) + case DELETE(p"/user/$userId/key") => userCtrl.removeKey(userId) + case POST(p"/user/$userId/key/renew") => userCtrl.renewKey(userId) } } diff --git a/thehive/app/org/thp/thehive/controllers/v0/StatusCtrl.scala b/thehive/app/org/thp/thehive/controllers/v0/StatusCtrl.scala index 9e03134ae1..61404c7d1f 100644 --- a/thehive/app/org/thp/thehive/controllers/v0/StatusCtrl.scala +++ b/thehive/app/org/thp/thehive/controllers/v0/StatusCtrl.scala @@ -7,7 +7,7 @@ import org.thp.scalligraph.services.config.ApplicationConfig.finiteDurationForma import org.thp.scalligraph.services.config.{ApplicationConfig, ConfigItem} import org.thp.scalligraph.{EntityName, ScalligraphApplicationLoader} import org.thp.thehive.TheHiveModule -import 
org.thp.thehive.models.{HealthStatus, User} +import org.thp.thehive.models.{HealthStatus, TheHiveSchemaDefinition, User} import org.thp.thehive.services.{Connector, UserSrv} import play.api.libs.json.{JsObject, JsString, Json} import play.api.mvc.{AbstractController, Action, AnyContent, Results} @@ -24,6 +24,7 @@ class StatusCtrl @Inject() ( authSrv: AuthSrv, userSrv: UserSrv, connectors: immutable.Set[Connector], + theHiveSchemaDefinition: TheHiveSchemaDefinition, @Named("with-thehive-schema") db: Database ) { @@ -55,7 +56,15 @@ class StatusCtrl @Inject() ( "capabilities" -> authSrv.capabilities.map(c => JsString(c.toString)), "ssoAutoLogin" -> authSrv.capabilities.contains(AuthCapability.sso), "pollingDuration" -> streamPollingDuration.toMillis - ) + ), + "schemaStatus" -> (connectors.flatMap(_.schemaStatus) ++ theHiveSchemaDefinition.schemaStatus).map { schemaStatus => + Json.obj( + "name" -> schemaStatus.name, + "currentVersion" -> schemaStatus.currentVersion, + "expectedVersion" -> schemaStatus.expectedVersion, + "error" -> schemaStatus.error.map(_.getMessage) + ) + } ) ) ) diff --git a/thehive/app/org/thp/thehive/controllers/v1/Properties.scala b/thehive/app/org/thp/thehive/controllers/v1/Properties.scala index f842c34564..9bf4511f1d 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/Properties.scala +++ b/thehive/app/org/thp/thehive/controllers/v1/Properties.scala @@ -1,15 +1,12 @@ package org.thp.thehive.controllers.v1 -import java.lang.{Long => JLong} -import java.util.Date - -import javax.inject.{Inject, Named, Singleton} import org.thp.scalligraph.controllers.{FPathElem, FPathEmpty} import org.thp.scalligraph.models.{Database, UMapping} import org.thp.scalligraph.query.{PublicProperties, PublicPropertyListBuilder} import org.thp.scalligraph.traversal.Converter import org.thp.scalligraph.traversal.TraversalOps._ import org.thp.scalligraph.{BadRequestError, EntityIdOrName, RichSeq} +import org.thp.thehive.dto.v1.InputCustomFieldValue import 
org.thp.thehive.models._ import org.thp.thehive.services.AlertOps._ import org.thp.thehive.services.AuditOps._ @@ -25,6 +22,8 @@ import org.thp.thehive.services.UserOps._ import org.thp.thehive.services._ import play.api.libs.json.{JsObject, JsValue, Json} +import java.util.Date +import javax.inject.{Inject, Named, Singleton} import scala.util.Failure @Singleton @@ -60,8 +59,8 @@ class Properties @Inject() ( .property("lastSyncDate", UMapping.date.optional)(_.field.updatable) .property("tags", UMapping.string.set)( _.select(_.tags.displayName) - .filter((_, cases) => - cases + .filter((_, alerts) => + alerts .tags .graphMap[String, String, Converter.Identity[String]]( { v => @@ -96,21 +95,21 @@ class Properties @Inject() ( alerts .customFields(EntityIdOrName(idOrName)) .jsonValue - case (_, caseSteps) => caseSteps.customFields.nameJsonValue.fold.domainMap(JsObject(_)) + case (_, alerts) => alerts.customFields.nameJsonValue.fold.domainMap(JsObject(_)) } .filter { - case (FPathElem(_, FPathElem(idOrName, _)), caseTraversal) => + case (FPathElem(_, FPathElem(idOrName, _)), alerts) => db .roTransaction(implicit graph => customFieldSrv.get(EntityIdOrName(idOrName)).value(_.`type`).getOrFail("CustomField")) .map { - case CustomFieldType.boolean => caseTraversal.customFields(EntityIdOrName(idOrName)).value(_.booleanValue) - case CustomFieldType.date => caseTraversal.customFields(EntityIdOrName(idOrName)).value(_.dateValue) - case CustomFieldType.float => caseTraversal.customFields(EntityIdOrName(idOrName)).value(_.floatValue) - case CustomFieldType.integer => caseTraversal.customFields(EntityIdOrName(idOrName)).value(_.integerValue) - case CustomFieldType.string => caseTraversal.customFields(EntityIdOrName(idOrName)).value(_.stringValue) + case CustomFieldType.boolean => alerts.customFields(EntityIdOrName(idOrName)).value(_.booleanValue) + case CustomFieldType.date => alerts.customFields(EntityIdOrName(idOrName)).value(_.dateValue) + case CustomFieldType.float => 
alerts.customFields(EntityIdOrName(idOrName)).value(_.floatValue) + case CustomFieldType.integer => alerts.customFields(EntityIdOrName(idOrName)).value(_.integerValue) + case CustomFieldType.string => alerts.customFields(EntityIdOrName(idOrName)).value(_.stringValue) } - .getOrElse(caseTraversal.constant2(null)) - case (_, caseTraversal) => caseTraversal.constant2(null) + .getOrElse(alerts.constant2(null)) + case (_, alerts) => alerts.constant2(null) } .converter { case FPathElem(_, FPathElem(idOrName, _)) => @@ -131,14 +130,14 @@ class Properties @Inject() ( .custom { case (FPathElem(_, FPathElem(idOrName, _)), value, vertex, _, graph, authContext) => for { - c <- caseSrv.get(vertex)(graph).getOrFail("Case") - _ <- caseSrv.setOrCreateCustomField(c, EntityIdOrName(idOrName), Some(value), None)(graph, authContext) + a <- alertSrv.get(vertex)(graph).getOrFail("Alert") + _ <- alertSrv.setOrCreateCustomField(a, InputCustomFieldValue(idOrName, Some(value), None))(graph, authContext) } yield Json.obj(s"customField.$idOrName" -> value) case (FPathElem(_, FPathEmpty), values: JsObject, vertex, _, graph, authContext) => for { - c <- caseSrv.get(vertex)(graph).getOrFail("Case") - cfv <- values.fields.toTry { case (n, v) => customFieldSrv.getOrFail(EntityIdOrName(n))(graph).map(cf => (cf, v, None)) } - _ <- caseSrv.updateCustomField(c, cfv)(graph, authContext) + c <- alertSrv.get(vertex)(graph).getOrFail("Alert") + cfv <- values.fields.toTry { case (n, v) => customFieldSrv.getOrFail(EntityIdOrName(n))(graph).map(_ -> v) } + _ <- alertSrv.updateCustomField(c, cfv)(graph, authContext) } yield Json.obj("customFields" -> values) case _ => Failure(BadRequestError("Invalid custom fields format")) }) diff --git a/thehive/app/org/thp/thehive/controllers/v1/StatusCtrl.scala b/thehive/app/org/thp/thehive/controllers/v1/StatusCtrl.scala index 1289dc0e89..99574a0bb7 100644 --- a/thehive/app/org/thp/thehive/controllers/v1/StatusCtrl.scala +++ 
b/thehive/app/org/thp/thehive/controllers/v1/StatusCtrl.scala @@ -9,15 +9,25 @@ import org.thp.scalligraph.controllers.Entrypoint import org.thp.scalligraph.services.config.ApplicationConfig.finiteDurationFormat import org.thp.scalligraph.services.config.{ApplicationConfig, ConfigItem} import org.thp.thehive.TheHiveModule +import org.thp.thehive.models.TheHiveSchemaDefinition +import org.thp.thehive.services.Connector import play.api.libs.json.{JsObject, JsString, Json, Writes} import play.api.mvc.{AbstractController, Action, AnyContent, Results} import javax.inject.{Inject, Singleton} +import scala.collection.immutable import scala.concurrent.duration.FiniteDuration import scala.util.Success @Singleton -class StatusCtrl @Inject() (entrypoint: Entrypoint, appConfig: ApplicationConfig, authSrv: AuthSrv, system: ActorSystem) { +class StatusCtrl @Inject() ( + entrypoint: Entrypoint, + appConfig: ApplicationConfig, + authSrv: AuthSrv, + connectors: immutable.Set[Connector], + theHiveSchemaDefinition: TheHiveSchemaDefinition, + system: ActorSystem +) { private def getVersion(c: Class[_]): String = Option(c.getPackage.getImplementationVersion).getOrElse("SNAPSHOT") @@ -67,10 +77,17 @@ class StatusCtrl @Inject() (entrypoint: Entrypoint, appConfig: ApplicationConfig "ssoAutoLogin" -> authSrv.capabilities.contains(AuthCapability.sso), "pollingDuration" -> streamPollingDuration.toMillis ), - "cluster" -> cluster.state + "cluster" -> cluster.state, + "schemaStatus" -> (connectors.flatMap(_.schemaStatus) ++ theHiveSchemaDefinition.schemaStatus).map { schemaStatus => + Json.obj( + "name" -> schemaStatus.name, + "currentVersion" -> schemaStatus.currentVersion, + "expectedVersion" -> schemaStatus.expectedVersion, + "error" -> schemaStatus.error.map(_.getMessage) + ) + } ) ) ) } - } diff --git a/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala b/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala index da0fe21586..5f797b4f89 100644 --- 
a/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala +++ b/thehive/app/org/thp/thehive/models/TheHiveSchemaDefinition.scala @@ -27,8 +27,7 @@ class TheHiveSchemaDefinition @Inject() extends Schema with UpdatableSchema { // Make sure TypeDefinitionCategory has been initialised before ModifierType to prevent ExceptionInInitializerError TypeDefinitionCategory.BACKING_INDEX lazy val logger: Logger = Logger(getClass) - val name: String = "thehive" - val operations: Operations = Operations(name) + val operations: Operations = Operations("thehive") .addProperty[Option[Boolean]]("Observable", "seen") .updateGraph("Add manageConfig permission to org-admin profile", "Profile") { traversal => traversal.unsafeHas("name", "org-admin").raw.property("permissions", "manageConfig").iterate() diff --git a/thehive/app/org/thp/thehive/services/AlertSrv.scala b/thehive/app/org/thp/thehive/services/AlertSrv.scala index 41859c2590..63ded04b64 100644 --- a/thehive/app/org/thp/thehive/services/AlertSrv.scala +++ b/thehive/app/org/thp/thehive/services/AlertSrv.scala @@ -273,7 +273,7 @@ class AlertSrv @Inject() ( _ = integrityCheckActor ! EntityAdded("Alert") } yield createdCase } - }(richCase => auditSrv.`case`.create(richCase.`case`, richCase.toJson)) + }(richCase => auditSrv.alert.createCase(alert.alert, richCase.`case`, richCase.toJson.as[JsObject])) def mergeInCase(alertId: EntityIdOrName, caseId: EntityIdOrName)(implicit graph: Graph, authContext: AuthContext): Try[Case with Entity] = for { @@ -306,7 +306,7 @@ class AlertSrv @Inject() ( ) ) } yield details - }(details => auditSrv.alertToCase.merge(alert, `case`, Some(details))) + }(details => auditSrv.alert.mergeToCase(alert, `case`, details.as[JsObject])) .map(_ => integrityCheckActor ! 
EntityAdded("Alert")) .flatMap(_ => caseSrv.getOrFail(`case`._id)) diff --git a/thehive/app/org/thp/thehive/services/AuditSrv.scala b/thehive/app/org/thp/thehive/services/AuditSrv.scala index da4cd7981e..a1d3941b48 100644 --- a/thehive/app/org/thp/thehive/services/AuditSrv.scala +++ b/thehive/app/org/thp/thehive/services/AuditSrv.scala @@ -1,10 +1,7 @@ package org.thp.thehive.services -import java.util.{Map => JMap} - import akka.actor.ActorRef import com.google.inject.name.Named -import javax.inject.{Inject, Provider, Singleton} import org.apache.tinkerpop.gremlin.process.traversal.Order import org.apache.tinkerpop.gremlin.structure.Transaction.Status import org.apache.tinkerpop.gremlin.structure.{Graph, Vertex} @@ -20,6 +17,8 @@ import org.thp.thehive.services.OrganisationOps._ import org.thp.thehive.services.notification.AuditNotificationMessage import play.api.libs.json.{JsObject, JsValue, Json} +import java.util.{Map => JMap} +import javax.inject.{Inject, Provider, Singleton} import scala.util.{Success, Try} case class PendingAudit(audit: Audit, context: Option[Product with Entity], `object`: Option[Product with Entity]) @@ -31,18 +30,18 @@ class AuditSrv @Inject() ( eventSrv: EventSrv )(implicit @Named("with-thehive-schema") db: Database) extends VertexSrv[Audit] { auditSrv => - lazy val userSrv: UserSrv = userSrvProvider.get - val auditUserSrv = new EdgeSrv[AuditUser, Audit, User] - val auditedSrv = new EdgeSrv[Audited, Audit, Product] - val auditContextSrv = new EdgeSrv[AuditContext, Audit, Product] - val `case` = new SelfContextObjectAudit[Case] - val task = new SelfContextObjectAudit[Task] - val observable = new SelfContextObjectAudit[Observable] - val log = new ObjectAudit[Log, Task] - val caseTemplate = new SelfContextObjectAudit[CaseTemplate] - val taskInTemplate = new ObjectAudit[Task, CaseTemplate] - val alert = new SelfContextObjectAudit[Alert] - val alertToCase = new ObjectAudit[Alert, Case] + lazy val userSrv: UserSrv = userSrvProvider.get + val 
auditUserSrv = new EdgeSrv[AuditUser, Audit, User] + val auditedSrv = new EdgeSrv[Audited, Audit, Product] + val auditContextSrv = new EdgeSrv[AuditContext, Audit, Product] + val `case` = new SelfContextObjectAudit[Case] + val task = new SelfContextObjectAudit[Task] + val observable = new SelfContextObjectAudit[Observable] + val log = new ObjectAudit[Log, Task] + val caseTemplate = new SelfContextObjectAudit[CaseTemplate] + val taskInTemplate = new ObjectAudit[Task, CaseTemplate] + val alert = new AlertAudit +// val alertToCase = new ObjectAudit[Alert, Case] val share = new ShareAudit val observableInAlert = new ObjectAudit[Observable, Alert] val user = new UserAudit @@ -173,7 +172,10 @@ class AuditSrv @Inject() ( def delete(entity: E with Entity, context: Option[C with Entity])(implicit graph: Graph, authContext: AuthContext): Try[Unit] = auditSrv.create(Audit(Audit.delete, entity, None), context, None) - def merge(entity: E with Entity, destination: C with Entity, details: Option[JsObject] = None)(implicit graph: Graph, authContext: AuthContext): Try[Unit] = + def merge(entity: E with Entity, destination: C with Entity, details: Option[JsObject] = None)(implicit + graph: Graph, + authContext: AuthContext + ): Try[Unit] = auditSrv.create(Audit(Audit.merge, destination, details.map(_.toString())), Some(destination), Some(destination)) } @@ -186,7 +188,10 @@ class AuditSrv @Inject() ( if (details == JsObject.empty) Success(()) else auditSrv.create(Audit(Audit.update, entity, Some(details.toString)), Some(entity), Some(entity)) - def delete(entity: E with Entity, context: Product with Entity, details: Option[JsObject] = None)(implicit graph: Graph, authContext: AuthContext): Try[Unit] = + def delete(entity: E with Entity, context: Product with Entity, details: Option[JsObject] = None)(implicit + graph: Graph, + authContext: AuthContext + ): Try[Unit] = auditSrv.create(Audit(Audit.delete, entity, details.map(_.toString())), Some(context), None) } @@ -272,6 +277,34 @@ 
class AuditSrv @Inject() ( Some(`case`) ) } + + class AlertAudit extends SelfContextObjectAudit[Alert] { + def createCase(alert: Alert with Entity, `case`: Case with Entity, details: JsObject)(implicit + graph: Graph, + authContext: AuthContext + ): Try[Unit] = { + val detailsWithAlert = details + ("fromAlert" -> Json.obj( + "_id" -> alert._id.toString, + "type" -> alert.`type`, + "source" -> alert.source, + "sourceRef" -> alert.sourceRef + )) + auditSrv.create(Audit(Audit.create, `case`, Some(detailsWithAlert.toString)), Some(`case`), Some(`case`)) + } + + def mergeToCase(alert: Alert with Entity, `case`: Case with Entity, details: JsObject)(implicit + graph: Graph, + authContext: AuthContext + ): Try[Unit] = { + val detailsWithAlert = details + ("fromAlert" -> Json.obj( + "_id" -> alert._id.toString, + "type" -> alert.`type`, + "source" -> alert.source, + "sourceRef" -> alert.sourceRef + )) + auditSrv.create(Audit(Audit.merge, `case`, Some(detailsWithAlert.toString)), Some(`case`), Some(`case`)) + } + } } object AuditOps { diff --git a/thehive/app/org/thp/thehive/services/Connector.scala b/thehive/app/org/thp/thehive/services/Connector.scala index fec3027ce2..e53de30a9d 100644 --- a/thehive/app/org/thp/thehive/services/Connector.scala +++ b/thehive/app/org/thp/thehive/services/Connector.scala @@ -1,10 +1,12 @@ package org.thp.thehive.services +import org.thp.scalligraph.models.SchemaStatus import org.thp.thehive.models.HealthStatus import play.api.libs.json.{JsObject, Json} trait Connector { val name: String - def status: JsObject = Json.obj("enabled" -> true) - def health: HealthStatus.Value = HealthStatus.Ok + def status: JsObject = Json.obj("enabled" -> true) + def health: HealthStatus.Value = HealthStatus.Ok + def schemaStatus: Option[SchemaStatus] = None } diff --git a/thehive/app/org/thp/thehive/services/OrganisationSrv.scala b/thehive/app/org/thp/thehive/services/OrganisationSrv.scala index 6d4115acd8..bc8221e55c 100644 --- 
a/thehive/app/org/thp/thehive/services/OrganisationSrv.scala +++ b/thehive/app/org/thp/thehive/services/OrganisationSrv.scala @@ -108,11 +108,12 @@ class OrganisationSrv @Inject() ( authContext: AuthContext, graph: Graph ): Try[Unit] = { + val toOrgIds = toOrganisations.map(_.fold(identity, getByName(_)._id.getOrFail("Organisation").get)).toSet val (orgToAdd, orgToRemove) = get(fromOrg) .links ._id .toIterator - .foldLeft((toOrganisations.toSet, Set.empty[EntityId])) { + .foldLeft((toOrgIds, Set.empty[EntityId])) { case ((toAdd, toRemove), o) if toAdd.contains(o) => (toAdd - o, toRemove) case ((toAdd, toRemove), o) => (toAdd, toRemove + o) } diff --git a/thehive/app/org/thp/thehive/services/StreamSrv.scala b/thehive/app/org/thp/thehive/services/StreamSrv.scala index e3a89c3e22..225e9c29fb 100644 --- a/thehive/app/org/thp/thehive/services/StreamSrv.scala +++ b/thehive/app/org/thp/thehive/services/StreamSrv.scala @@ -1,23 +1,23 @@ package org.thp.thehive.services -import java.io.NotSerializableException - import akka.actor.{actorRef2Scala, Actor, ActorIdentity, ActorRef, ActorSystem, Cancellable, Identify, PoisonPill, Props} import akka.pattern.{ask, AskTimeoutException} import akka.serialization.Serializer import akka.util.Timeout -import javax.inject.{Inject, Named, Singleton} import org.thp.scalligraph.auth.AuthContext import org.thp.scalligraph.models.Database import org.thp.scalligraph.services.EventSrv import org.thp.scalligraph.services.config.ApplicationConfig.finiteDurationFormat import org.thp.scalligraph.services.config.{ApplicationConfig, ConfigItem} import org.thp.scalligraph.traversal.TraversalOps._ +import org.thp.scalligraph.utils.Retry import org.thp.scalligraph.{EntityId, NotFoundError} import org.thp.thehive.services.AuditOps._ import play.api.Logger import play.api.libs.json.Json +import java.io.NotSerializableException +import javax.inject.{Inject, Named, Singleton} import scala.collection.immutable import scala.concurrent.duration.{DurationInt, 
FiniteDuration} import scala.concurrent.{ExecutionContext, Future} @@ -161,6 +161,22 @@ class StreamSrv @Inject() ( appConfig.item[FiniteDuration]("stream.longPolling.keepAlive", "Remove the stream after this time of inactivity") val keepAlive: FiniteDuration = keepAliveConfig.get + val maxAttemptsConfig: ConfigItem[Int, Int] = + appConfig.item[Int]("stream.get.maxAttempts", "How many attempts to get stream") + def maxAttempts: Int = maxAttemptsConfig.get + + val minBackoffConfig: ConfigItem[FiniteDuration, FiniteDuration] = + appConfig.item[FiniteDuration]("stream.get.minBackoff", "Minimum backoff for get stream attempts") + def minBackoff: FiniteDuration = minBackoffConfig.get + + val maxBackoffConfig: ConfigItem[FiniteDuration, FiniteDuration] = + appConfig.item[FiniteDuration]("stream.get.maxBackoff", "Maximum backoff for get stream attempts") + def maxBackoff: FiniteDuration = maxBackoffConfig.get + + val randomFactorConfig: ConfigItem[Double, Double] = + appConfig.item[Double]("stream.get.randomFactor", "Random factor for stream attempts backoff") + def randomFactor: Double = randomFactorConfig.get + def generateStreamId(): String = Seq.fill(streamLength)(alphanumeric(Random.nextInt(alphanumeric.size))).mkString def isValidStreamId(streamId: String): Boolean = streamId.length == streamLength && streamId.forall(alphanumeric.contains) @@ -180,23 +196,25 @@ class StreamSrv @Inject() ( def get(streamId: String): Future[Seq[EntityId]] = { implicit val timeout: Timeout = Timeout(refresh + 1.second) - // Check if stream actor exists - eventSrv - .publishAsk(StreamTopic(streamId))(Identify(1))(Timeout(2.seconds)) - // .ask(s"/user/stream-$streamId", Identify(1))(Timeout(2.seconds)) - .flatMap { - case ActorIdentity(1, Some(streamActor)) => - logger.debug(s"Stream actor found for stream $streamId") - (streamActor ? 
GetStreamMessages) - .map { - case AuditStreamMessage(ids @ _*) => ids - case _ => Nil - } - case other => Future.failed(NotFoundError(s"Stream $streamId doesn't exist: $other")) - } - .recoverWith { - case _: AskTimeoutException => Future.failed(NotFoundError(s"Stream $streamId doesn't exist")) - } + Retry(maxAttempts).withBackoff(minBackoff, maxBackoff, randomFactor)(system) { + // Check if stream actor exists + eventSrv + .publishAsk(StreamTopic(streamId))(Identify(1))(Timeout(2.seconds)) + // .ask(s"/user/stream-$streamId", Identify(1))(Timeout(2.seconds)) + .flatMap { + case ActorIdentity(1, Some(streamActor)) => + logger.debug(s"Stream actor found for stream $streamId") + (streamActor ? GetStreamMessages) + .map { + case AuditStreamMessage(ids @ _*) => ids + case _ => Nil + } + case other => Future.failed(NotFoundError(s"Stream $streamId doesn't exist: $other")) + } + .recoverWith { + case _: AskTimeoutException => Future.failed(NotFoundError(s"Stream $streamId doesn't exist")) + } + } } } diff --git a/thehive/app/org/thp/thehive/services/notification/notifiers/Webhook.scala b/thehive/app/org/thp/thehive/services/notification/notifiers/Webhook.scala index 024d84b732..f88bca8aeb 100644 --- a/thehive/app/org/thp/thehive/services/notification/notifiers/Webhook.scala +++ b/thehive/app/org/thp/thehive/services/notification/notifiers/Webhook.scala @@ -255,13 +255,14 @@ class Webhook( Future.failed(BadConfigurationError(s"The organisation ${organisation.name} is not authorised to use the webhook ${config.name}")) else if (user.isDefined) Future.failed(BadConfigurationError("The notification webhook must not be applied on user")) - else - for { + else { + val ws = new ProxyWS(config.wsConfig, mat) + val async = for { message <- Future.fromTry(buildMessage(config.version, audit)) _ = logger.debug(s"Request webhook with message $message") - resp <- new ProxyWS(config.wsConfig, mat) - .url(config.url) - .post(message) + resp <- ws.url(config.url).post(message) } yield 
if (resp.status >= 400) logger.warn(s"Webhook call on ${config.url} returns ${resp.status} ${resp.statusText}") else () + async.andThen { case _ => ws.close() } + } } diff --git a/thehive/app/org/thp/thehive/services/th3/Aggregation.scala b/thehive/app/org/thp/thehive/services/th3/Aggregation.scala index 79d89cf159..ce0f320799 100644 --- a/thehive/app/org/thp/thehive/services/th3/Aggregation.scala +++ b/thehive/app/org/thp/thehive/services/th3/Aggregation.scala @@ -3,14 +3,13 @@ package org.thp.thehive.services.th3 import java.lang.{Long => JLong} import java.time.temporal.ChronoUnit import java.util.{Calendar, Date, List => JList} - import org.apache.tinkerpop.gremlin.process.traversal.Order import org.scalactic.Accumulation._ import org.scalactic._ import org.thp.scalligraph.auth.AuthContext import org.thp.scalligraph.controllers._ import org.thp.scalligraph.models.Database -import org.thp.scalligraph.query.{Aggregation, PublicProperties} +import org.thp.scalligraph.query.{Aggregation, InputQuery, PublicProperties} import org.thp.scalligraph.traversal.TraversalOps._ import org.thp.scalligraph.traversal._ import org.thp.scalligraph.{BadRequestError, InvalidFormatAttributeError} @@ -64,7 +63,9 @@ object TH3Aggregation { }) } - def aggregationFieldParser: PartialFunction[String, FieldsParser[Aggregation]] = { + def aggregationFieldParser( + filterParser: FieldsParser[InputQuery[Traversal.Unk, Traversal.Unk]] + ): PartialFunction[String, FieldsParser[Aggregation]] = { case "field" => FieldsParser("FieldAggregation") { case (_, field) => @@ -73,12 +74,16 @@ object TH3Aggregation { FieldsParser.string.on("_field")(field), FieldsParser.string.sequence.on("_order")(field).orElse(FieldsParser.string.on("_order").map("order")(Seq(_))(field)), FieldsParser.long.optional.on("_size")(field), - fieldsParser.sequence.on("_select")(field) - )((aggName, fieldName, order, size, subAgg) => FieldAggregation(aggName, fieldName, order, size, subAgg)) + 
fieldsParser(filterParser).sequence.on("_select")(field), + filterParser.optional.on("_query")(field) + )((aggName, fieldName, order, size, subAgg, filter) => FieldAggregation(aggName, fieldName, order, size, subAgg, filter)) } case "count" => FieldsParser("CountAggregation") { - case (_, field) => FieldsParser.string.optional.on("_name")(field).map(aggName => AggCount(aggName)) + case (_, field) => + withGood(FieldsParser.string.optional.on("_name")(field), filterParser.optional.on("_query")(field))((aggName, filter) => + AggCount(aggName, filter) + ) } case "time" => FieldsParser("TimeAggregation") { @@ -91,11 +96,12 @@ object TH3Aggregation { .on("_fields")(field) .orElse(FieldsParser.string.on("_fields")(field).map(Seq(_))), //.map("toSeq")(f => Good(Seq(f)))), mergedIntervalParser.on("_interval").orElse(intervalParser)(field), - fieldsParser.sequence.on("_select")(field) - ) { (aggName, fieldNames, intervalUnit, subAgg) => + fieldsParser(filterParser).sequence.on("_select")(field), + filterParser.optional.on("_query")(field) + ) { (aggName, fieldNames, intervalUnit, subAgg, filter) => if (fieldNames.lengthCompare(1) > 0) logger.warn(s"Only one field is supported for time aggregation (aggregation $aggName, ${fieldNames.tail.mkString(",")} are ignored)") - TimeAggregation(aggName, fieldNames.head, intervalUnit._1, intervalUnit._2, subAgg) + TimeAggregation(aggName, fieldNames.head, intervalUnit._1, intervalUnit._2, subAgg, filter) } } case "avg" => @@ -103,32 +109,36 @@ object TH3Aggregation { case (_, field) => withGood( FieldsParser.string.optional.on("_name")(field), - FieldsParser.string.on("_field")(field) - )((aggName, fieldName) => AggAvg(aggName, fieldName)) + FieldsParser.string.on("_field")(field), + filterParser.optional.on("_query")(field) + )((aggName, fieldName, filter) => AggAvg(aggName, fieldName, filter)) } case "min" => FieldsParser("MinAggregation") { case (_, field) => withGood( FieldsParser.string.optional.on("_name")(field), - 
FieldsParser.string.on("_field")(field) - )((aggName, fieldName) => AggMin(aggName, fieldName)) + FieldsParser.string.on("_field")(field), + filterParser.optional.on("_query")(field) + )((aggName, fieldName, filter) => AggMin(aggName, fieldName, filter)) } case "max" => FieldsParser("MaxAggregation") { case (_, field) => withGood( FieldsParser.string.optional.on("_name")(field), - FieldsParser.string.on("_field")(field) - )((aggName, fieldName) => AggMax(aggName, fieldName)) + FieldsParser.string.on("_field")(field), + filterParser.optional.on("_query")(field) + )((aggName, fieldName, filter) => AggMax(aggName, fieldName, filter)) } case "sum" => FieldsParser("SumAggregation") { case (_, field) => withGood( FieldsParser.string.optional.on("_name")(field), - FieldsParser.string.on("_field")(field) - )((aggName, fieldName) => AggSum(aggName, fieldName)) + FieldsParser.string.on("_field")(field), + filterParser.optional.on("_query")(field) + )((aggName, fieldName, filter) => AggSum(aggName, fieldName, filter)) } case other => new FieldsParser[Aggregation]( @@ -141,12 +151,14 @@ object TH3Aggregation { ) } - implicit val fieldsParser: FieldsParser[Aggregation] = FieldsParser("aggregation") { - case (_, AggObj(name, field)) => aggregationFieldParser(name)(field) - } + def fieldsParser(filterParser: FieldsParser[InputQuery[Traversal.Unk, Traversal.Unk]]): FieldsParser[Aggregation] = + FieldsParser("aggregation") { + case (_, AggObj(name, field)) => aggregationFieldParser(filterParser)(name)(field) + } } -case class AggSum(aggName: Option[String], fieldName: String) extends Aggregation(aggName.getOrElse(s"sum_$fieldName")) { +case class AggSum(aggName: Option[String], fieldName: String, filter: Option[InputQuery[Traversal.Unk, Traversal.Unk]]) + extends Aggregation(aggName.getOrElse(s"sum_$fieldName")) { override def getTraversal( db: Database, publicProperties: PublicProperties, @@ -158,18 +170,22 @@ case class AggSum(aggName: Option[String], fieldName: String) extends 
Aggregatio val property = publicProperties .get[Traversal.UnkD, Traversal.UnkDU](fieldPath, traversalType) .getOrElse(throw BadRequestError(s"Property $fieldName for type $traversalType not found")) - traversal.coalesce( - t => - property - .select(fieldPath, t, authContext) - .sum - .domainMap(sum => Output(Json.obj(name -> JsNumber(BigDecimal(sum.toString))))) - .castDomain[Output[_]], - Output(Json.obj(name -> JsNull)) - ) + filter + .fold(traversal)(_(db, publicProperties, traversalType, traversal, authContext)) + .coalesce( + t => + property + .select(fieldPath, t, authContext) + .sum + .domainMap(sum => Output(Json.obj(name -> JsNumber(BigDecimal(sum.toString))))) + .castDomain[Output[_]], + Output(Json.obj(name -> JsNull)) + ) } } -case class AggAvg(aggName: Option[String], fieldName: String) extends Aggregation(aggName.getOrElse(s"sum_$fieldName")) { + +case class AggAvg(aggName: Option[String], fieldName: String, filter: Option[InputQuery[Traversal.Unk, Traversal.Unk]]) + extends Aggregation(aggName.getOrElse(s"sum_$fieldName")) { override def getTraversal( db: Database, publicProperties: PublicProperties, @@ -181,19 +197,22 @@ case class AggAvg(aggName: Option[String], fieldName: String) extends Aggregatio val property = publicProperties .get[Traversal.UnkD, Traversal.UnkDU](fieldPath, traversalType) .getOrElse(throw BadRequestError(s"Property $fieldName for type $traversalType not found")) - traversal.coalesce( - t => - property - .select(fieldPath, t, authContext) - .mean - .domainMap(avg => Output(Json.obj(name -> avg))) - .asInstanceOf[Traversal.Domain[Output[_]]], - Output(Json.obj(name -> JsNull)) - ) + filter + .fold(traversal)(_(db, publicProperties, traversalType, traversal, authContext)) + .coalesce( + t => + property + .select(fieldPath, t, authContext) + .mean + .domainMap(avg => Output(Json.obj(name -> avg))) + .asInstanceOf[Traversal.Domain[Output[_]]], + Output(Json.obj(name -> JsNull)) + ) } } -case class AggMin(aggName: Option[String], 
fieldName: String) extends Aggregation(aggName.getOrElse(s"min_$fieldName")) { +case class AggMin(aggName: Option[String], fieldName: String, filter: Option[InputQuery[Traversal.Unk, Traversal.Unk]]) + extends Aggregation(aggName.getOrElse(s"min_$fieldName")) { override def getTraversal( db: Database, publicProperties: PublicProperties, @@ -205,18 +224,21 @@ case class AggMin(aggName: Option[String], fieldName: String) extends Aggregatio val property = publicProperties .get[Traversal.UnkD, Traversal.UnkDU](fieldPath, traversalType) .getOrElse(throw BadRequestError(s"Property $fieldName for type $traversalType not found")) - traversal.coalesce( - t => - property - .select(fieldPath, t, authContext) - .min - .domainMap(min => Output(Json.obj(name -> property.mapping.selectRenderer.toJson(min)))), - Output(Json.obj(name -> JsNull)) - ) + filter + .fold(traversal)(_(db, publicProperties, traversalType, traversal, authContext)) + .coalesce( + t => + property + .select(fieldPath, t, authContext) + .min + .domainMap(min => Output(Json.obj(name -> property.mapping.selectRenderer.toJson(min)))), + Output(Json.obj(name -> JsNull)) + ) } } -case class AggMax(aggName: Option[String], fieldName: String) extends Aggregation(aggName.getOrElse(s"max_$fieldName")) { +case class AggMax(aggName: Option[String], fieldName: String, filter: Option[InputQuery[Traversal.Unk, Traversal.Unk]]) + extends Aggregation(aggName.getOrElse(s"max_$fieldName")) { override def getTraversal( db: Database, publicProperties: PublicProperties, @@ -228,18 +250,21 @@ case class AggMax(aggName: Option[String], fieldName: String) extends Aggregatio val property = publicProperties .get[Traversal.UnkD, Traversal.UnkDU](fieldPath, traversalType) .getOrElse(throw BadRequestError(s"Property $fieldName for type $traversalType not found")) - traversal.coalesce( - t => - property - .select(fieldPath, t, authContext) - .max - .domainMap(max => Output(Json.obj(name -> property.mapping.selectRenderer.toJson(max)))), - 
Output(Json.obj(name -> JsNull)) - ) + filter + .fold(traversal)(_(db, publicProperties, traversalType, traversal, authContext)) + .coalesce( + t => + property + .select(fieldPath, t, authContext) + .max + .domainMap(max => Output(Json.obj(name -> property.mapping.selectRenderer.toJson(max)))), + Output(Json.obj(name -> JsNull)) + ) } } -case class AggCount(aggName: Option[String]) extends Aggregation(aggName.getOrElse("count")) { +case class AggCount(aggName: Option[String], filter: Option[InputQuery[Traversal.Unk, Traversal.Unk]]) + extends Aggregation(aggName.getOrElse("count")) { override def getTraversal( db: Database, publicProperties: PublicProperties, @@ -247,7 +272,8 @@ case class AggCount(aggName: Option[String]) extends Aggregation(aggName.getOrEl traversal: Traversal.Unk, authContext: AuthContext ): Traversal.Domain[Output[_]] = - traversal + filter + .fold(traversal)(_(db, publicProperties, traversalType, traversal, authContext)) .count .domainMap(count => Output(Json.obj(name -> count))) .castDomain[Output[_]] @@ -260,7 +286,8 @@ case class FieldAggregation( fieldName: String, orders: Seq[String], size: Option[Long], - subAggs: Seq[Aggregation] + subAggs: Seq[Aggregation], + filter: Option[InputQuery[Traversal.Unk, Traversal.Unk]] ) extends Aggregation(aggName.getOrElse(s"field_$fieldName")) { lazy val logger: Logger = Logger(getClass) @@ -276,7 +303,16 @@ case class FieldAggregation( val property = publicProperties .get[Traversal.UnkD, Traversal.UnkDU](fieldPath, traversalType) .getOrElse(throw BadRequestError(s"Property $fieldName for type $traversalType not found")) - val groupedVertices = property.select(fieldPath, traversal.as(label), authContext).group(_.by, _.by(_.select(label).fold)).unfold + val groupedVertices = property + .select( + fieldPath, + filter + .fold(traversal)(_(db, publicProperties, traversalType, traversal, authContext)) + .as(label), + authContext + ) + .group(_.by, _.by(_.select(label).fold)) + .unfold val 
sortedAndGroupedVertex = orders .map { case order if order.headOption.contains('-') => order.tail -> Order.desc @@ -327,7 +363,8 @@ case class TimeAggregation( fieldName: String, interval: Long, unit: ChronoUnit, - subAggs: Seq[Aggregation] + subAggs: Seq[Aggregation], + filter: Option[InputQuery[Traversal.Unk, Traversal.Unk]] ) extends Aggregation(aggName.getOrElse(fieldName)) { val calendar: Calendar = Calendar.getInstance() @@ -378,7 +415,13 @@ case class TimeAggregation( .getOrElse(throw BadRequestError(s"Property $fieldName for type $traversalType not found")) val label = StepLabel[Traversal.UnkD, Traversal.UnkG, Converter[Traversal.UnkD, Traversal.UnkG]] val groupedVertex = property - .select(fieldPath, traversal.as(label), authContext) + .select( + fieldPath, + filter + .fold(traversal)(_(db, publicProperties, traversalType, traversal, authContext)) + .as(label), + authContext + ) .cast[Date, Date] .graphMap[Long, JLong, Converter[Long, JLong]](dateToKey, Converter.long) .group(_.by, _.by(_.select(label).fold)) diff --git a/thehive/conf/reference.conf b/thehive/conf/reference.conf index 523fb7d7d1..cfab71532e 100644 --- a/thehive/conf/reference.conf +++ b/thehive/conf/reference.conf @@ -36,12 +36,21 @@ datastore.attachment.password: "malware" describe.cache.expire: 10 minutes -stream.longPolling { - refresh: 1 minute - maxWait: 1 second - graceDuration: 100 milliseconds - keepAlive: 5 minutes - pollingDuration: 1 second +stream { + longPolling { + refresh: 1 minute + maxWait: 1 second + graceDuration: 100 milliseconds + keepAlive: 5 minutes + pollingDuration: 1 second + + } + get { + maxAttempts = 5 + minBackoff = 10 milliseconds + maxBackoff = 50 milliseconds + randomFactor = 0.2 + } } tags { diff --git a/thehive/test/org/thp/thehive/controllers/v0/StatusCtrlTest.scala b/thehive/test/org/thp/thehive/controllers/v0/StatusCtrlTest.scala index eb1921fae6..d1d1f91ce7 100644 --- a/thehive/test/org/thp/thehive/controllers/v0/StatusCtrlTest.scala +++ 
b/thehive/test/org/thp/thehive/controllers/v0/StatusCtrlTest.scala @@ -1,5 +1,6 @@ package org.thp.thehive.controllers.v0 +import org.thp.scalligraph.models.SchemaStatus import org.thp.scalligraph.{AppBuilder, ScalligraphApplicationLoader} import org.thp.thehive.models.HealthStatus import org.thp.thehive.services.Connector @@ -28,6 +29,8 @@ class StatusCtrlTest extends PlaySpecification with TestAppBuilder { ) override def health: HealthStatus.Value = HealthStatus.Warning + + override def schemaStatus: Option[SchemaStatus] = None } override def appConfigure: AppBuilder = super.appConfigure.multiBindInstance[Connector](fakeCortexConnector) @@ -68,7 +71,8 @@ class StatusCtrlTest extends PlaySpecification with TestAppBuilder { "capabilities" -> Seq("changePassword", "setPassword", "authByKey"), "ssoAutoLogin" -> config.get[Boolean]("user.autoCreateOnSso"), "pollingDuration" -> 1000 - ) + ), + "schemaStatus" -> Json.arr() ) resultJson shouldEqual expectedJson