Commit abde6a5

#659: Reduce logging in hyperdrive (#663)
* #659: Reduce logging in hyperdrive
1 parent 979138f commit abde6a5

11 files changed, +14 -15 lines changed

src/main/resources/logback.xml

Lines changed: 1 addition & 1 deletion
@@ -43,7 +43,7 @@
     </layout>
   </appender>
 
-  <logger name="za.co.absa.hyperdrive.trigger" level="debug" additivity="false">
+  <logger name="za.co.absa.hyperdrive.trigger" level="info" additivity="false">
     <appender-ref ref="FILE-ROLLING"/>
     <appender-ref ref="Console" />
   </logger>
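
Setting the za.co.absa.hyperdrive.trigger logger to info means every logger.debug call in that package hierarchy is now filtered out by logback, while info and above still reach the FILE-ROLLING and Console appenders. A minimal sketch of the effect, assuming this logback.xml is on the classpath (the object name and logger name suffix are hypothetical):

import org.slf4j.LoggerFactory

object LoggingLevelSketch {
  // Resolves under the za.co.absa.hyperdrive.trigger hierarchy, so the
  // <logger level="info"> entry above applies to it.
  private val logger = LoggerFactory.getLogger("za.co.absa.hyperdrive.trigger.sketch")

  def demo(): Unit = {
    logger.debug("suppressed: below the configured info level")
    logger.info("emitted to FILE-ROLLING and Console")
  }
}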

src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/controllers/NotificationRuleController.scala

Lines changed: 0 additions & 2 deletions
@@ -15,7 +15,6 @@
 
 package za.co.absa.hyperdrive.trigger.api.rest.controllers
 
-import org.slf4j.LoggerFactory
 import org.springframework.web.bind.annotation._
 import za.co.absa.hyperdrive.trigger.api.rest.services.NotificationRuleService
 import za.co.absa.hyperdrive.trigger.models._
@@ -28,7 +27,6 @@ import scala.concurrent.ExecutionContext.Implicits.global
 
 @RestController
 class NotificationRuleController @Inject()(notificationRuleService: NotificationRuleService) {
-  private val logger = LoggerFactory.getLogger(this.getClass)
 
   @PutMapping(path = Array("/notificationRule"))
   def createNotificationRule(@RequestBody notificationRule: NotificationRule): CompletableFuture[NotificationRule] = {

src/main/scala/za/co/absa/hyperdrive/trigger/configuration/liquibase/LiquibaseConfiguration.scala

Lines changed: 1 addition & 1 deletion
@@ -69,7 +69,7 @@ class LiquibaseConfiguration(properties: LiquibaseProperties, dbConfig: Database
     if (!unrunChangeSets.isEmpty) {
       liquibase.update(contexts)
     } else {
-      configLogger.debug("Database schema is up-to-date")
+      configLogger.info("Database schema is up-to-date")
     }
   }

src/main/scala/za/co/absa/hyperdrive/trigger/scheduler/JobScheduler.scala

Lines changed: 2 additions & 2 deletions
@@ -63,7 +63,7 @@ class JobScheduler @Inject()(sensors: Sensors, executors: Executors, dagInstance
     runningScheduler =
       Future {
         while (isManagerRunningAtomic.get()) {
-          logger.debug("Running manager heart beat.")
+          logger.info("Running manager heart beat.")
           assignWorkflows(firstIteration)
           sendNotifications()
           firstIteration = false
@@ -72,7 +72,7 @@ class JobScheduler @Inject()(sensors: Sensors, executors: Executors, dagInstance
       }
     runningScheduler.onComplete {
       case Success(_) =>
-        logger.debug("Manager stopped.")
+        logger.info("Manager stopped.")
       case Failure(exception) =>
         logger.error(s"Manager stopped with exception.", exception)
     }
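
The two promoted messages sit around the scheduler's heartbeat loop: the first fires once per iteration while the manager is running, the second once when the loop's Future completes. A stripped-down sketch of that pattern, with hypothetical names that are not taken from JobScheduler itself:

import java.util.concurrent.atomic.AtomicBoolean
import org.slf4j.LoggerFactory
import scala.concurrent.{ExecutionContext, Future}
import scala.util.{Failure, Success}

object HeartbeatSketch {
  private val logger = LoggerFactory.getLogger(getClass)
  private val running = new AtomicBoolean(true)

  def start()(implicit ec: ExecutionContext): Future[Unit] = {
    val loop = Future {
      while (running.get()) {
        logger.info("Running manager heart beat.") // one line per iteration, now at info
        Thread.sleep(1000)                          // placeholder for the real scheduling work
      }
    }
    loop.onComplete {
      case Success(_)         => logger.info("Manager stopped.")
      case Failure(exception) => logger.error("Manager stopped with exception.", exception)
    }
    loop
  }

  def stop(): Unit = running.set(false)
}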

src/main/scala/za/co/absa/hyperdrive/trigger/scheduler/cluster/SchedulerInstanceService.scala

Lines changed: 1 addition & 1 deletion
@@ -48,7 +48,7 @@ class SchedulerInstanceServiceImpl @Inject()(schedulerInstanceRepository: Schedu
         Future{}
       }
       deactivatedCount <- schedulerInstanceRepository.deactivateLaggingInstances(instanceId, currentHeartbeat, lagThreshold)
-      _ = if (deactivatedCount != 0) logger.debug(s"Deactivated $deactivatedCount instances at current heartbeat $currentHeartbeat")
+      _ = if (deactivatedCount != 0) logger.info(s"Deactivated $deactivatedCount instances at current heartbeat $currentHeartbeat")
       allInstances <- schedulerInstanceRepository.getAllInstances()
     } yield allInstances
   }
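
The changed line uses a plain `_ =` binding to slip a side-effecting log statement between Future steps in a for-comprehension, and the count guard keeps iterations with nothing to report silent even at info. A small sketch of that shape, with hypothetical inputs standing in for the repository calls:

import org.slf4j.LoggerFactory
import scala.concurrent.{ExecutionContext, Future}

object ConditionalLogSketch {
  private val logger = LoggerFactory.getLogger(getClass)

  def deactivateAndList(deactivate: Future[Int], listAll: Future[Seq[String]])
                       (implicit ec: ExecutionContext): Future[Seq[String]] =
    for {
      deactivatedCount <- deactivate
      // value binding: runs the log side effect without introducing another Future
      _ = if (deactivatedCount != 0) logger.info(s"Deactivated $deactivatedCount instances")
      allInstances <- listAll
    } yield allInstances
}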

src/main/scala/za/co/absa/hyperdrive/trigger/scheduler/eventProcessor/EventProcessor.scala

Lines changed: 1 addition & 1 deletion
@@ -32,7 +32,7 @@ class EventProcessor(eventRepository: EventRepository,
 
   def eventProcessor(triggeredBy: String)(events: Seq[Event], sensorId: Long)(implicit ec: ExecutionContext): Future[Boolean] = {
     val fut = processEvents(events, sensorId, triggeredBy)
-    logger.debug(s"Processing events. Sensor id: $sensorId. Events: ${events.map(_.id)}")
+    logger.info(s"Processing events. Sensor id: $sensorId. Events: ${events.map(_.id)}")
     fut
   }

src/main/scala/za/co/absa/hyperdrive/trigger/scheduler/executors/Executors.scala

Lines changed: 2 additions & 1 deletion
@@ -91,7 +91,7 @@ class Executors @Inject()(dagInstanceRepository: DagInstanceRepository, jobInsta
       }
     }
     fut.onComplete {
-      case Success(_) => logger.debug(s"Executing job. Job instance id = ${jobInstance}")
+      case Success(_) => logger.debug(s"Executing job. Job instance = ${jobInstance}")
       case Failure(exception) => {
         logger.error(s"Executing job failed. Job instance id = ${jobInstance}.", exception)
       }
@@ -101,6 +101,7 @@ class Executors @Inject()(dagInstanceRepository: DagInstanceRepository, jobInsta
   }
 
   private def updateJob(jobInstance: JobInstance): Future[Unit] = {
+    logger.info(s"Job updated. ID = ${jobInstance.id} STATUS = ${jobInstance.jobStatus} EXECUTOR_ID = ${jobInstance.executorJobId}")
     jobInstanceRepository.updateJob(jobInstance)
   }
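
One side note on the added statement: Scala string interpolation builds the message eagerly, so an s-interpolated log line pays the formatting cost even when the level filters it out, whereas SLF4J's parameterized form defers formatting until the level check passes. This is not something the commit changes, just a sketch of the trade-off with hypothetical arguments:

import org.slf4j.LoggerFactory

object LogCostSketch {
  private val logger = LoggerFactory.getLogger(getClass)

  def logUpdate(id: String, status: String): Unit = {
    // interpolated message is built eagerly, whatever the configured level
    logger.debug(s"Job updated. ID = $id STATUS = $status")
    // parameterized message is only formatted if debug is enabled
    logger.debug("Job updated. ID = {} STATUS = {}", id, status)
  }
}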

src/main/scala/za/co/absa/hyperdrive/trigger/scheduler/executors/shell/ShellExecutor.scala

Lines changed: 1 addition & 1 deletion
@@ -40,7 +40,7 @@ object ShellExecutor {
     updateJob(jobInstance.copy(jobStatus = Running)).map { _ =>
       Try {
         jobParameters.scriptLocation.!(new ProcessLogger {
-          override def out(s: => String): Unit = logger.info(s)
+          override def out(s: => String): Unit = logger.debug(s)
           override def err(s: => String): Unit = logger.error(s)
           override def buffer[T](f: => T): T = f
         })
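
Routing the script's stdout to debug means routine shell output disappears under the info level configured in logback.xml, while stderr still surfaces at error. A self-contained sketch of the same ProcessLogger wiring (the object and the command parameter are hypothetical, not taken from ShellExecutor):

import org.slf4j.LoggerFactory
import scala.sys.process._

object ShellLoggingSketch {
  private val logger = LoggerFactory.getLogger(getClass)

  // Runs a shell command and returns its exit code; stdout is demoted to
  // debug, stderr is kept at error, mirroring the change above.
  def run(command: String): Int =
    command.!(ProcessLogger(out => logger.debug(out), err => logger.error(err)))
}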

src/main/scala/za/co/absa/hyperdrive/trigger/scheduler/sensors/Sensors.scala

Lines changed: 2 additions & 2 deletions
@@ -45,7 +45,7 @@ class Sensors @Inject()(eventProcessor: EventProcessor, sensorRepository: Sensor
   private val sensors: mutable.Map[Long, Sensor[ _<: SensorProperties]] = mutable.Map.empty[Long, Sensor[ _<: SensorProperties]]
 
   def processEvents(assignedWorkflowIds: Seq[Long], firstIteration: Boolean): Future[Unit] = {
-    logger.debug(s"Processing events. Sensors: ${sensors.keys}")
+    logger.info(s"Processing events. Sensors: ${sensors.keys}")
     removeReleasedSensors(assignedWorkflowIds)
     val fut = for {
       _ <- removeInactiveSensors()
@@ -57,7 +57,7 @@ class Sensors @Inject()(eventProcessor: EventProcessor, sensorRepository: Sensor
     }
 
     fut.onComplete {
-      case Success(_) => logger.debug("Processing events successful")
+      case Success(_) => logger.info("Processing events successful")
       case Failure(exception) => {
         logger.debug("Processing events failed.", exception)
       }

src/main/scala/za/co/absa/hyperdrive/trigger/scheduler/sensors/kafka/KafkaSensor.scala

Lines changed: 2 additions & 2 deletions
@@ -63,7 +63,7 @@ class KafkaSensor(
         }
       })
     } catch {
-      case e: Exception => logger.debug(s"$logMsgPrefix. Exception during subscribe.", e)
+      case e: Exception => logger.error(s"$logMsgPrefix. Exception during subscribe.", e)
     }
 
   override def poll(): Future[Unit] = {
@@ -108,7 +108,7 @@ class KafkaSensor(
     val payload = Try(
       Json.parse(record.value())
     ).getOrElse {
-      logger.debug(s"$logMsgPrefix. Invalid message.")
+      logger.error(s"$logMsgPrefix. Invalid message.")
       Json.parse(s"""{"errorMessage": "${record.value()}"}""")
     }
     Event(sourceEventId, sensorDefinition.id, payload)
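
The two KafkaSensor changes promote genuine failure paths (a subscribe exception, an unparseable record) from debug to error, so they remain visible at the new info default. The fallback-payload shape around the second change looks roughly like this (hypothetical helper, play-json as in the original):

import org.slf4j.LoggerFactory
import play.api.libs.json.{JsValue, Json}
import scala.util.Try

object PayloadParseSketch {
  private val logger = LoggerFactory.getLogger(getClass)

  // Parses a record value; on failure it logs at error and wraps the raw
  // value into a small JSON error payload instead of failing silently.
  def parse(raw: String): JsValue =
    Try(Json.parse(raw)).getOrElse {
      logger.error("Invalid message.")
      Json.parse(s"""{"errorMessage": "$raw"}""")
    }
}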
