Skip to content

Commit 6792c93

Browse files
authored
#70: Update app properties in readme (#637)
1 parent 7cf6e19 commit 6792c93

File tree

2 files changed

+62
-22
lines changed

2 files changed

+62
-22
lines changed

README.md

Lines changed: 56 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -77,42 +77,63 @@ Adjusted application properties have to be provided. An application properties t
7777
7878
# Environment where application will be running
7979
environment=Local
80+
# Maximum number of workflows that can be triggered in bulk (Default value 10)
81+
application.maximumNumberOfWorkflowsInBulkRun=10
82+
```
83+
```
84+
# Unique app id, for easier application identification
85+
appUniqueId=
86+
```
87+
```
88+
# Health check settings
89+
health.databaseConnection.timeoutMillis=120000
90+
health.yarnConnection.testEndpoint=/cluster/cluster
91+
health.yarnConnection.timeoutMillis=120000
8092
```
8193
```
8294
# How will users authenticate. Available options: inmemory, ldap
8395
auth.mechanism=inmemory
96+
#If set, all users that do not have admin role will not have access to admin protected endpoints
97+
auth.admin.role=ROLE_ADMIN
8498
# INMEMORY authentication: username and password defined here will be used for authentication.
85-
auth.inmemory.user=user
86-
auth.inmemory.password=password
99+
auth.inmemory.user=hyperdriver-user
100+
auth.inmemory.password=hyperdriver-password
101+
auth.inmemory.admin.user=hyperdriver-admin-user
102+
auth.inmemory.admin.password=hyperdriver-admin-password
87103
# LDAP authentication: props template that has to be defined in case of LDAP authentication
88104
auth.ad.domain=
89105
auth.ad.server=
90106
auth.ldap.search.base=
91107
auth.ldap.search.filter=
92108
```
93109
```
94-
# Unique app id, for easier application identification
95-
appUniqueId=
96-
```
97-
```
98110
# Core properties.
99111
# How many threads to use for each part of the "scheduler".
100112
# Heart beat interval in milliseconds.
101113
# Lag threshold, before instance is deactivated by another instance.
114+
scheduler.autostart=true
102115
scheduler.thread.pool.size=10
103116
scheduler.sensors.thread.pool.size=20
117+
scheduler.sensors.changedSensorsChunkQuerySize=100
104118
scheduler.executors.thread.pool.size=30
105119
scheduler.jobs.parallel.number=100
106120
scheduler.heart.beat=5000
107121
scheduler.lag.threshold=20000
108122
```
109123
```
124+
# Properties used to send notifications to users.
125+
notification.enabled=false
126+
notification.sender.address=
127+
spring.mail.host=
128+
spring.mail.port=
129+
```
130+
```
110131
#Kafka sensor properties. Not all are required. Adjust according to your use case.
111-
kafkaSource.group.id=hyper_drive_${appUniqueId}
112-
kafkaSource.key.deserializer=org.apache.kafka.common.serialization.StringDeserializer
113-
kafkaSource.value.deserializer=org.apache.kafka.common.serialization.StringDeserializer
132+
kafkaSource.group.id.prefix=hyper_drive_${appUniqueId}
114133
kafkaSource.poll.duration=500
115-
kafkaSource.max.poll.records=3
134+
kafkaSource.properties.key.deserializer=org.apache.kafka.common.serialization.StringDeserializer
135+
kafkaSource.properties.value.deserializer=org.apache.kafka.common.serialization.StringDeserializer
136+
kafkaSource.properties.max.poll.records=3
116137
kafkaSource.properties.enable.auto.commit=false
117138
kafkaSource.properties.auto.offset.reset=latest
118139
kafkaSource.properties.security.protocol=
@@ -126,13 +147,20 @@ kafkaSource.properties.sasl.mechanism=
126147
kafkaSource.properties.sasl.jaas.config=
127148
```
128149
```
129-
#Spark yarn sink properties. Properties used to deploy and run Spark job in Yarn. Not all are required. Adjust according to your use case.
130-
spark.submit.thread.pool.size=
131-
sparkYarnSink.hadoopResourceManagerUrlBase=
132-
sparkYarnSink.hadoopConfDir=
133-
sparkYarnSink.sparkHome=
134-
sparkYarnSink.master=yarn
150+
# Recurring sensor properties.
151+
recurringSensor.maxJobsPerDuration=8
152+
recurringSensor.duration=1h
153+
```
154+
```
155+
#Spark properties. Properties used to deploy and run Spark job. Not all are required. Adjust according to your use case.
156+
#Where spark jobs will be executed. Available options: yarn, emr.
157+
spark.submitApi=yarn
158+
159+
#Submit api = YARN
135160
sparkYarnSink.submitTimeout=160000
161+
sparkYarnSink.hadoopConfDir=/opt/hadoop
162+
sparkYarnSink.sparkHome=/opt/spark
163+
sparkYarnSink.master=yarn
136164
sparkYarnSink.filesToDeploy=
137165
sparkYarnSink.additionalConfs.spark.ui.port=
138166
sparkYarnSink.additionalConfs.spark.executor.extraJavaOptions=
@@ -145,6 +173,16 @@ sparkYarnSink.additionalConfs.spark.yarn.keytab=
145173
sparkYarnSink.additionalConfs.spark.yarn.principal=
146174
sparkYarnSink.additionalConfs.spark.shuffle.service.enabled=true
147175
sparkYarnSink.additionalConfs.spark.dynamicAllocation.enabled=true
176+
177+
#Submit api = EMR
178+
spark.emr.clusterId=
179+
spark.emr.filesToDeploy=
180+
spark.emr.additionalConfs=
181+
182+
#Common properties for Submit api = YARN and EMR
183+
sparkYarnSink.hadoopResourceManagerUrlBase=
184+
sparkYarnSink.userUsedToKillJob=
185+
spark.submit.thread.pool.size=10
148186
```
149187
```
150188
#Postgresql properties for connection to trigger database
@@ -155,7 +193,9 @@ db.password=
155193
db.keepAliveConnection=true
156194
db.connectionPool=HikariCP
157195
db.numThreads=4
196+
158197
db.skip.liquibase=false
198+
spring.liquibase.change-log=classpath:/db_scripts/liquibase/db.changelog.yml
159199
```
160200

161201
## Embedded Tomcat

src/main/resources/application.properties

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -30,19 +30,19 @@ management.endpoint.health.show-details=always
3030
javamelody.management-endpoint-monitoring-enabled=true
3131

3232
# Health check settings
33-
health.databaseConnection.timeoutMillis=60000
33+
health.databaseConnection.timeoutMillis=120000
3434
health.yarnConnection.testEndpoint=/cluster/cluster
35-
health.yarnConnection.timeoutMillis=60000
35+
health.yarnConnection.timeoutMillis=120000
3636

3737
# How will users authenticate. Available options: inmemory, ldap
3838
auth.mechanism=inmemory
3939
# To disable, remove auth.admin.role property
4040
auth.admin.role=ROLE_ADMIN
4141
# INMEMORY authentication: username and password defined here will be used for authentication.
42-
auth.inmemory.user=user
43-
auth.inmemory.password=password
44-
auth.inmemory.admin.user=user-admin
45-
auth.inmemory.admin.password=password-admin
42+
auth.inmemory.user=hyperdriver-user
43+
auth.inmemory.password=hyperdriver-password
44+
auth.inmemory.admin.user=hyperdriver-admin-user
45+
auth.inmemory.admin.password=hyperdriver-admin-password
4646
# LDAP authentication: props template that has to be defined in case of LDAP authentication
4747
#auth.ad.domain=
4848
#auth.ad.server=

0 commit comments

Comments
 (0)