#!/bin/bash
#
#
# require that this file be sourced from the folder containing it (originally for Docker Compose; possibly no longer needed)
if [ ! -f ".env" ]; then
  echo ".env needs to be in the current working folder of the process that sourced it"
  exit 1
fi
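# For example, a script in this folder would typically begin with:
#   cd "$(dirname "$0")" && . ./.env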
# Helper functions
. ./.fun
os            # detect the operating system
hostip        # determine HOST_IP (used below)
sethostname   # set HOST_NAME (used below)
# Don't source this file twice.
if [ -n "${SOURCED_OSS_ENV+x}" ]; then
  echo "Returning (already ran .env)"
  return
fi
export SOURCED_OSS_ENV=true
# Proxy settings [optional] - set if your network requires a proxy to connect to the Internet
export httpProxyHost=
export httpProxyPort=80
if [ -z "${httpProxyHost}" ]; then
  export http_proxy=""
else
  export http_proxy=http://${httpProxyHost}:${httpProxyPort}
fi
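# Example (hypothetical corporate proxy): since httpProxyHost/httpProxyPort are plain
# assignments above (not overridable from the environment), edit them in place, e.g.
#   httpProxyHost=proxy.example.com and httpProxyPort=8080
# which yields http_proxy=http://proxy.example.com:8080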
export httpsProxyHost=$httpProxyHost
export httpsProxyPort=$httpProxyPort
export https_proxy=$http_proxy
export no_proxy=${HOST_IP},localhost
export nonProxyHosts="${HOST_IP}|localhost"
## Volume map [optional] - mapping of external to internal paths, including the -v switch. Example: -v $(pwd):/wd
export VOL_MAP="-v $(pwd):/wd -v $(pwd)/../../.m2:/root/.m2"
## RUN_OPTS [optional] - additional options to pass to the run command. Example: -e POSTGRES_DB=dbname
export RUN_OPTS="-e http_proxy=$http_proxy -e https_proxy=$https_proxy -e no_proxy=$no_proxy -e httpProxyHost=$httpProxyHost -e httpProxyPort=$httpProxyPort -e httpsProxyHost=$httpsProxyHost -e httpsProxyPort=$httpsProxyPort -e nonProxyHosts=$nonProxyHosts"
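# A sketch of how VOL_MAP and RUN_OPTS are typically consumed (image name hypothetical):
#   docker run $VOL_MAP $RUN_OPTS semtk/some-service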
## JVM_OPTIONS [optional] - values specified here are passed to Java processes
export JVM_OPTIONS=${JVM_OPTIONS:-"-Xmx20G -Xincgc"}
export JVM_OPTIONS_LARGE_MEMORY=${JVM_OPTIONS_LARGE_MEMORY:-"-Xmx50G -Xincgc"}
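# Most settings below use the ${VAR:-default} idiom, so each can be overridden by
# exporting the variable before sourcing this file, e.g. (hypothetical heap size):
#   export JVM_OPTIONS="-Xmx8G"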
## setting SSL_ENABLED=true (and populating the 3 keystore settings below) makes services accept HTTPS
export SSL_ENABLED=${SSL_ENABLED:-false}
export SSL_KEY_STORE_TYPE=${SSL_KEY_STORE_TYPE:-PKCS12}
export SSL_KEY_STORE=${SSL_KEY_STORE:-}
export SSL_KEY_STORE_PASSWORD=${SSL_KEY_STORE_PASSWORD:-}
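# Example of enabling HTTPS (keystore path and password are hypothetical):
#   export SSL_ENABLED=true
#   export SSL_KEY_STORE=/path/to/keystore.p12
#   export SSL_KEY_STORE_PASSWORD=changeit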
## server
export SERVER_ADDRESS=${SERVER_ADDRESS:-}
export SERVICE_HOST=${SERVICE_HOST:-${HOST_IP}}
export WEB_HOST=${WEB_HOST:-${HOST_NAME}}
## ports
export PORT_SPARQLDB=${PORT_SPARQLDB:-2420}
export PORT_SPARQLGRAPH_WEB=${PORT_SPARQLGRAPH_WEB:-8860}
export PORT_SPARQL_QUERY_SERVICE=${PORT_SPARQL_QUERY_SERVICE:-12050}
export PORT_SPARQLGRAPH_STATUS_SERVICE=${PORT_SPARQLGRAPH_STATUS_SERVICE:-12051}
export PORT_SPARQLGRAPH_RESULTS_SERVICE=${PORT_SPARQLGRAPH_RESULTS_SERVICE:-12052}
export PORT_DISPATCH_SERVICE=${PORT_DISPATCH_SERVICE:-12053}
export PORT_HIVE_SERVICE=${PORT_HIVE_SERVICE:-12055}
export PORT_NODEGROUPSTORE_SERVICE=${PORT_NODEGROUPSTORE_SERVICE:-12056}
export PORT_ONTOLOGYINFO_SERVICE=${PORT_ONTOLOGYINFO_SERVICE:-12057}
export PORT_NODEGROUPEXECUTION_SERVICE=${PORT_NODEGROUPEXECUTION_SERVICE:-12058}
export PORT_NODEGROUP_SERVICE=${PORT_NODEGROUP_SERVICE:-12059}
export PORT_UTILITY_SERVICE=${PORT_UTILITY_SERVICE:-12060}
export PORT_INGESTION_SERVICE=${PORT_INGESTION_SERVICE:-12091}
export PORT_ATHENA_SERVICE=${PORT_ATHENA_SERVICE:-12062}
export PORT_EDCQUERYGEN_SERVICE=${PORT_EDCQUERYGEN_SERVICE:-12054}
export PORT_BINARYFILE_SERVICE=${PORT_BINARYFILE_SERVICE:-12064}
export PORT_ARANGODB_SERVICE=${PORT_ARANGODB_SERVICE:-12065}
export PORT_FDCSAMPLE_SERVICE=${PORT_FDCSAMPLE_SERVICE:-12066}
export PORT_FDCCACHE_SERVICE=${PORT_FDCCACHE_SERVICE:-12068}
export PORT_FILESTAGING_SERVICE=${PORT_FILESTAGING_SERVICE:-12069}
## dataset for jobs and EDC services
export SERVICES_DATASET_SERVER_URL=${SERVICES_DATASET_SERVER_URL:-http://${SERVICE_HOST}:${PORT_SPARQLDB}}
export SERVICES_DATASET_ENDPOINT_TYPE=${SERVICES_DATASET_ENDPOINT_TYPE:-virtuoso}
export SERVICES_DATASET_DOMAIN=${SERVICES_DATASET_DOMAIN:-http://}
export SERVICES_DATASET=${SERVICES_DATASET:-http://research.ge.com/semtk/services}
## dataset for integration testing
export TESTGRAPH_SERVER_URL=${TESTGRAPH_SERVER_URL:-${SERVICES_DATASET_SERVER_URL}}
export TESTGRAPH_ENDPOINT_TYPE=${TESTGRAPH_ENDPOINT_TYPE:-${SERVICES_DATASET_ENDPOINT_TYPE}}
## sparqldb service
export DATA_SPARQLDB=${DATA_SPARQLDB:-$(pwd)/sparqlDB/virtuoso}
export PWD_SPARQLDB=${PWD_SPARQLDB:-dba}
# protocol for calling the services
export SERVICE_PROTOCOL=${SERVICE_PROTOCOL:-http}
## sparql query service
export SPARQLQUERY_SERVICE_HOST=${SPARQLQUERY_SERVICE_HOST:-${SERVICE_HOST}}
export SPARQLQUERY_SERVICE_PROTOCOL=${SPARQLQUERY_SERVICE_PROTOCOL:-${SERVICE_PROTOCOL}}
export SPARQLQUERY_SERVICE_ENDPOINT=${SPARQLQUERY_SERVICE_ENDPOINT:-/sparqlQueryService/query}
export SPARQLQUERY_SERVICE_ENDPOINT_AUTH=${SPARQLQUERY_SERVICE_ENDPOINT_AUTH:-/sparqlQueryService/queryAuth}
# TODO credentials should be made specific to dataset server url
export SPARQLQUERY_SERVICE_USER=${SPARQLQUERY_SERVICE_USER:-dba}
export SPARQLQUERY_SERVICE_PWD=${SPARQLQUERY_SERVICE_PWD:-dba}
export NEPTUNE_UPLOAD_S3_CLIENT_REGION=${NEPTUNE_UPLOAD_S3_CLIENT_REGION:-}
export NEPTUNE_UPLOAD_S3_BUCKET_NAME=${NEPTUNE_UPLOAD_S3_BUCKET_NAME:-}
export NEPTUNE_UPLOAD_S3_AWS_IAM_ROLE_ARN=${NEPTUNE_UPLOAD_S3_AWS_IAM_ROLE_ARN:-}
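# Example for uploads to a Neptune-backed store via S3 (all values hypothetical):
#   export NEPTUNE_UPLOAD_S3_CLIENT_REGION=us-east-1
#   export NEPTUNE_UPLOAD_S3_BUCKET_NAME=my-staging-bucket
#   export NEPTUNE_UPLOAD_S3_AWS_IAM_ROLE_ARN=arn:aws:iam::123456789012:role/NeptuneLoadFromS3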
## status service
export STATUS_SERVICE_HOST=${STATUS_SERVICE_HOST:-${SERVICE_HOST}}
export STATUS_SERVICE_PROTOCOL=${STATUS_SERVICE_PROTOCOL:-${SERVICE_PROTOCOL}}
export statusJobMaxWaitMsec=${statusJobMaxWaitMsec:-300000}
export statusLoggingEnabled=${statusLoggingEnabled:-false}
export statusApplicationLogName=${statusApplicationLogName:-StatusService}
## results service
export RESULTS_SERVICE_HOST=${RESULTS_SERVICE_HOST:-${SERVICE_HOST}}
export RESULTS_SERVICE_PROTOCOL=${RESULTS_SERVICE_PROTOCOL:-${SERVICE_PROTOCOL}}
export RESULTS_SERVICE_MULTIPART_MAXFILESIZE=${RESULTS_SERVICE_MULTIPART_MAXFILESIZE:-1000MB}
export RESULTS_SERVICE_MULTIPART_MAXREQUESTSIZE=${RESULTS_SERVICE_MULTIPART_MAXREQUESTSIZE:-1000MB}
export resultsCleanUpThreadsEnabled=${resultsCleanUpThreadsEnabled:-YES}
export resultsCleanUpThreadsFrequency=${resultsCleanUpThreadsFrequency:-480}
export resultsAdditionalFileLocations=${resultsAdditionalFileLocations:-/tmp}
export resultsLoggingEnabled=${resultsLoggingEnabled:-false}
export resultsApplicationLogName=${resultsApplicationLogName:-ResultsService}
export resultsBaseURL=${resultsBaseURL:-${SERVICE_PROTOCOL}://${RESULTS_SERVICE_HOST}:${PORT_SPARQLGRAPH_RESULTS_SERVICE}}
export resultsServiceURL=${resultsServiceURL:-${SERVICE_PROTOCOL}://${RESULTS_SERVICE_HOST}:${PORT_SPARQLGRAPH_RESULTS_SERVICE}/results}
export resultsFileLocation=${resultsFileLocation:-/tmp/DISPATCH_RESULTS}
export resultsSampleLines=${resultsSampleLines:-100}
## dispatch service
export DISPATCH_SERVICE_HOST=${DISPATCH_SERVICE_HOST:-${SERVICE_HOST}}
export DISPATCH_SERVICE_PROTOCOL=${DISPATCH_SERVICE_PROTOCOL:-${SERVICE_PROTOCOL}}
export LOCATION_ADDITIONAL_DISPATCHER_JARS=${LOCATION_ADDITIONAL_DISPATCHER_JARS:-""}
export DISPATCHER_CLASS_NAME=${DISPATCHER_CLASS_NAME:-com.ge.research.semtk.sparqlX.asynchronousQuery.AsynchronousNodeGroupDispatcher}
## hive service
export HIVE_SERVICE_HOST=${HIVE_SERVICE_HOST:-${SERVICE_HOST}}
export HIVE_SERVICE_PROTOCOL=${HIVE_SERVICE_PROTOCOL:-${SERVICE_PROTOCOL}}
export hiveUsername=${hiveUsername:-hive}
export hivePassword=${hivePassword:-password}
export HIVE_LOGIN_TIMEOUT_SEC=${HIVE_LOGIN_TIMEOUT_SEC:-60}
## nodegroup store service
export NODEGROUPSTORE_SERVICE_HOST=${NODEGROUPSTORE_SERVICE_HOST:-${SERVICE_HOST}}
export NODEGROUPSTORE_SERVICE_PROTOCOL=${NODEGROUPSTORE_SERVICE_PROTOCOL:-${SERVICE_PROTOCOL}}
export NODEGROUPSTORE_SERVICE_MULTIPART_MAXFILESIZE=${NODEGROUPSTORE_SERVICE_MULTIPART_MAXFILESIZE:-1000MB}
export storeSparqlServerDataDataset=${storeSparqlServerDataDataset:-http://research.ge.com/knowledge/prefab/data}
export storeSparqlServerModelDataset=${storeSparqlServerModelDataset:-http://research.ge.com/knowledge/prefab/model}
export storeSparqlServerDomain=${storeSparqlServerDomain:-http://}
## ontology info service
export ONTOLOGYINFO_SERVICE_HOST=${ONTOLOGYINFO_SERVICE_HOST:-${SERVICE_HOST}}
export ONTOLOGYINFO_SERVICE_PROTOCOL=${ONTOLOGYINFO_SERVICE_PROTOCOL:-${SERVICE_PROTOCOL}}
export oinfoLoggingEnabled=${oinfoLoggingEnabled:-false}
export oinfoApplicationLogName=${oinfoApplicationLogName:-OntologyInfoService}
## nodegroup execution service
export NODEGROUPEXECUTION_SERVICE_HOST=${NODEGROUPEXECUTION_SERVICE_HOST:-${SERVICE_HOST}}
export NODEGROUPEXECUTION_SERVICE_PROTOCOL=${NODEGROUPEXECUTION_SERVICE_PROTOCOL:-${SERVICE_PROTOCOL}}
export nodeGroupExecutionLoggingEnabled=${nodeGroupExecutionLoggingEnabled:-false}
export nodeGroupExecutionApplicationLogName=${nodeGroupExecutionApplicationLogName:-NodeGroupExecutionService}
## nodegroup service
export NODEGROUP_SERVICE_HOST=${NODEGROUP_SERVICE_HOST:-${SERVICE_HOST}}
export NODEGROUP_SERVICE_PROTOCOL=${NODEGROUP_SERVICE_PROTOCOL:-${SERVICE_PROTOCOL}}
export NODEGROUP_SERVICE_MULTIPART_MAXFILESIZE=${NODEGROUP_SERVICE_MULTIPART_MAXFILESIZE:-1000MB}
## ingestion service
export INGESTION_SERVICE_HOST=${INGESTION_SERVICE_HOST:-${SERVICE_HOST}}
export INGESTION_SERVICE_PROTOCOL=${INGESTION_SERVICE_PROTOCOL:-${SERVICE_PROTOCOL}}
export INGESTION_SERVICE_MULTIPART_MAXFILESIZE=${INGESTION_SERVICE_MULTIPART_MAXFILESIZE:-1000MB}
export ingestionBatchSize=${ingestionBatchSize:-16}
export ingestionLoggingEnabled=${ingestionLoggingEnabled:-false}
export ingestionApplicationName=${ingestionApplicationName:-IngestionService}
export ingestionLoadTrackAwsRegion=
export ingestionLoadTrackS3Bucket=
export ingestionLoadTrackFolder=
## fdc sample
export FDCSAMPLE_SERVICE_HOST=${FDCSAMPLE_SERVICE_HOST:-${SERVICE_HOST}}
## logging service
export LOGGING_SERVICE_HOST=${LOGGING_SERVICE_HOST:-${SERVICE_HOST}}
export LOGGING_SERVICE_PROTOCOL=${LOGGING_SERVICE_PROTOCOL:-${SERVICE_PROTOCOL}}
export LOGGING_SERVICE_PORT=${LOGGING_SERVICE_PORT:-12092}
export LOGGING_SERVICE_ENDPOINT=${LOGGING_SERVICE_ENDPOINT:-/Logging/usageLog}
## athenaService
export ATHENA_SERVICE_HOST=${ATHENA_SERVICE_HOST:-${SERVICE_HOST}}
export ATHENA_AWS_REGION_ID=${ATHENA_AWS_REGION_ID:-us-east-1}
export ATHENA_AWS_S3_OUTPUT_BUCKET=${ATHENA_AWS_S3_OUTPUT_BUCKET:-s3://bucket}
export ATHENA_AWS_KEY=${ATHENA_AWS_KEY:-fake2391-23984-248}
export ATHENA_AWS_CLIENT_EXECUTION_TIMEOUT=${ATHENA_AWS_CLIENT_EXECUTION_TIMEOUT:-7000}
export ATHENA_TEST_DATABASE=${ATHENA_TEST_DATABASE:-dbget}
## arangoDbService
export ARANGODB_SERVICE_HOST=${ARANGODB_SERVICE_HOST:-${SERVICE_HOST}}
export ARANGODB_SERVICE_PROTOCOL=${ARANGODB_SERVICE_PROTOCOL:-${SERVICE_PROTOCOL}}
export ARANGODB_USER=${ARANGODB_USER:-user}
export ARANGODB_PASSWORD=${ARANGODB_PASSWORD:-password}
export ARANGODB_TEST_SERVER=${ARANGODB_TEST_SERVER:-}
export ARANGODB_TEST_PORT=${ARANGODB_TEST_PORT:-8529}
## binaryFileService
export BINARYFILE_SERVICE_HOST=${BINARYFILE_SERVICE_HOST:-${SERVICE_HOST}}
export BINARY_SHARED_DIRECTORY=${BINARY_SHARED_DIRECTORY:-/mnt/island/materials}
export BINARY_HDFS_DOWNLOAD_SERVICE_PROTOCOL=${BINARY_HDFS_DOWNLOAD_SERVICE_PROTOCOL:-http}
export BINARY_HDFS_DOWNLOAD_SERVICE_HOST=${BINARY_HDFS_DOWNLOAD_SERVICE_HOST:-server001.com}
export BINARY_HDFS_DOWNLOAD_SERVICE_PORT=${BINARY_HDFS_DOWNLOAD_SERVICE_PORT:-34152}
export binaryLoggingEnabled=${binaryLoggingEnabled:-false}
export binaryApplicationLogName=${binaryApplicationLogName:-BinaryFileService}
## hive
export HIVE_TEST_SERVER=${HIVE_TEST_SERVER}
export HIVE_TEST_PORT=${HIVE_TEST_PORT:-10000}
export HIVE_TEST_DATABASE=${HIVE_TEST_DATABASE}
export HIVE_TEST_USERNAME=${HIVE_TEST_USERNAME}
export HIVE_TEST_PASSWORD=${HIVE_TEST_PASSWORD}
## kairos
export KAIROS_TEST_SERVER=${KAIROS_TEST_SERVER}
export KAIROS_TEST_PORT=${KAIROS_TEST_PORT:-34156}
export KAIROS_TEST_URL=${KAIROS_TEST_URL:-http://server001.com:8080}
export EDCQUERYGEN_SERVICE_HOST=${EDCQUERYGEN_SERVICE_HOST:-${SERVICE_HOST}}
# fileStagingService (default settings below use /tmp for both source and destination file system)
export FILESTAGING_SERVICE_HOST=${FILESTAGING_SERVICE_HOST:-${SERVICE_HOST}}
export FILESTAGING_SERVICE_PROTOCOL=${FILESTAGING_SERVICE_PROTOCOL:-${SERVICE_PROTOCOL}}
export FILESTAGING_STORE_TYPE=${FILESTAGING_STORE_TYPE:-directory}
export FILESTAGING_STAGE_DIR=${FILESTAGING_STAGE_DIR:-/tmp}
export FILESTAGING_DIRECTORY=${FILESTAGING_DIRECTORY:-/tmp}
export FILESTAGING_S3_REGION=
export FILESTAGING_S3_BUCKET=
## authorization
export AUTH_SETTINGS_FILE_PATH=${AUTH_SETTINGS_FILE_PATH:-NO_AUTH}
export AUTH_LOG_PATH=${AUTH_LOG_PATH:-/tmp/semtk_auth_log.txt}
export AUTH_REFRESH_FREQ_SEC=${AUTH_REFRESH_FREQ_SEC:-300}
export AUTH_USERNAME_KEY=${AUTH_USERNAME_KEY:-user_name}
export AUTH_GROUP_KEY=${AUTH_GROUP_KEY:-group}
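# To enable authorization, point AUTH_SETTINGS_FILE_PATH at a settings file before
# sourcing, e.g. (hypothetical path): export AUTH_SETTINGS_FILE_PATH=/etc/semtk/auth_settings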
## semtk-sparqlgraph-web
export INGEST_URL=${INGEST_URL:-${SERVICE_PROTOCOL}://${INGESTION_SERVICE_HOST}:${PORT_INGESTION_SERVICE}/ingestion/}
export QUERY_URL=${QUERY_URL:-${SERVICE_PROTOCOL}://${SPARQLQUERY_SERVICE_HOST}:${PORT_SPARQL_QUERY_SERVICE}/sparqlQueryService/}
export STATUS_URL=${STATUS_URL:-${SERVICE_PROTOCOL}://${STATUS_SERVICE_HOST}:${PORT_SPARQLGRAPH_STATUS_SERVICE}/status/}
export RESULTS_URL=${RESULTS_URL:-${SERVICE_PROTOCOL}://${RESULTS_SERVICE_HOST}:${PORT_SPARQLGRAPH_RESULTS_SERVICE}/results/}
export DISPATCHER_URL=${DISPATCHER_URL:-${SERVICE_PROTOCOL}://${DISPATCH_SERVICE_HOST}:${PORT_DISPATCH_SERVICE}/dispatcher/}
export HIVE_URL=${HIVE_URL:-${SERVICE_PROTOCOL}://${HIVE_SERVICE_HOST}:${PORT_HIVE_SERVICE}/hiveService/}
export NGSTORE_URL=${NGSTORE_URL:-${SERVICE_PROTOCOL}://${NODEGROUPSTORE_SERVICE_HOST}:${PORT_NODEGROUPSTORE_SERVICE}/nodeGroupStore/}
export OINFO_URL=${OINFO_URL:-${SERVICE_PROTOCOL}://${ONTOLOGYINFO_SERVICE_HOST}:${PORT_ONTOLOGYINFO_SERVICE}/ontologyinfo/}
export NGEXEC_URL=${NGEXEC_URL:-${SERVICE_PROTOCOL}://${NODEGROUPEXECUTION_SERVICE_HOST}:${PORT_NODEGROUPEXECUTION_SERVICE}/nodeGroupExecution/}
export NG_URL=${NG_URL:-${SERVICE_PROTOCOL}://${NODEGROUP_SERVICE_HOST}:${PORT_NODEGROUP_SERVICE}/nodeGroup/}
export SECURE_REST_CLIENTS=${SECURE_REST_CLIENTS:-false}
## repeats of the above, renamed for the Tomcat web HTML and JS
# --------------- IMPORTANT ---------------
# injected into JavaScript
export WEB_PROTOCOL=${WEB_PROTOCOL:-${SERVICE_PROTOCOL}}
export WEB_INGESTION_HOST=${WEB_INGESTION_HOST:-${WEB_HOST}}
export WEB_INGESTION_PORT=${WEB_INGESTION_PORT:-${PORT_INGESTION_SERVICE}}
export WEB_SPARQL_QUERY_HOST=${WEB_SPARQL_QUERY_HOST:-${WEB_HOST}}
export WEB_SPARQL_QUERY_PORT=${WEB_SPARQL_QUERY_PORT:-${PORT_SPARQL_QUERY_SERVICE}}
export WEB_STATUS_HOST=${WEB_STATUS_HOST:-${WEB_HOST}}
export WEB_STATUS_PORT=${WEB_STATUS_PORT:-${PORT_SPARQLGRAPH_STATUS_SERVICE}}
export WEB_RESULTS_HOST=${WEB_RESULTS_HOST:-${WEB_HOST}}
export WEB_RESULTS_PORT=${WEB_RESULTS_PORT:-${PORT_SPARQLGRAPH_RESULTS_SERVICE}}
export WEB_DISPATCH_HOST=${WEB_DISPATCH_HOST:-${WEB_HOST}}
export WEB_DISPATCH_PORT=${WEB_DISPATCH_PORT:-${PORT_DISPATCH_SERVICE}}
export WEB_HIVE_HOST=${WEB_HIVE_HOST:-${WEB_HOST}}
export WEB_HIVE_PORT=${WEB_HIVE_PORT:-${PORT_HIVE_SERVICE}}
export WEB_NODEGROUPSTORE_HOST=${WEB_NODEGROUPSTORE_HOST:-${WEB_HOST}}
export WEB_NODEGROUPSTORE_PORT=${WEB_NODEGROUPSTORE_PORT:-${PORT_NODEGROUPSTORE_SERVICE}}
export WEB_ONTOLOGYINFO_HOST=${WEB_ONTOLOGYINFO_HOST:-${WEB_HOST}}
export WEB_ONTOLOGYINFO_PORT=${WEB_ONTOLOGYINFO_PORT:-${PORT_ONTOLOGYINFO_SERVICE}}
export WEB_NODEGROUPEXECUTION_HOST=${WEB_NODEGROUPEXECUTION_HOST:-${WEB_HOST}}
export WEB_NODEGROUPEXECUTION_PORT=${WEB_NODEGROUPEXECUTION_PORT:-${PORT_NODEGROUPEXECUTION_SERVICE}}
export WEB_NODEGROUP_HOST=${WEB_NODEGROUP_HOST:-${WEB_HOST}}
export WEB_NODEGROUP_PORT=${WEB_NODEGROUP_PORT:-${PORT_NODEGROUP_SERVICE}}
export WEB_USER_ENDPOINT=${WEB_USER_ENDPOINT:-/user}
# wrap these values in single quotes;
# there is no known way to include quotes in the text or HTML
export WEB_CUSTOM_BANNER_TEXT=${WEB_CUSTOM_BANNER_TEXT:-none}
export WEB_CUSTOM_STARTUP_DIALOG_TITLE=${WEB_CUSTOM_STARTUP_DIALOG_TITLE:-none}
export WEB_CUSTOM_STARTUP_DIALOG_HTML=${WEB_CUSTOM_STARTUP_DIALOG_HTML:-none}
export WEB_CUSTOM_AUTO_RUN_DEMO_FLAG=${WEB_CUSTOM_AUTO_RUN_DEMO_FLAG:-true}
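# Example (hypothetical banner text), following the single-quote rule above:
#   export WEB_CUSTOM_BANNER_TEXT='TEST SYSTEM - NOT FOR PRODUCTION USE'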
# --------------- IMPORTANT ---------------
# apply any overrides to the settings we just loaded
if [ -f DOT_SETTINGS ] && [ -f ENV_OVERRIDE ]; then
  echo "Error: both the older DOT_SETTINGS and the newer ENV_OVERRIDE are present."
  echo "Remove one of them."
  exit 1
elif [ -f DOT_SETTINGS ]; then
  echo "Using deprecated DOT_SETTINGS. Please rename it to ENV_OVERRIDE."
  . ./DOT_SETTINGS
elif [ -f ENV_OVERRIDE ]; then
  . ./ENV_OVERRIDE
fi
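# A minimal ENV_OVERRIDE might look like this (values hypothetical); because it is
# sourced after the defaults above, plain exports here take precedence:
#   export SERVICE_HOST=semtk.example.com
#   export PWD_SPARQLDB=not-the-default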