import logging
import os
import socket
import sys

logger = logging.getLogger(__name__)
logger.info('loading settings')
try:
    INSTALL_PATH = os.environ['ARGOBALSAM_INSTALL_PATH']
    DATA_PATH = os.environ['ARGOBALSAM_DATA_PATH']
    ALLOWED_EXE_PATH = os.environ['ARGOBALSAM_EXE_PATH']
except KeyError as e:
    logger.error('Environment not setup: ' + str(e))
    raise
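# These must be exported before anything imports this module, e.g. from the
# shell (the paths below are placeholders, not real install locations):
#   export ARGOBALSAM_INSTALL_PATH=/path/to/install
#   export ARGOBALSAM_DATA_PATH=/path/to/data
#   export ARGOBALSAM_EXE_PATH=/path/to/allowed/executables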
#------------------------------
# DATABASE CONFIG INFO
#------------------------------
USING_DB_LOGIN = False
DBUSER = ''
DBPASS = ''
if USING_DB_LOGIN:
    DBUSER = os.environ['ARGOBALSAM_DBUSER']
    DBPASS = os.environ['ARGOBALSAM_DBPASS']
# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
default_db = {}
default_db['ENGINE'] = 'django.db.backends.sqlite3'
default_db['NAME'] = os.path.join(INSTALL_PATH,'db.sqlite3')
if USING_DB_LOGIN:
    default_db['USER'] = DBUSER
    default_db['PASSWORD'] = DBPASS
DATABASES = {
    'default': default_db,
}
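# sqlite3 needs no credentials; to point at a server-backed database instead,
# change ENGINE/NAME accordingly (e.g. 'django.db.backends.postgresql_psycopg2'
# in Django 1.9) and set USING_DB_LOGIN = True above so USER/PASSWORD are read
# from the environment.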
#------------------------------
# BALSAM CONFIG INFO
#------------------------------
BALSAM_LOGGING_DIRECTORY = os.path.join(INSTALL_PATH, 'log') # where to store log files
BALSAM_WORK_DIRECTORY = os.path.join(DATA_PATH,'balsamjobs') # where to store local job data used for submission
BALSAM_DELETE_OLD_WORK = True # enable deletion of old folders in BALSAM_WORK_DIRECTORY every BALSAM_DELETE_OLD_WORK_PERIOD seconds
BALSAM_DELETE_OLD_WORK_PERIOD = 86400 # in seconds; check once a day for work folders older than BALSAM_DELETE_OLD_WORK_AGE
BALSAM_DELETE_OLD_WORK_AGE = 86400 * 31 # in seconds; delete work folders older than 31 days
BALSAM_SERVICE_LOG_FILENAME = os.path.join(BALSAM_LOGGING_DIRECTORY, 'balsam_service.log')
BALSAM_SCHEDULER_SUBMIT_EXE = '/usr/bin/qsub'
BALSAM_SCHEDULER_STATUS_EXE = '/usr/bin/qstat'
BALSAM_SCHEDULER_HISTORY_EXE = '/usr/bin/'
BALSAM_SERVICE_PERIOD = 10 # seconds between service loop execution
BALSAM_MAX_QUEUED = 20 # the maximum number of jobs allowed on the local queue
BALSAM_SUBMIT_JOBS = True # submit jobs to queue, turn off when testing
BALSAM_DEFAULT_QUEUE = '' # default local queue name
BALSAM_DEFAULT_PROJECT = '' # default local project name
BALSAM_ALLOWED_EXECUTABLE_DIRECTORY = ALLOWED_EXE_PATH # path to allowed executables
BALSAM_SITE = 'argo_cluster_dev' # local balsam site name
BALSAM_SCHEDULER_CLASS = 'CobaltScheduler' # local scheduler in use
BALSAM_SCHEDULER_SUBMIT_SCRIPT = os.path.join(BALSAM_ALLOWED_EXECUTABLE_DIRECTORY,'submit.sh')
BALSAM_SCHEDULER_USE_SUBMIT_SCRIPT = True
BALSAM_MAX_CONCURRENT_TRANSITIONS = 5 # maximum number of sub threads spawned by Balsam
#------------------------------
# ARGO CONFIG INFO
#------------------------------
ARGO_LOGGING_DIRECTORY = BALSAM_LOGGING_DIRECTORY
ARGO_SERVICE_LOG_FILENAME = os.path.join(ARGO_LOGGING_DIRECTORY,'argo_service.log')
ARGO_SERVICE_PERIOD = 10 # seconds between service loop execution
ARGO_WORK_DIRECTORY = os.path.join(DATA_PATH,'argojobs')
ARGO_DELETE_OLD_WORK = True # enable deletion of old folders in ARGO_WORK_DIRECTORY every ARGO_DELETE_OLD_WORK_PERIOD seconds
ARGO_DELETE_OLD_WORK_PERIOD = 86400 # in seconds; check once a day for work folders older than ARGO_DELETE_OLD_WORK_AGE
ARGO_DELETE_OLD_WORK_AGE = 86400 * 31 # in seconds; delete work folders older than 31 days
ARGO_MAX_CONCURRENT_TRANSITIONS = 5 # maximum number of sub threads spawned by ARGO
#------------------------------
# GRID FTP SERVER INFO
#------------------------------
GRIDFTP_BIN = '/soft/data-transfer/globus/bin'
GRIDFTP_GLOBUS_URL_COPY = os.path.join(GRIDFTP_BIN,'globus-url-copy')
GRIDFTP_PROXY_INFO = os.path.join(GRIDFTP_BIN,'grid-proxy-info')
GRIDFTP_PROXY_INIT = os.path.join(GRIDFTP_BIN,'grid-proxy-init')
GRIDFTP_PROTOCOL = 'gsiftp://'
GRIDFTP_SERVER = 'atlasgridftp02.hep.anl.gov'
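# A full transfer URL for this server would presumably be composed from the
# pieces above, e.g.:
#   GRIDFTP_PROTOCOL + GRIDFTP_SERVER + '/path/to/file'
#   -> 'gsiftp://atlasgridftp02.hep.anl.gov/path/to/file'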
#------------------------------
# RABBITMQ/PIKA CONFIG
#------------------------------
RABBITMQ_SERVER_NAME = 'atlasgridftp02.hep.anl.gov'
RABBITMQ_SERVER_PORT = 5671
try:
    RABBITMQ_SSL_CERT = os.environ['X509_USER_CERT']
    RABBITMQ_SSL_KEY = os.environ['X509_USER_KEY']
    RABBITMQ_SSL_CA_CERTS = os.environ['X509_CACERTS']
except KeyError as e:
    logger.error('Environment variable undefined: ' + str(e))
    raise
RABBITMQ_USER_EXCHANGE_NAME = 'argo_users_dev'
RABBITMQ_USER_JOB_QUEUE_NAME = 'argo_service_dev'
RABBITMQ_USER_JOB_ROUTING_KEY = 'argo_job_dev'
RABBITMQ_BALSAM_EXCHANGE_NAME = 'hpc_dev'
RABBITMQ_BALSAM_JOB_STATUS_QUEUE = 'balsam_job_status_dev'
RABBITMQ_BALSAM_JOB_STATUS_ROUTING_KEY = 'balsam_job_status_dev'
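# Illustrative consumer setup (not executed here; assumes a pika version whose
# ConnectionParameters accepts ssl/ssl_options as below, e.g. the 0.x series):
#   import pika
#   params = pika.ConnectionParameters(
#       host=RABBITMQ_SERVER_NAME, port=RABBITMQ_SERVER_PORT,
#       ssl=True,
#       ssl_options={'certfile': RABBITMQ_SSL_CERT,
#                    'keyfile': RABBITMQ_SSL_KEY,
#                    'ca_certs': RABBITMQ_SSL_CA_CERTS})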
#------------------------------
# logging settings
#------------------------------
LOG_HANDLER_LEVEL = 'DEBUG'
LOG_BACKUP_COUNT = 5 # number of files worth of history
LOG_FILE_SIZE_LIMIT = 100 * 1024 * 1024 # file size at which to move to a new log file
LOG_HANDLER_FILENAME = os.path.join(ARGO_LOGGING_DIRECTORY,'argo_balsam.' + str(os.getpid()) + '.log')
if 'argo_service' in sys.argv:
    LOG_HANDLER_FILENAME = ARGO_SERVICE_LOG_FILENAME
elif 'balsam_service' in sys.argv:
    LOG_HANDLER_FILENAME = BALSAM_SERVICE_LOG_FILENAME
print('logging to ' + str(LOG_HANDLER_FILENAME))
LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'formatters': {
        'standard': {
            'format': '%(asctime)s|%(process)d|%(levelname)8s|%(name)s:%(lineno)s] %(message)s',
            'datefmt': "%d-%b-%Y %H:%M:%S",
        },
    },
    'handlers': {
        'time_rotating': {
            'level': LOG_HANDLER_LEVEL,
            'class': 'logging.handlers.TimedRotatingFileHandler',
            'filename': LOG_HANDLER_FILENAME,
            'when': 'W0',  # roll over weekly, on Mondays
            'interval': 1,  # ignored for 'W' (weekday) rollover
            'formatter': 'standard',
            'backupCount': 2,  # number of rotated files to keep (must be an int)
        },
        'console': {
            'class': 'logging.StreamHandler',
            'formatter': 'standard',
        },
        'default': {
            'level': LOG_HANDLER_LEVEL,
            'class': 'logging.handlers.RotatingFileHandler',
            'filename': LOG_HANDLER_FILENAME,
            'maxBytes': LOG_FILE_SIZE_LIMIT,
            'backupCount': LOG_BACKUP_COUNT,
            'formatter': 'standard',
        },
    },
    'loggers': {
        'django': {
            'handlers': ['default'],
            'level': 'DEBUG',
            'propagate': True,
        },
        'argo': {
            'handlers': ['default'],
            'level': 'DEBUG',
        },
        'common': {
            'handlers': ['default'],
            'level': 'DEBUG',
        },
        'console': {
            'handlers': ['console'],
            'level': 'INFO',
        },
        'balsam': {
            'handlers': ['default'],
            'level': 'DEBUG',
        },
        'django.db.backends': {
            'handlers': ['default'],
            'level': 'WARNING',
        },
        'pika.adapters.base_connection': {
            'handlers': ['default'],
            'level': 'WARNING',
        },
    },
}
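# Application code picks these up through the standard logging API; e.g. a
# (hypothetical) module under the balsam package would do:
#   logger = logging.getLogger('balsam.scheduler')
#   logger.debug('routed to the "default" rotating file handler')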
#------------------------------
# Sanity Checks
#------------------------------
# ensure that requisite paths exist
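# (this loop runs at import time, so a misconfigured environment fails fast)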
for d in [
    INSTALL_PATH,
    DATA_PATH,
    ALLOWED_EXE_PATH,
    BALSAM_LOGGING_DIRECTORY,
    BALSAM_WORK_DIRECTORY,
    BALSAM_ALLOWED_EXECUTABLE_DIRECTORY,
    ARGO_LOGGING_DIRECTORY,
    ARGO_WORK_DIRECTORY,
    GRIDFTP_GLOBUS_URL_COPY,
    GRIDFTP_PROXY_INFO,
    GRIDFTP_PROXY_INIT,
    RABBITMQ_SSL_CERT,
    RABBITMQ_SSL_KEY,
    RABBITMQ_SSL_CA_CERTS,
]:
    if not os.path.exists(d):
        raise Exception('Path does not exist: ' + d)