# SConstruct file to include for a program that uses the opendihu library
#
# In a directory where you have the sources for your user program, place a SConstruct file with the following contents:
##--------------------------------------------------------------------------------------------------------
## import os
##
## # get the directory where opendihu is installed (the top level directory of opendihu)
## opendihu_home = os.environ.get('OPENDIHU_HOME')
##
## # set path where the "SConscript" file is located (set to current path)
## path_where_to_call_sconscript = Dir('.').srcnode().abspath
##
## # call general SConstruct that will configure everything and then call SConscript at the given path
## SConscript(os.path.join(opendihu_home,'SConstructGeneral'),
## exports={"path": path_where_to_call_sconscript})
##
##--------------------------------------------------------------------------------------------------------
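#
# With such a SConstruct in place, the build is typically started directly from that directory by running
# scons with the variables described further below, for example (illustrative values only, your paths will differ):
#
##   export OPENDIHU_HOME=/path/to/opendihu   # hypothetical top-level directory of opendihu
##   scons BUILD_TYPE=release                 # or e.g. BUILD_TYPE=debug, see the variable definitions below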
#
import os
import sys
import random
import socket
import platform
sys.path.append("dependencies/scons-config/dist/scons_config-0.1-py3.6.egg") # load scons_config
import sconsconfig
from sconsconfig import packages, tools
# import the path at which the SConscript will be called at the end
Import("path")
path_where_to_call_sconscript = path
opendihu_home = Dir('.').srcnode().abspath # opendihu home directory is where this file is located
#
# Select the packages we want to use in the configuration.
# The order is relevant, dependent packages have to come after the packages they depend on.
#
sconsconfig.select(
packages.MPI(required=True), # This searches for the MPI implementation
#packages.LAPACK(required=False), # This downloads and compiles OpenBLAS which implements the interfaces of both LAPACK and BLAS. It is only needed for the svg utility for MOR, which is currently not really required
#packages.bison(required=False), # parser generator needed by PTScotch which is needed by MUMPS which is needed by Petsc
#packages.flex(required=False), # "Fast Lexical Analyzer" needed by PTScotch which is needed by MUMPS which is needed by Petsc
packages.PETSc(required=True), # Petsc depends on LAPACK/BLAS and bison
#packages.bzip2(required=False),
#packages.zlib(required=False), # zlib is needed to build python on hawk
packages.Python(required=True), # This compiles python 3.9 or python 3.6 from source to be able to embed the python interpreter in opendihu. All further python packages are installed in this installation tree under dependencies/python/install
packages.pythonPackages(required=False), # all further python utils that can be installed via pip
packages.Base64(required=True), # Base64 is an encoding library that is needed for binary VTK output.
packages.googletest(required=False), # This is the testing framework.
#packages.Sphinx(required=True),
#packages.libcellml(required=True),# relies on libxml2 and SWIG
packages.SEMT(required=False), # This is a small C++ symbolic differentiation toolbox that is used for solid mechanics.
packages.ADIOS(required=False), # ADIOS, a library that handles efficient and parallel data output
#packages.MegaMol(required=False), # Adds the MegaMol visualization framework
#packages.VTK(required=False), # VTK, needed only for chaste
#packages.HDF5(required=False), # The parallel output library HDF5, compiled with --enable-parallel as needed for chaste, the version that Petsc can download does not have this feature
#packages.XercesC(required=False), # XML-parser, needed for chaste
#packages.xsd(required=False), # XML Schema to C++ data binding compiler, needed for chaste
#packages.boost(required=False), # boost C++ library, needed for chaste
#packages.Chaste(required=False), # The Chaste framework, only the nonlinear mechanics solver is used
packages.Vc(required=True), # The Vc types for explicit vectorization
packages.std_simd(required=True), # std::simd vectorization library that needs C++17
packages.xbraid(required=False), # framework for multigrid in time methods
packages.OpenCOR(required=False), # binary that converts CellML files from xml to c
packages.libxml2(required=False), # parser for xml files, needed for precice
packages.precice(required=False), # numerical coupling library
packages.EasyLoggingPP(required=True), # This is the logging library. It has to be the last package to be included because it contains a source (.c) file that needs to be compiled and linked into opendihu; otherwise scons-config somehow does not work.
)
# By default set the packages' location to the dependency directory. This can be overridden in 'user-variables.scons.py'.
# Note that an absolute path has to be given, otherwise there are problems finding the libraries
# read variables from user file 'user-variables.scons.py' and from command line arguments, e.g. `scons BUILD_TYPE=release`
variables = ARGUMENTS.copy() # command line arguments are in the global ARGUMENTS dictionary
#variables.update(preset_variables) # merge in preset_variables dictionary
vars = Variables(['user-variables.scons.py','../user-variables.scons.py'], variables)
# specify type and default value for recognized variables
vars.Add(EnumVariable('BUILD_TYPE', 'The build type, according to that different compiler options will be set. '\
'(Shortcuts: ''d'',''r'',''rd''=''dr'', ''p'')', 'release',
allowed_values=('debug', 'release', 'releasewithdebuginfo', 'preprocess', 'assembly'), ignorecase = 2,
map={'d':'debug', 'r':'release', 'rd':'releasewithdebuginfo', 'dr':'releasewithdebuginfo', 'p':'preprocess'}))
vars.Add(BoolVariable('travis_ci', 'Do not compile and run tests, but compile all examples.', 0))
vars.Add(BoolVariable('circle_ci', 'Subset of unit tests for circle ci (has limited parallel capabilities.)', 0))
vars.Add(BoolVariable('no_tests', 'Do not compile and run tests.', 0))
vars.Add(BoolVariable('no_examples', 'Do not compile examples.', 0))
vars.Add(BoolVariable('gprof', 'Include flags for gprof profiling.', False))
vars.Add(BoolVariable('no_vectorization', 'Turn off SSE and AVX instructions.', False))
vars.Add(BoolVariable('USE_CRAY_PAT', 'Compile with craypat regions.', False))
vars.Add(BoolVariable('USE_EXTRAE', 'Compile with extrae support. Only needed if you want to use `#include "extrae.h"` in the code.', False))
vars.Add(BoolVariable('USE_SCOREP', 'Compile with scorep compiler wrapper.', False))
vars.Add(BoolVariable('USE_HPL', 'Compile with -hpl option.', False))
vars.Add(BoolVariable('USE_MEGAMOL', 'Use additional linker flags to link to the megamol source file.', False))
vars.Add(BoolVariable('VERBOSE', 'Output raw shell commands for compilation.', False))
vars.Add(BoolVariable('ph', 'Build precompiled headers.', False))
vars.Add(BoolVariable('USE_VECTORIZED_FE_MATRIX_ASSEMBLY', 'Enable code so that the assembly of the FEM matrices uses the Vc library.', False))
vars.Add(BoolVariable('USE_STDSIMD', 'Use the std::experimental::simd library which supports AVX-512, instead of Vc. This automatically changes c++14 to c++17.', False))
vars.Add(BoolVariable('USE_MPI_ALLOC', 'Use calls to MPI_Win_allocate instead of MPI_Win_create.', False))
vars.Add('mpicc', help='The mpicc compiler wrapper', default='mpicc')
vars.Add('mpiCC', help='The mpic++ compiler wrapper', default='mpic++')
vars.Add('cc', help='The c compiler', default='gcc')
vars.Add('CC', help='The c++ compiler', default='g++')
vars.Add('cmake', help='The cmake executable', default='cmake')
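# Example invocations using the variables defined above (illustrative only, combine as needed):
#   scons BUILD_TYPE=d no_tests=True   # debug build, skip the unit tests
#   scons BUILD_TYPE=rd gprof=True     # release-with-debug-info build with gprof profiling flags
#   scons VERBOSE=1                    # show the raw compiler and linker command lines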
# Add options from any packages we want to use.
sconsconfig.add_options(vars)
# initialize environment object containing build system settings such as compilers and flags
# command line options are considered
env = Environment(tools = ['default'], toolpath = ['config/tools'], variables = vars, ENV = os.environ)
vars.Save('out.py',env)
# check for unknown variables and output warning
unknown = vars.UnknownVariables()
if unknown:
  print("WARNING: Unknown variables from command line arguments or config file 'user-variables.scons.py':", unknown.keys())
# generate help text for the page `scons -h`
Help(vars.GenerateHelpText(env))
# set console messages for compilation and linking
tty_available = True
try:
  screen = open('/dev/tty', 'w')
except:
  tty_available = False

if ARGUMENTS.get('VERBOSE') != '1' and tty_available:
  env['CXXCOMSTR'] = "\x1b[32mCompiling $SOURCE\x1b[0m"
  env['CCCOMSTR'] = "\x1b[32mCompiling $SOURCE\x1b[0m"
  env['ASCOMSTR'] = "\x1b[32mAssembling $TARGET\x1b[0m"
  env['LINKCOMSTR'] = "\x1b[32mLinking $TARGET\x1b[0m"
  env['ARCOMSTR'] = "\x1b[36mBuilding archive $TARGET\x1b[0m"
  env['RANLIBCOMSTR'] = "\x1b[36mCreating archive index\x1b[0m"
  env['GCHSTR'] = "\x1b[32mCompiling header $SOURCE\x1b[0m"
# set compiler, "CC" is C++, "cc" is C
env['CXX'] = env['CC']
env['CC'] = env['cc']
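# e.g. with the default variables (cc=gcc, CC=g++) this results in env['CC'] = 'gcc' and env['CXX'] = 'g++'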
if 'cmake' not in env:
  env['cmake'] = 'cmake'
if not env.GetOption('clean'):
  # avoid Configure if we only clean targets

  # Create configuration environment, passing the set of custom tests.
  sconf = env.Configure(custom_tests=sconsconfig.custom_tests)

  # Run custom tests with any options needed.
  sconsconfig.check(sconf)

  # Finish the configuration and save it to file.
  sconf.Finish()
# disable unit tests on hazel hen
if os.environ.get("PE_ENV") is not None: # if on hazelhen
env['no_tests'] = True
# -----------------------------------------------
# progress display e.g. [ 50%]
# set needed global variables
env["node_count"] = 0 # current counter of nodes to compile
env["node_count_max"] = 0 # maximum number of nodes to compile that was determined earlier
env["node_count_interval"] = 1 # the interval in which the progress output should be updated
node_count_fname = str(env.Dir('#')) + '/.scons_node_count' # filename of the stored maximum node count
env["node_count_fname"]=node_count_fname
def progress_function(node):
  """ function to be called before a new node is compiled, outputs progress information like [ 50%] if it is <=100%"""
  global env
  env["node_count"] += env["node_count_interval"]
  if env["node_count"] > env["node_count_max"]: env["node_count_max"] = 0
  if env["node_count_max"]>0 and tty_available:
    sys.stdout.write('\r[%3d%%] ' % (env["node_count"]*100/env["node_count_max"]))
    sys.stdout.flush()

  # write out
  try:
    with open(env["node_count_fname"], 'w') as f: f.write('%d\n' % env["node_count"])
  except: pass

# read in maximum node count from an earlier compilation
try:
  with open(env["node_count_fname"]) as f: env["node_count_max"] = int(f.readline())
except: pass
# add progress display target
Progress(progress_function, interval=env["node_count_interval"])
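# note: the percentage is computed against the node count that the previous run stored in .scons_node_count;
# on a fresh checkout (or after deleting that file) node_count_max stays 0 and no percentage is shown yet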
# add notification when compilation is complete
if os.environ.get("PE_ENV") == None:
if "notifyset" not in env:
env["notifyset"] = True
target='notify-send'+str(random.random())
if env['no_tests']:
notify = env.Command(target=target, source = None, action = 'notify-send "Compilation of opendihu core completed." -a opendihu || true')
else:
notify = env.Command(target=target, source = None, action = 'notify-send "Compilation of opendihu core completed." "You can now abort compilation of unit tests, if not needed." -a opendihu || true')
#AlwaysBuild('notify-send')
if env["BUILD_TYPE"] == "debug":
Depends(notify, 'core/build_debug/libopendihud.a')
else:
Depends(notify, 'core/build_release/libopendihu.a')
# ensure that PYTHONPATH points to scripts directory which contains python scripts
current_python_path = os.environ.get("PYTHONPATH")
if current_python_path is None:
  os.environ["PYTHONPATH"] = os.path.join(opendihu_home, "scripts")+":"+os.path.join(opendihu_home, "scripts/geometry_manipulation")
elif "scripts" not in current_python_path:
  os.environ["PYTHONPATH"] = current_python_path+":"+os.path.join(opendihu_home, "scripts")+":"+os.path.join(opendihu_home, "scripts/geometry_manipulation")
# check if input directory exists
input_dir = os.path.join(opendihu_home, "examples/electrophysiology/input")
input_exists = os.path.isdir(input_dir)
# check that input directory is non-empty
if input_exists:
  if not os.listdir(input_dir):
    input_exists = False
# if the input directory does not exist, print a warning with instructions on how to download the file
if not input_exists:
  print("\n\x1b[31mWarning! No input directory exists under examples/electrophysiology/input.\nPlease do the following: \n"
        " * Download the input.tgz file from https://doi.org/10.5281/zenodo.4705944 \n and store it under examples/electrophysiology\n"
        " * Change to the directory and uncompress the file as follows:\n"
        " tar xf input.tgz\n"
        " * Make sure that the directory examples/electrophysiology/input contains the files.\n Then run scons again.\n\n\x1b[0m")
# -----------------------------------------------
# Depending on the variable BUILD_TYPE, either build with debug, releasewithdebuginfo or release settings and link the corresponding version of the core library.
# All the flags may only work with the GCC compiler, tested with GCC 7.3.0 and newer.
# When using a different compiler that supports C++14, add some 'if's to the next section.
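# (Overview: "debug" links core/build_debug/libopendihud.a, "releasewithdebuginfo" links
#  core/build_release_with_debug_info/libopendihurd.a, "release" links core/build_release/libopendihu.a;
#  "preprocess" and "assembly" only produce compiler output and do not link the core library.)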
if env["BUILD_TYPE"] == "debug":
# debug build
variant_dir = os.path.join(path_where_to_call_sconscript,"build_debug") # output folder for build
if env['CC'] != "pgcc": # not pgi compiler
env.MergeFlags('-ftemplate-backtrace-limit=0')
env.MergeFlags('-Wall -Wpedantic -Wno-sign-compare -Wno-error=sign-compare')
env.MergeFlags('-DDEBUG -ggdb3 -g3 -Og -DELPP_FEATURE_CRASH_LOG') # gcc flags, will be sorted automatically into linker and compiler flags
else: # using pgi compiler here:
# env.MergeFlags('-O0 -g -ta=host,tesla:cc60,time -Minfo=accel -noswitcherror -std=c++14 -DELPP_FEATURE_CRASH_LOG -laccg -laccapi -laccn -laccg2 -lcudadevice')
env.MergeFlags('-O0 -g -std=c++14 -DELPP_FEATURE_CRASH_LOG')
env.MergeFlags('-DDEBUG -D__SSE__')
if not env["USE_MEGAMOL"]:
#env.MergeFlags('-Werror')
pass
env.Append(LIBPATH = [os.path.join(opendihu_home, 'core/build_debug')]) # add debug version of opendihu library
env.Prepend(LIBS = ['opendihud'])
elif env["BUILD_TYPE"] == "releasewithdebuginfo":
# release with debug info build
variant_dir = os.path.join(path_where_to_call_sconscript,'build_release_with_debug_info') # folder of build
env.MergeFlags('-DRELWITHDEBINFO -O3 -ggdb3 -g3 -pg') # gcc flags, will be sorted automatically into linker and compiler flags
env.Append(LIBPATH = [os.path.join(opendihu_home, 'core/build_release_with_debug_info')]) # add release with debug info version of opendihu library
env.Prepend(LIBS = ['opendihurd'])
elif env["BUILD_TYPE"] == "preprocess":
# only the preprocessing step
variant_dir = os.path.join(path_where_to_call_sconscript,'preprocess') # output folder for build
env.MergeFlags('-DDEBUG -O2 -ggdb3 -g3 -DELPP_DISABLE_DEBUG_LOGS -DELPP_DISABLE_VERBOSE_LOGS -DELPP_DISABLE_TRACE_LOGS -E -C -P ') # gcc flags, will be sorted automatically into linker and compiler flags
elif env["BUILD_TYPE"] == "assembly":
# do the assembly
variant_dir = os.path.join(path_where_to_call_sconscript,'assembly') # output folder for build
env.MergeFlags('-DDEBUG -S -fverbose-asm -g -O2 -DELPP_DISABLE_DEBUG_LOGS -DELPP_DISABLE_VERBOSE_LOGS -DELPP_DISABLE_TRACE_LOGS ') # gcc flags, will be sorted automatically into linker and compiler flags
else:
  # release build
  variant_dir = os.path.join(path_where_to_call_sconscript,'build_release')   # output folder for build
  env.Append(LIBPATH = [os.path.join(opendihu_home, 'core/build_release')])   # add release version of opendihu library
  env.Prepend(LIBS = ['opendihu'])
  env.MergeFlags('-DELPP_DISABLE_DEBUG_LOGS -DELPP_DISABLE_VERBOSE_LOGS -DELPP_DISABLE_TRACE_LOGS -DELPP_DISABLE_LOGGING_FLAGS_FROM_ARG -DNDEBUG -DDISABLE_LOG_SCOPE')

  if os.environ.get("PE_ENV") == "CRAY":
    env.MergeFlags('-O3')   # fastest with cray (same performance as -O2)
  elif env["CC"] == "pgcc":
    #env.MergeFlags('-g -O3 -fastsse -Mvect=simd -noswitcherror -std=c++14 -acc -ta=host,tesla:managed,cc35,cc60,time,cuda10.0 -laccg -laccapi -laccn -laccg2 -lcudadevice')
    #env.MergeFlags('-fastsse -g -noswitcherror -std=c++14 -Mvect=simd -acc -ta=host,tesla:cc60,time -Minfo=accel -laccg -laccapi -laccn -laccg2 -lcudadevice')
    env.MergeFlags('-noswitcherror -std=c++14')   # managed,
  else:
    env.MergeFlags('-Ofast')   # fastest with GNU and Intel
if env["gprof"]:
print("Adding flags for gprof.")
env.Append(LINKFLAGS = "-pg") # add gprof flags for linker
env.MergeFlags('-pg')
if env["USE_SCOREP"] or env["USE_EXTRAE"]:
env.MergeFlags('-g')
if env["USE_CRAY_PAT"]:
print("USE_CRAY_PAT=True: Compiling with -DHAVE_PAT (uses PAT_region_begin())\n")
env.Append(CPPDEFINES = "-DHAVE_PAT")
if os.environ.get("PE_ENV") != "CRAY":
env.MergeFlags('-finstrument-functions') # instrument functions for pat_run, works with GNU and Intel
if env["USE_EXTRAE"]:
if 'EXTRAE_HOME' not in os.environ:
print("ERROR: $EXTRAE_HOME is not set. Did you forget to `module load extrae`?")
else:
print("USE_EXTRAE=True: Compiling with -DHAVE_EXTRAE\n")
env.Append(CPPDEFINES = "-DHAVE_EXTRAE")
env.MergeFlags('-I{EXTRAE_HOME}/include -L{EXTRAE_HOME}/lib -lmpitrace'.format(EXTRAE_HOME=os.environ['EXTRAE_HOME']))
if env["USE_SCOREP"]:
print("Using scorep compiler wrapper, this only works on hawk.")
if 'SCOREP_ROOT' not in os.environ:
print("ERROR: $SCOREP_ROOT is not set. Did you forget to `module load scorep`?")
if 'SCOREP_WRAPPER_INSTRUMENTER_FLAGS' not in os.environ:
# --nomemory to reduce scons memory requirements
# --thread=none to avoid eror about confliciting Opari2 and pthreads adapter.
print("ERROR: $SCOREP_WRAPPER_INSTRUMENTER_FLAGS is not set. Did you forget to `export SCOREP_WRAPPER_INSTRUMENTER_FLAGS='--nomemory --thread=none'`?")
if "ZLIB_ROOT" not in os.environ:
print("ERROR: $ZLIB_ROOT is not set. Did you forget to `module load zlib`?")
env["CXX"] = os.path.join(opendihu_home, "scripts/fake-scorep.sh")
if env["circle_ci"]:
env.MergeFlags('-DHAVE_CIRCLE_CI')
# flags to always include, except with cray compiler
if os.environ.get("PE_ENV") != "CRAY" and "pg" not in env["cc"]:
env.MergeFlags('-Wunused-variable') # always warn about unused variables
env.MergeFlags('-fopenmp')
if env["USE_STDSIMD"]:
env.MergeFlags('-std=c++17')
else:
env.MergeFlags('-std=c++14')
if env["USE_MPI_ALLOC"]:
env.MergeFlags('-DUSE_MPI_ALLOC')
if "ppc" in platform.processor():
env.MergeFlags('-mcpu=native -mtune=native')
else:
if os.environ.get("PE_ENV") != "INTEL" and not env["no_vectorization"]:
env.MergeFlags('-march=native')
env.MergeFlags('-Wl,--disable-new-dtags')
if os.environ.get("PE_ENV") == "CRAY" and env["USE_HPL"]:
print("USE_HPL=True: Compiling with -hpl option\n")
env.MergeFlags('-hpl={}/opendihu.pl'.format(opendihu_home))
if env["no_vectorization"]:
print("Warning: Compile without vectorization (option no_vectorization)!")
env.MergeFlags('-mno-sse2 -mno-sse3 -mno-ssse3 -mno-sse4 -mno-sse4a -mno-fma4 -mno-avx')
if env["USE_VECTORIZED_FE_MATRIX_ASSEMBLY"]:
print("Vectorized FE matrix assembly is enabled.")
env.MergeFlags(' -DUSE_VECTORIZED_FE_MATRIX_ASSEMBLY')
# add basic linker flags for linux
env.MergeFlags("-lpthread -ldl -lutil -lm")
# add defines that are used in the code
env.Append(CPPDEFINES = {'-DOPENDIHU_HOME': '\\"'+str(opendihu_home)+'\\"'}) # add define of the directory where this file is located
env.Append(CPPDEFINES = {'-DC_COMPILER_COMMAND': '\\"'+env["CC"]+'\\"'}) # add compile command, e.g. gcc
env.Append(CPPDEFINES = {'-DCXX_COMPILER_COMMAND': '\\"'+env["CXX"]+'\\"'}) # add compile command, e.g. g++
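# note: because of the escaped quotes, the three defines above expand to C string literals in the sources,
# e.g. OPENDIHU_HOME becomes something like "/home/user/opendihu" (illustrative path) at compile time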
if os.path.isfile("/.dockerenv"):
env.Append(CPPDEFINES = '-DINSIDE_DOCKER_CONTAINER')
env.Append(CPPDEFINES = "-DELPP_NO_DEFAULT_LOG_FILE") # disable creation of the "myeasylog.log" default log file
# Set the include path for header files
env.Append(CPPPATH = [os.path.join(opendihu_home, 'core/src')])
#print(env.Dump())
# output compilers
if "mpiCC" in env:
print("Compilers: C: {}, C++: {}, MPI: {}".format(env['CC'],env['CXX'],env['mpiCC']))
else:
print("Compilers: C: {}, C++: {}, MPI: -- ".format(env['CC'],env['CXX']))
# call SConscript file for the actual build in the specified build directory variant_dir
SConscript(dirs=path_where_to_call_sconscript,
           variant_dir=variant_dir,
           duplicate=False,
           exports="env")