  
** SLURM installation **

Configured and compiled on ''greentail52'' despite it not having GPUs ... only the NVIDIA management library (nvml) is needed.
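
A quick way to confirm that the nvml pieces are actually present on a GPU-less build host; the CUDA toolkit path below is this site's and is only illustrative:

<code>
# nvml header and stub library ship with the CUDA toolkit,
# so no driver or GPU is required on the build host
ls /usr/local/n37-cuda-9.2/include/nvml.h
ls /usr/local/n37-cuda-9.2/lib64/stubs/libnvidia-ml.so
# or check whether the runtime library is in the linker cache
ldconfig -p | grep -i nvidia-ml
</code>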
  
<code>
  
#source /share/apps/CENTOS7/amber/miniconda3/etc/profile.d/conda.sh
#export PATH=/share/apps/CENTOS7/amber/miniconda3/bin:$PATH
#export LD_LIBRARY_PATH=/share/apps/CENTOS7/amber/miniconda3/lib:$LD_LIBRARY_PATH
#which mpirun python conda

# cuda 9.2 ... configure finds /usr/local/cuda which points to n37-cuda-9.2
#export CUDAHOME=/usr/local/n37-cuda-9.2
#export PATH=/usr/local/n37-cuda-9.2/bin:$PATH
#export LD_LIBRARY_PATH=/usr/local/n37-cuda-9.2/lib64:$LD_LIBRARY_PATH
#which nvcc
  
# just in case
export PATH=/share/apps/CENTOS7/openmpi/4.0.4/bin:$PATH
export LD_LIBRARY_PATH=/share/apps/CENTOS7/openmpi/4.0.4/lib:$LD_LIBRARY_PATH
which mpirun
  
# /usr/local/slurm is a symbolic link to slurm-21.08.1
./configure \
--prefix=/usr/local/slurm-21.08.1 \
--sysconfdir=/usr/local/slurm-21.08.1/etc \
 | tee -a install.log
# not --with-nvml=/usr/local/n37-cuda-9.2
# not --with-hdf5=no

# known hdf5 library problem when including --with-nvml
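
# optional, illustrative check (not part of the original notes):
# confirm what configure auto-detected once it finishes
grep -i -e nvml -e hdf5 config.log | tail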

config.status: creating src/plugins/gpu/nvml/Makefile
  
Libraries have been installed in:
   /usr/local/slurm-21.08.1/lib/slurm

If you ever happen to want to link against installed libraries
in a given directory, LIBDIR, you must either use libtool, and
specify the full pathname of the library, or use the '-LLIBDIR'
flag during linking and do at least one of the following:
   - add LIBDIR to the 'LD_LIBRARY_PATH' environment variable
     during execution
   - add LIBDIR to the 'LD_RUN_PATH' environment variable
     during linking
   - use the '-Wl,-rpath -Wl,LIBDIR' linker flag
   - have your system administrator add LIBDIR to '/etc/ld.so.conf'

</code>
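
The installed-libraries notice above is produced by the install step that follows configure. A minimal sketch of that step, assuming the standard autotools flow and the ''/usr/local/slurm'' symlink mentioned in the comments (the actual commands were not captured in the original notes):

<code>
# build and install into the configured prefix
make -j 8 | tee -a install.log
make install | tee -a install.log

# create the convenience symlink referenced above
ln -s /usr/local/slurm-21.08.1 /usr/local/slurm
</code>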

<code>
  
export PATH=/usr/local/slurm/bin:$PATH
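
# quick sanity check that the client tools resolve from the new prefix
# (illustrative; the daemons still need an etc/slurm.conf before they run)
which sinfo sbatch srun
sinfo --version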