From 790e8267bf46ac009fbba58a44fb128414bcad4c Mon Sep 17 00:00:00 2001
From: emilannevelink
Date: Thu, 8 Jun 2023 12:48:34 -0400
Subject: [PATCH 1/2] add nvidia benchmark

---
 benchmarks/lammps-allegro/.gitignore          |   8 +
 benchmarks/lammps-allegro/README.md           |   1 +
 benchmarks/lammps-allegro/data.LiMg.in        | 258 ++++++++++++++++++
 benchmarks/lammps-allegro/get_performance.py  |  31 +++
 benchmarks/lammps-allegro/in.lj.txt           |  30 ++
 benchmarks/lammps-allegro/install.sh          |  56 ++++
 .../lammps-allegro/potential_files/meam.app   |  38 +++
 .../lammps-allegro/potential_files/meam.lib   |   6 +
 benchmarks/lammps-allegro/run.LiMg2nnmeam.in  |  21 ++
 benchmarks/lammps-allegro/submit.sh           |  31 +++
 benchmarks/lammps-nvidia/.gitignore           |   4 +
 benchmarks/lammps-nvidia/README.md            |   3 +
 benchmarks/lammps-nvidia/get_performance.py   |  32 +++
 benchmarks/lammps-nvidia/in.lj.txt            |  30 ++
 benchmarks/lammps-nvidia/install.sh           |   7 +
 benchmarks/lammps-nvidia/run.sh               |  19 ++
 benchmarks/lammps-nvidia/submit.sh            |  16 ++
 17 files changed, 591 insertions(+)
 create mode 100644 benchmarks/lammps-allegro/.gitignore
 create mode 100644 benchmarks/lammps-allegro/README.md
 create mode 100644 benchmarks/lammps-allegro/data.LiMg.in
 create mode 100644 benchmarks/lammps-allegro/get_performance.py
 create mode 100644 benchmarks/lammps-allegro/in.lj.txt
 create mode 100644 benchmarks/lammps-allegro/install.sh
 create mode 100644 benchmarks/lammps-allegro/potential_files/meam.app
 create mode 100644 benchmarks/lammps-allegro/potential_files/meam.lib
 create mode 100644 benchmarks/lammps-allegro/run.LiMg2nnmeam.in
 create mode 100644 benchmarks/lammps-allegro/submit.sh
 create mode 100644 benchmarks/lammps-nvidia/.gitignore
 create mode 100644 benchmarks/lammps-nvidia/README.md
 create mode 100644 benchmarks/lammps-nvidia/get_performance.py
 create mode 100644 benchmarks/lammps-nvidia/in.lj.txt
 create mode 100755 benchmarks/lammps-nvidia/install.sh
 create mode 100755 benchmarks/lammps-nvidia/run.sh
 create mode 100644 benchmarks/lammps-nvidia/submit.sh

diff --git a/benchmarks/lammps-allegro/.gitignore b/benchmarks/lammps-allegro/.gitignore
new file mode 100644
index 0000000..cb04cf6
--- /dev/null
+++ b/benchmarks/lammps-allegro/.gitignore
@@ -0,0 +1,8 @@
+logs/
+lammps/
+pair_allegro/
+libtorch/
+libtorch*
+out.*
+lammps.sif
+log.lammps
diff --git a/benchmarks/lammps-allegro/README.md b/benchmarks/lammps-allegro/README.md
new file mode 100644
index 0000000..850ca9e
--- /dev/null
+++ b/benchmarks/lammps-allegro/README.md
@@ -0,0 +1 @@
+Not Ready
\ No newline at end of file
diff --git a/benchmarks/lammps-allegro/data.LiMg.in b/benchmarks/lammps-allegro/data.LiMg.in
new file mode 100644
index 0000000..21cf8b4
--- /dev/null
+++ b/benchmarks/lammps-allegro/data.LiMg.in
@@ -0,0 +1,258 @@
+data.in.223156 (written by ASE)
+
+247 atoms
+2 atom types
+0.0 17.5 xlo xhi
+0.0 17.5 ylo yhi
+0.0 17.5 zlo zhi
+
+
+Atoms
+
+ 1 0 1 0.0 0 0 0
+ 2 0 1 0.0 1.75 1.75 1.75
+ 3 0 2 0.0 0 0 3.5
+ 4 0 2 0.0 1.75 1.75 5.25
+ 5 0 1 0.0 0 0 7
+ 6 0 1 0.0 1.75 1.75 8.75
+ 7 0 1 0.0 0 0 10.5
+ 8 0 1 0.0 1.75 1.75 12.25
+ 9 0 1 0.0 0 0 14
+ 10 0 1 0.0 1.75 1.75 15.75
+ 11 0 1 0.0 0 3.5 0
+ 12 0 1 0.0 1.75 5.25 1.75
+ 13 0 1 0.0 0 3.5 3.5
+ 14 0 1 0.0 1.75 5.25 5.25
+ 15 0 1 0.0 0 3.5 7
+ 16 0 1 0.0 1.75 5.25 8.75
+ 17 0 2 0.0 0 3.5 10.5
+ 18 0 1 0.0 1.75 5.25 12.25
+ 19 0 1 0.0 0 3.5 14
+ 20 0 1 0.0 1.75 5.25 15.75
+ 21 0 1 0.0 0 7 0
+ 22 0 2 0.0 1.75 8.75 1.75
+ 23 0 1 0.0 0 7 3.5
+ 24 0 1 0.0 1.75 8.75 5.25
+ 25 0 1 0.0 0 7 7
+ 26 0 2 0.0 1.75 8.75 8.75
+ 27 0 1 0.0 0 7 10.5
+ 28 0 2 0.0 1.75 8.75 12.25
+ 29 0 1 0.0 0 7 14
+ 30 0 1 0.0 1.75 8.75 15.75
+ 31 0 2 0.0 0 10.5 0
+ 32 0 1 0.0 1.75 12.25 1.75
+ 33 0 2 0.0 0 10.5 3.5
+ 34 0 1 0.0 1.75 12.25 5.25
+ 35 0 2 0.0 0 10.5 7
+ 36 0 1 0.0 1.75 12.25 8.75
+ 37 0 1 0.0 0 10.5 10.5
+ 38 0 1 0.0 1.75 12.25 12.25
+ 39 0 2 0.0 0 10.5 14
+ 40 0 2 0.0 1.75 12.25 15.75
+ 41 0 1 0.0 0 14 0
+ 42 0 2 0.0 1.75 15.75 1.75
+ 43 0 1 0.0 0 14 3.5
+ 44 0 1 0.0 1.75 15.75 5.25
+ 45 0 1 0.0 0 14 7
+ 46 0 1 0.0 1.75 15.75 8.75
+ 47 0 1 0.0 0 14 10.5
+ 48 0 1 0.0 1.75 15.75 12.25
+ 49 0 1 0.0 0 14 14
+ 50 0 1 0.0 1.75 15.75 15.75
+ 51 0 1 0.0 3.5 0 0
+ 52 0 2 0.0 5.25 1.75 1.75
+ 53 0 2 0.0 3.5 0 3.5
+ 54 0 1 0.0 5.25 1.75 5.25
+ 55 0 1 0.0 3.5 0 7
+ 56 0 1 0.0 5.25 1.75 8.75
+ 57 0 1 0.0 3.5 0 10.5
+ 58 0 1 0.0 5.25 1.75 12.25
+ 59 0 1 0.0 3.5 0 14
+ 60 0 1 0.0 5.25 1.75 15.75
+ 61 0 2 0.0 3.5 3.5 0
+ 62 0 1 0.0 5.25 5.25 1.75
+ 63 0 2 0.0 3.5 3.5 3.5
+ 64 0 2 0.0 5.25 5.25 5.25
+ 65 0 1 0.0 3.5 3.5 7
+ 66 0 2 0.0 5.25 5.25 8.75
+ 67 0 1 0.0 3.5 3.5 10.5
+ 68 0 1 0.0 5.25 5.25 12.25
+ 69 0 1 0.0 3.5 3.5 14
+ 70 0 1 0.0 5.25 5.25 15.75
+ 71 0 1 0.0 3.5 7 0
+ 72 0 1 0.0 5.25 8.75 1.75
+ 73 0 1 0.0 3.5 7 3.5
+ 74 0 1 0.0 5.25 8.75 5.25
+ 75 0 1 0.0 3.5 7 7
+ 76 0 1 0.0 5.25 8.75 8.75
+ 77 0 2 0.0 3.5 7 10.5
+ 78 0 1 0.0 5.25 8.75 12.25
+ 79 0 1 0.0 3.5 7 14
+ 80 0 2 0.0 5.25 8.75 15.75
+ 81 0 1 0.0 3.5 10.5 0
+ 82 0 1 0.0 5.25 12.25 1.75
+ 83 0 1 0.0 3.5 10.5 3.5
+ 84 0 1 0.0 5.25 12.25 5.25
+ 85 0 1 0.0 3.5 10.5 7
+ 86 0 1 0.0 5.25 12.25 8.75
+ 87 0 1 0.0 3.5 10.5 10.5
+ 88 0 1 0.0 5.25 12.25 12.25
+ 89 0 1 0.0 3.5 10.5 14
+ 90 0 2 0.0 5.25 12.25 15.75
+ 91 0 1 0.0 3.5 14 0
+ 92 0 1 0.0 5.25 15.75 1.75
+ 93 0 1 0.0 3.5 14 3.5
+ 94 0 1 0.0 5.25 15.75 5.25
+ 95 0 1 0.0 3.5 14 7
+ 96 0 2 0.0 5.25 15.75 8.75
+ 97 0 1 0.0 3.5 14 10.5
+ 98 0 1 0.0 5.25 15.75 12.25
+ 99 0 1 0.0 3.5 14 14
+ 100 0 2 0.0 5.25 15.75 15.75
+ 101 0 2 0.0 7 0 0
+ 102 0 2 0.0 8.75 1.75 1.75
+ 103 0 2 0.0 7 0 3.5
+ 104 0 2 0.0 8.75 1.75 5.25
+ 105 0 1 0.0 7 0 7
+ 106 0 1 0.0 8.75 1.75 8.75
+ 107 0 1 0.0 7 0 10.5
+ 108 0 1 0.0 8.75 1.75 12.25
+ 109 0 2 0.0 7 0 14
+ 110 0 1 0.0 8.75 1.75 15.75
+ 111 0 1 0.0 7 3.5 0
+ 112 0 2 0.0 8.75 5.25 1.75
+ 113 0 1 0.0 7 3.5 3.5
+ 114 0 1 0.0 8.75 5.25 5.25
+ 115 0 1 0.0 7 3.5 7
+ 116 0 1 0.0 8.75 5.25 8.75
+ 117 0 1 0.0 7 3.5 10.5
+ 118 0 1 0.0 8.75 5.25 12.25
+ 119 0 1 0.0 7 3.5 14
+ 120 0 2 0.0 8.75 5.25 15.75
+ 121 0 1 0.0 7 7 0
+ 122 0 1 0.0 8.75 8.75 1.75
+ 123 0 1 0.0 7 7 3.5
+ 124 0 1 0.0 8.75 8.75 5.25
+ 125 0 2 0.0 7 7 7
+ 126 0 2 0.0 8.75 8.75 8.75
+ 127 0 1 0.0 7 7 10.5
+ 128 0 2 0.0 8.75 8.75 12.25
+ 129 0 1 0.0 7 7 14
+ 130 0 1 0.0 8.75 8.75 15.75
+ 131 0 1 0.0 7 10.5 0
+ 132 0 1 0.0 8.75 12.25 1.75
+ 133 0 1 0.0 7 10.5 3.5
+ 134 0 1 0.0 8.75 12.25 5.25
+ 135 0 1 0.0 7 10.5 7
+ 136 0 1 0.0 8.75 12.25 8.75
+ 137 0 1 0.0 7 10.5 10.5
+ 138 0 1 0.0 8.75 12.25 12.25
+ 139 0 1 0.0 7 10.5 14
+ 140 0 1 0.0 8.75 12.25 15.75
+ 141 0 2 0.0 7 14 0
+ 142 0 1 0.0 8.75 15.75 1.75
+ 143 0 1 0.0 7 14 3.5
+ 144 0 1 0.0 8.75 15.75 5.25
+ 145 0 1 0.0 7 14 7
+ 146 0 1 0.0 8.75 15.75 8.75
+ 147 0 1 0.0 8.75 15.75 12.25
+ 148 0 1 0.0 7 14 14
+ 149 0 1 0.0 8.75 15.75 15.75
+ 150 0 2 0.0 10.5 0 0
+ 151 0 1 0.0 12.25 1.75 1.75
+ 152 0 1 0.0 12.25 1.75 5.25
+ 153 0 1 0.0 10.5 0 7
+ 154 0 2 0.0 12.25 1.75 8.75
+ 155 0 1 0.0 10.5 0 10.5
+ 156 0 1 0.0 12.25 1.75 12.25
+ 157 0 1 0.0 10.5 0 14
+ 158 0 1 0.0 12.25 1.75 15.75
+ 159 0 2 0.0 10.5 3.5 0
+ 160 0 1 0.0 12.25 5.25 1.75
+ 161 0 1 0.0 10.5 3.5 3.5
+ 162 0 2 0.0 12.25 5.25 5.25
+ 163 0 1 0.0 10.5 3.5 7
+ 164 0 2 0.0 12.25 5.25 8.75
+ 165 0 1 0.0 10.5 3.5 10.5
+ 166 0 1 0.0 12.25 5.25 12.25
+ 167 0 1 0.0 10.5 3.5 14
+ 168 0 1 0.0 12.25 5.25 15.75
+ 169 0 2 0.0 10.5 7 0
+ 170 0 2 0.0 12.25 8.75 1.75
+ 171 0 1 0.0 10.5 7 3.5
+ 172 0 1 0.0 12.25 8.75 5.25
+ 173 0 1 0.0 10.5 7 7
+ 174 0 2 0.0 12.25 8.75 8.75
+ 175 0 2 0.0 10.5 7 10.5
+ 176 0 1 0.0 12.25 8.75 12.25
+ 177 0 1 0.0 10.5 7 14
+ 178 0 2 0.0 12.25 8.75 15.75
+ 179 0 2 0.0 10.5 10.5 0
+ 180 0 1 0.0 12.25 12.25 1.75
+ 181 0 2 0.0 10.5 10.5 3.5
+ 182 0 1 0.0 12.25 12.25 5.25
+ 183 0 1 0.0 10.5 10.5 7
+ 184 0 1 0.0 12.25 12.25 8.75
+ 185 0 2 0.0 10.5 10.5 10.5
+ 186 0 1 0.0 12.25 12.25 12.25
+ 187 0 1 0.0 10.5 10.5 14
+ 188 0 1 0.0 12.25 12.25 15.75
+ 189 0 1 0.0 10.5 14 0
+ 190 0 2 0.0 12.25 15.75 1.75
+ 191 0 2 0.0 10.5 14 3.5
+ 192 0 1 0.0 12.25 15.75 5.25
+ 193 0 1 0.0 10.5 14 7
+ 194 0 1 0.0 12.25 15.75 8.75
+ 195 0 1 0.0 10.5 14 10.5
+ 196 0 1 0.0 12.25 15.75 12.25
+ 197 0 2 0.0 10.5 14 14
+ 198 0 1 0.0 12.25 15.75 15.75
+ 199 0 1 0.0 14 0 0
+ 200 0 2 0.0 15.75 1.75 1.75
+ 201 0 2 0.0 14 0 3.5
+ 202 0 1 0.0 15.75 1.75 5.25
+ 203 0 2 0.0 14 0 7
+ 204 0 1 0.0 15.75 1.75 8.75
+ 205 0 1 0.0 14 0 10.5
+ 206 0 1 0.0 15.75 1.75 12.25
+ 207 0 1 0.0 14 0 14
+ 208 0 1 0.0 15.75 1.75 15.75
+ 209 0 1 0.0 15.75 5.25 1.75
+ 210 0 1 0.0 14 3.5 3.5
+ 211 0 1 0.0 15.75 5.25 5.25
+ 212 0 1 0.0 14 3.5 7
+ 213 0 1 0.0 15.75 5.25 8.75
+ 214 0 2 0.0 14 3.5 10.5
+ 215 0 2 0.0 15.75 5.25 12.25
+ 216 0 1 0.0 14 3.5 14
+ 217 0 1 0.0 15.75 5.25 15.75
+ 218 0 2 0.0 14 7 0
+ 219 0 1 0.0 15.75 8.75 1.75
+ 220 0 1 0.0 14 7 3.5
+ 221 0 1 0.0 15.75 8.75 5.25
+ 222 0 1 0.0 14 7 7
+ 223 0 2 0.0 15.75 8.75 8.75
+ 224 0 1 0.0 14 7 10.5
+ 225 0 1 0.0 15.75 8.75 12.25
+ 226 0 1 0.0 14 7 14
+ 227 0 2 0.0 15.75 8.75 15.75
+ 228 0 1 0.0 14 10.5 0
+ 229 0 1 0.0 15.75 12.25 1.75
+ 230 0 2 0.0 14 10.5 3.5
+ 231 0 1 0.0 15.75 12.25 5.25
+ 232 0 1 0.0 14 10.5 7
+ 233 0 1 0.0 15.75 12.25 8.75
+ 234 0 1 0.0 14 10.5 10.5
+ 235 0 2 0.0 15.75 12.25 12.25
+ 236 0 1 0.0 14 10.5 14
+ 237 0 1 0.0 15.75 12.25 15.75
+ 238 0 1 0.0 14 14 0
+ 239 0 1 0.0 15.75 15.75 1.75
+ 240 0 1 0.0 14 14 3.5
+ 241 0 1 0.0 15.75 15.75 5.25
+ 242 0 1 0.0 14 14 7
+ 243 0 1 0.0 15.75 15.75 8.75
+ 244 0 1 0.0 14 14 10.5
+ 245 0 1 0.0 15.75 15.75 12.25
+ 246 0 2 0.0 14 14 14
+ 247 0 1 0.0 15.75 15.75 15.75
diff --git a/benchmarks/lammps-allegro/get_performance.py b/benchmarks/lammps-allegro/get_performance.py
new file mode 100644
index 0000000..efe80b3
--- /dev/null
+++ b/benchmarks/lammps-allegro/get_performance.py
@@ -0,0 +1,31 @@
+import os
+import json
+
+out = {}
+nextline = False
+logfile = 'log.lammps'
+path = ''
+if not os.path.isfile(logfile):
+    path = 'benchmarks/lammps-allegro'
+f = open(os.path.join(path,logfile))
+for line in f.readlines():
+    if 'Performance' in line:
+        print(line)
+        out['raw1'] = line
+        parts = line.split()  # split on whitespace; the metal unit style adds an extra hours/ns field
+        out['simulation length'] = float(parts[1])
+        out['simulation length units'] = parts[2].rstrip(',')
+        out['timesteps/s'] = float(parts[-2])
+        nextline = True
+    elif nextline:
+        print(line)
+        out['raw2'] = line
+        out['utilization'] = line.split(' ')[0]
+        nextline = False
+
+f.close()
+
+with open(
+    os.path.join(path,'out.json'),'w'
+) as fl:
+    json.dump(out,fl)
\ No newline at end of file
diff --git a/benchmarks/lammps-allegro/in.lj.txt b/benchmarks/lammps-allegro/in.lj.txt
new file mode 100644
index 0000000..01e12ef
--- /dev/null
+++ b/benchmarks/lammps-allegro/in.lj.txt
@@ -0,0 +1,30 @@
+# 3d Lennard-Jones melt
+
+variable x index 1
+variable y index 1
+variable z index 1
+
+variable xx equal 20*$x
+variable yy equal 20*$y
+variable zz equal 20*$z
+
+units lj
+atom_style atomic
+
+lattice fcc 0.8442
+region box block 0 ${xx} 0 ${yy} 0 ${zz}
+create_box 1 box
+create_atoms 1 box
+mass 1 1.0
+
+velocity all create 1.44 87287 loop geom
+
+pair_style lj/cut 2.5
+pair_coeff 1 1 1.0 1.0 2.5
+
+neighbor 0.3 bin
+neigh_modify delay 0 every 20 check no
+
+fix 1 all nve
+
+run 100
diff --git a/benchmarks/lammps-allegro/install.sh b/benchmarks/lammps-allegro/install.sh
new file mode 100644
index 0000000..f98d78f
--- /dev/null
+++ b/benchmarks/lammps-allegro/install.sh
@@ -0,0 +1,56 @@
+#!/bin/bash
+
+# Fail on error
+set -o pipefail
+set -e
+
+module load anaconda3
+conda create -n env python=3.9
+
+nproc=1
+
+# Download LAMMPS
+# git clone -b stable_29Sep2021_update2 --depth 1 git@github.com:lammps/lammps
+git clone -b stable_23Jun2022 https://github.com/lammps/lammps.git lammps
+
+# Download Allegro Patch
+git clone https://github.com/mir-group/pair_allegro.git
+
+# Install Allegro Patch
+cd pair_allegro
+./patch_lammps.sh ../lammps/
+cd ../
+
+# Download LibTorch
+wget https://download.pytorch.org/libtorch/cu117/libtorch-cxx11-abi-shared-with-deps-2.0.1%2Bcu117.zip
+unzip libtorch-cxx11-abi-shared-with-deps-2.0.1+cu117.zip
+
+# Install MKL
+
+# Load OpenMPI
+module load openmpi
+
+# Load CUDA
+module load cuda/11.7.1
+
+# Install OpenMP
+# spack install llvm-openmp
+# spack load llvm-openmp
+
+
+
+# Install LAMMPS
+cd lammps
+mkdir build
+cd build
+cmake ../cmake -DCMAKE_PREFIX_PATH=../../libtorch \
+    -DPKG_KOKKOS=ON -DKokkos_ENABLE_CUDA=ON -DKokkos_ENABLE_OPENMP=off \
+    -DCMAKE_CXX_COMPILER=/jet/home/anneveli/github/EEGMark/benchmarks/lammps-allegro/lammps/lib/kokkos/bin/nvcc_wrapper
+
+cmake -C ../cmake/presets/basic.cmake -D BUILD_SHARED_LIBS=on -D BUILD_MPI=on \
+    -D LAMMPS_EXCEPTIONS=on -D PKG_QEQ=on -D PKG_MEAM=on ../cmake
+
+# make -j$(nproc)
+# #SBATCH -A venkvis_gpu
+#SBATCH --gres=gpu:1
+# srun --account=venkvis_gpu --partition=gpu --gres=gpu:1 --mem=2G --time=30 --pty bash
\ No newline at end of file
diff --git a/benchmarks/lammps-allegro/potential_files/meam.app b/benchmarks/lammps-allegro/potential_files/meam.app
new file mode 100644
index 0000000..bd6355d
--- /dev/null
+++ b/benchmarks/lammps-allegro/potential_files/meam.app
@@ -0,0 +1,38 @@
+erose_form = 2
+rc = 4.8
+delr = 0.1
+ialloy = 2
+Cmin(1,1,1) = 0.1599999964237213
+Cmax(1,1,1) = 2.799999952316284
+Cmin(1,1,2) = 0.1599999964237213
+Cmax(1,1,2) = 2.799999952316284
+Cmin(1,2,1) = 0.30250000953674316
+Cmax(1,2,1) = 1.440000057220459
+Cmin(1,2,2) = 0.30250000953674316
+Cmax(1,2,2) = 2.799999952316284
+Cmin(2,2,1) = 0.8100000023841858
+Cmax(2,2,1) = 2.799999952316284
+Cmin(2,2,2) = 0.49000000953674316
+Cmax(2,2,2) = 2.799999952316284
+lattce(1,2) = b2
+alpha(1,2) = 3.849928855895996
+re(1,2) = 2.950000047683716
+Ec(1,2) = 1.559999942779541
+augt1(1) = 0
+repuls(1,1) = 0.05000000074505806
+attrac(1,1) = 0.05000000074505806
+zbl(1,1) = 0
+nn2(1,1) = 1
+repuls(1,2) = 0.02500000037252903
+attrac(1,2) = 0.02500000037252903
+zbl(1,2) = 0
+nn2(1,2) = 1
+augt1(2) = 0
+repuls(2,1) = 0.02500000037252903
+attrac(2,1) = 0.02500000037252903
+zbl(2,1) = 0
+nn2(2,1) = 1
+repuls(2,2) = 0.0
+attrac(2,2) = 0.0
+zbl(2,2) = 0
+nn2(2,2) = 1
diff --git a/benchmarks/lammps-allegro/potential_files/meam.lib b/benchmarks/lammps-allegro/potential_files/meam.lib
new file mode 100644
index 0000000..c9ed9bc
--- /dev/null
+++ b/benchmarks/lammps-allegro/potential_files/meam.lib
@@ -0,0 +1,6 @@
+'Li' 'bcc' 8 3 6.94
+4.110938549041748 1.649999976158142 1.0 4.0 1.0 3.4871954917907715 1.649999976158142 0.949999988079071
+1 2.299999952316284 5.0 0.5 1.0 3
+'Mg' 'hcp' 12 12 24.305
+5.574794769287109 2.299999952316284 1.0 3.0 1.0 3.200000047683716 1.5499999523162842 0.5199999809265137
+1 9.0 -2.0 9.5 1.0 3
diff --git a/benchmarks/lammps-allegro/run.LiMg2nnmeam.in b/benchmarks/lammps-allegro/run.LiMg2nnmeam.in
new file mode 100644
index 0000000..7a962bc
--- /dev/null
+++ b/benchmarks/lammps-allegro/run.LiMg2nnmeam.in
@@ -0,0 +1,21 @@
+units metal
+boundary p p p
+atom_style full
+read_data data.LiMg.in
+mass 1 6.94
+mass 2 24.305
+change_box all triclinic
+velocity all create 600 14322
+pair_style meam/c
+pair_coeff * * potential_files/meam.lib Li Mg potential_files/meam.app Li Mg
+neighbor 0.3 bin
+neigh_modify delay 10
+
+fix 2 all npt temp 600 600 $(100.0*dt) aniso 0 0 $(1000.0*dt)
+thermo 10000
+thermo_style custom step temp pe ke
+# dump dump_1 all custom 10000 /ocean/projects/cts180021p/anneveli/ALBENCHMARK/datasets/Li3Mg1_bcc_npt_aniso_600_prod_10ns_d10000fs/Li3Mg1_bcc_npt_aniso_600K_10ns_d10000fs_*.xyz id element x y z fx fy fz
+# dump_modify dump_1 element Li Mg
+timestep 0.001
+run 10000000
+
diff --git a/benchmarks/lammps-allegro/submit.sh b/benchmarks/lammps-allegro/submit.sh
new file mode 100644
index 0000000..ed144f8
--- /dev/null
+++ b/benchmarks/lammps-allegro/submit.sh
@@ -0,0 +1,31 @@
+#!/usr/bin/bash
+
+#SBATCH -t 00-01:00
+#SBATCH -J lammps_benchmark
+#SBATCH -o logs/output.%j
+#SBATCH -e logs/error.%j
+#SBATCH -p GPU-shared
+#SBATCH -N 1
+#SBATCH --ntasks=1
+#SBATCH --gpus=v100-32:1
+#SBATCH --mem-per-gpu=50000
+#SBATCH --mail-user=eannevel@andrew.cmu.edu
+
+set -e; set -o pipefail
+
+# Load required modules
+# module load singularity
+
+# Build SIF, if it doesn't exist
+if [[ ! -f lammps.sif ]]; then
+    singularity build lammps.sif docker://nvcr.io/hpc/lammps:29Oct2020
+fi
+
+readonly gpus_per_node=$(( SLURM_NTASKS / SLURM_JOB_NUM_NODES ))
+
+echo "Running Lennard Jones 8x8x8 example on ${SLURM_NTASKS} GPUs..."
+# echo "Running 2NN MEAM example on ${SLURM_NTASKS} GPUs..."
+srun --mpi=pmi2 \
+singularity run --nv -B ${PWD}:/host_pwd lammps.sif \
+lmp -k on g ${gpus_per_node} -sf kk -pk kokkos cuda/aware on neigh full comm device binsize 2.8 -var x 8 -var y 8 -var z 8 -in /host_pwd/run.LiMg2nnmeam.in
+# lmp -k on g ${gpus_per_node} -sf kk -pk kokkos cuda/aware on neigh full comm device binsize 2.8 -var x 8 -var y 8 -var z 8 -in /host_pwd/in.lj.txt
\ No newline at end of file
diff --git a/benchmarks/lammps-nvidia/.gitignore b/benchmarks/lammps-nvidia/.gitignore
new file mode 100644
index 0000000..dad979a
--- /dev/null
+++ b/benchmarks/lammps-nvidia/.gitignore
@@ -0,0 +1,4 @@
+lammps.sif
+log.lammps
+out.*
+logs/
\ No newline at end of file
diff --git a/benchmarks/lammps-nvidia/README.md b/benchmarks/lammps-nvidia/README.md
new file mode 100644
index 0000000..7fba52c
--- /dev/null
+++ b/benchmarks/lammps-nvidia/README.md
@@ -0,0 +1,3 @@
+`run.sh` and `install.sh` assume that conda is on the path; you may need to modify them to load conda.
+
+`submit.sh` is a simple test script to submit on Bridges-2.
\ No newline at end of file
diff --git a/benchmarks/lammps-nvidia/get_performance.py b/benchmarks/lammps-nvidia/get_performance.py
new file mode 100644
index 0000000..9fce2c8
--- /dev/null
+++ b/benchmarks/lammps-nvidia/get_performance.py
@@ -0,0 +1,32 @@
+import os
+import json
+import yaml
+
+out = {}
+nextline = False
+logfile = 'log.lammps'
+path = ''
+if not os.path.isfile(logfile):
+    path = 'benchmarks/lammps-nvidia'
+f = open(os.path.join(path,logfile))
+for line in f.readlines():
+    if 'Performance' in line:
+        print(line)
+        out['raw1'] = line
+        parts = line.split()  # split on whitespace; the metal unit style adds an extra hours/ns field
+        out['simulation length'] = float(parts[1])
+        out['simulation length units'] = parts[2].rstrip(',')
+        out['timesteps/s'] = float(parts[-2])
+        nextline = True
+    elif nextline:
+        print(line)
+        out['raw2'] = line
+        out['utilization'] = line.split(' ')[0]
+        nextline = False
+
+f.close()
+
+with open(
+    os.path.join(path,'out.yaml'),'w'
+) as fl:
+    yaml.dump(out,fl)
\ No newline at end of file
diff --git a/benchmarks/lammps-nvidia/in.lj.txt b/benchmarks/lammps-nvidia/in.lj.txt
new file mode 100644
index 0000000..01e12ef
--- /dev/null
+++ b/benchmarks/lammps-nvidia/in.lj.txt
@@ -0,0 +1,30 @@
+# 3d Lennard-Jones melt
+
+variable x index 1
+variable y index 1
+variable z index 1
+
+variable xx equal 20*$x
+variable yy equal 20*$y
+variable zz equal 20*$z
+
+units lj
+atom_style atomic
+
+lattice fcc 0.8442
+region box block 0 ${xx} 0 ${yy} 0 ${zz}
+create_box 1 box
+create_atoms 1 box
+mass 1 1.0
+
+velocity all create 1.44 87287 loop geom
+
+pair_style lj/cut 2.5
+pair_coeff 1 1 1.0 1.0 2.5
+
+neighbor 0.3 bin
+neigh_modify delay 0 every 20 check no
+
+fix 1 all nve
+
+run 100
diff --git a/benchmarks/lammps-nvidia/install.sh b/benchmarks/lammps-nvidia/install.sh
new file mode 100755
index 0000000..8906047
--- /dev/null
+++ b/benchmarks/lammps-nvidia/install.sh
@@ -0,0 +1,7 @@
+chmod +x run.sh
+
+mkdir logs
+
+conda create -n env python=3.9
+conda activate env
+conda install pyyaml
\ No newline at end of file
diff --git a/benchmarks/lammps-nvidia/run.sh b/benchmarks/lammps-nvidia/run.sh
new file mode 100755
index 0000000..0741f28
--- /dev/null
+++ b/benchmarks/lammps-nvidia/run.sh
@@ -0,0 +1,19 @@
+#!/usr/bin/bash
+
+set -e; set -o pipefail
+
+# Build SIF, if it doesn't exist
+if [[ ! -f lammps.sif ]]; then
+    singularity build lammps.sif docker://nvcr.io/hpc/lammps:29Oct2020
+fi
+
+readonly gpus_per_node=$(( SLURM_NTASKS / SLURM_JOB_NUM_NODES ))
+
+echo "Running Lennard Jones 8x8x8 example on ${SLURM_NTASKS} GPUs..."
+# echo "Running 2NN MEAM example on ${SLURM_NTASKS} GPUs..."
+srun --mpi=pmi2 \
+singularity run --nv -B ${PWD}:/host_pwd lammps.sif \
+lmp -k on g ${gpus_per_node} -sf kk -pk kokkos cuda/aware on neigh full comm device binsize 2.8 -var x 8 -var y 8 -var z 8 -in /host_pwd/in.lj.txt
+
+conda activate env
+python get_performance.py
\ No newline at end of file
diff --git a/benchmarks/lammps-nvidia/submit.sh b/benchmarks/lammps-nvidia/submit.sh
new file mode 100644
index 0000000..58fed7a
--- /dev/null
+++ b/benchmarks/lammps-nvidia/submit.sh
@@ -0,0 +1,16 @@
+#!/usr/bin/bash
+
+#SBATCH -t 00-01:00
+#SBATCH -J lammps_benchmark
+#SBATCH -o logs/output.%j
+#SBATCH -e logs/error.%j
+#SBATCH -p GPU-shared
+#SBATCH -N 1
+#SBATCH --ntasks=1
+#SBATCH --gpus=v100-32:1
+#SBATCH --mem-per-gpu=50000
+#SBATCH --mail-user=eannevel@andrew.cmu.edu
+
+module load anaconda3
+
+./run.sh
\ No newline at end of file

From ec2af5674bf62f79f464262ddaefc893a0743d65 Mon Sep 17 00:00:00 2001
From: emilannevelink
Date: Thu, 8 Jun 2023 13:25:25 -0400
Subject: [PATCH 2/2] remove allegro

---
 benchmarks/lammps-allegro/.gitignore          |   8 -
 benchmarks/lammps-allegro/README.md           |   1 -
 benchmarks/lammps-allegro/data.LiMg.in        | 258 ------------------
 benchmarks/lammps-allegro/get_performance.py  |  31 ---
 benchmarks/lammps-allegro/in.lj.txt           |  30 --
 benchmarks/lammps-allegro/install.sh          |  56 ----
 .../lammps-allegro/potential_files/meam.app   |  38 ---
 .../lammps-allegro/potential_files/meam.lib   |   6 -
 benchmarks/lammps-allegro/run.LiMg2nnmeam.in  |  21 --
 benchmarks/lammps-allegro/submit.sh           |  31 ---
 10 files changed, 480 deletions(-)
 delete mode 100644 benchmarks/lammps-allegro/.gitignore
 delete mode 100644 benchmarks/lammps-allegro/README.md
 delete mode 100644 benchmarks/lammps-allegro/data.LiMg.in
 delete mode 100644 benchmarks/lammps-allegro/get_performance.py
 delete mode 100644 benchmarks/lammps-allegro/in.lj.txt
 delete mode 100644 benchmarks/lammps-allegro/install.sh
 delete mode 100644 benchmarks/lammps-allegro/potential_files/meam.app
 delete mode 100644 benchmarks/lammps-allegro/potential_files/meam.lib
 delete mode 100644 benchmarks/lammps-allegro/run.LiMg2nnmeam.in
 delete mode 100644 benchmarks/lammps-allegro/submit.sh

diff --git a/benchmarks/lammps-allegro/.gitignore b/benchmarks/lammps-allegro/.gitignore
deleted file mode 100644
index cb04cf6..0000000
--- a/benchmarks/lammps-allegro/.gitignore
+++ /dev/null
@@ -1,8 +0,0 @@
-logs/
-lammps/
-pair_allegro/
-libtorch/
-libtorch*
-out.*
-lammps.sif
-log.lammps
diff --git a/benchmarks/lammps-allegro/README.md b/benchmarks/lammps-allegro/README.md
deleted file mode 100644
index 850ca9e..0000000
--- a/benchmarks/lammps-allegro/README.md
+++ /dev/null
@@ -1 +0,0 @@
-Not Ready
\ No newline at end of file
diff --git a/benchmarks/lammps-allegro/data.LiMg.in b/benchmarks/lammps-allegro/data.LiMg.in
deleted file mode 100644
index 21cf8b4..0000000
--- a/benchmarks/lammps-allegro/data.LiMg.in
+++ /dev/null
@@ -1,258 +0,0 @@
-data.in.223156 (written by ASE)
-
-247 atoms
-2 atom types
-0.0 17.5 xlo xhi
-0.0 17.5 ylo yhi
-0.0 17.5 zlo zhi
-
-
-Atoms
-
- 1 0 1 0.0 0 0 0
- 2 0 1 0.0 1.75 1.75 1.75
- 3 0 2 0.0 0 0 3.5
- 4 0 2 0.0 1.75 1.75 5.25
- 5 0 1 0.0 0 0 7
- 6 0 1 0.0 1.75 1.75 8.75
- 7 0 1 0.0 0 0 10.5
- 8 0 1 0.0 1.75 1.75 12.25
- 9 0 1 0.0 0 0 14
- 10 0 1 0.0 1.75 1.75 15.75
- 11 0 1 0.0 0 3.5 0
- 12 0 1 0.0 1.75 5.25 1.75
- 13 0 1 0.0 0 3.5 3.5
- 14 0 1 0.0 1.75 5.25 5.25
- 15 0 1 0.0 0 3.5 7
- 16 0 1 0.0 1.75 5.25 8.75
- 17 0 2 0.0 0 3.5 10.5
- 18 0 1 0.0 1.75 5.25 12.25
- 19 0 1 0.0 0 3.5 14
- 20 0 1 0.0 1.75 5.25 15.75
- 21 0 1 0.0 0 7 0
- 22 0 2 0.0 1.75 8.75 1.75
- 23 0 1 0.0 0 7 3.5
- 24 0 1 0.0 1.75 8.75 5.25
- 25 0 1 0.0 0 7 7
- 26 0 2 0.0 1.75 8.75 8.75
- 27 0 1 0.0 0 7 10.5
- 28 0 2 0.0 1.75 8.75 12.25
- 29 0 1 0.0 0 7 14
- 30 0 1 0.0 1.75 8.75 15.75
- 31 0 2 0.0 0 10.5 0
- 32 0 1 0.0 1.75 12.25 1.75
- 33 0 2 0.0 0 10.5 3.5
- 34 0 1 0.0 1.75 12.25 5.25
- 35 0 2 0.0 0 10.5 7
- 36 0 1 0.0 1.75 12.25 8.75
- 37 0 1 0.0 0 10.5 10.5
- 38 0 1 0.0 1.75 12.25 12.25
- 39 0 2 0.0 0 10.5 14
- 40 0 2 0.0 1.75 12.25 15.75
- 41 0 1 0.0 0 14 0
- 42 0 2 0.0 1.75 15.75 1.75
- 43 0 1 0.0 0 14 3.5
- 44 0 1 0.0 1.75 15.75 5.25
- 45 0 1 0.0 0 14 7
- 46 0 1 0.0 1.75 15.75 8.75
- 47 0 1 0.0 0 14 10.5
- 48 0 1 0.0 1.75 15.75 12.25
- 49 0 1 0.0 0 14 14
- 50 0 1 0.0 1.75 15.75 15.75
- 51 0 1 0.0 3.5 0 0
- 52 0 2 0.0 5.25 1.75 1.75
- 53 0 2 0.0 3.5 0 3.5
- 54 0 1 0.0 5.25 1.75 5.25
- 55 0 1 0.0 3.5 0 7
- 56 0 1 0.0 5.25 1.75 8.75
- 57 0 1 0.0 3.5 0 10.5
- 58 0 1 0.0 5.25 1.75 12.25
- 59 0 1 0.0 3.5 0 14
- 60 0 1 0.0 5.25 1.75 15.75
- 61 0 2 0.0 3.5 3.5 0
- 62 0 1 0.0 5.25 5.25 1.75
- 63 0 2 0.0 3.5 3.5 3.5
- 64 0 2 0.0 5.25 5.25 5.25
- 65 0 1 0.0 3.5 3.5 7
- 66 0 2 0.0 5.25 5.25 8.75
- 67 0 1 0.0 3.5 3.5 10.5
- 68 0 1 0.0 5.25 5.25 12.25
- 69 0 1 0.0 3.5 3.5 14
- 70 0 1 0.0 5.25 5.25 15.75
- 71 0 1 0.0 3.5 7 0
- 72 0 1 0.0 5.25 8.75 1.75
- 73 0 1 0.0 3.5 7 3.5
- 74 0 1 0.0 5.25 8.75 5.25
- 75 0 1 0.0 3.5 7 7
- 76 0 1 0.0 5.25 8.75 8.75
- 77 0 2 0.0 3.5 7 10.5
- 78 0 1 0.0 5.25 8.75 12.25
- 79 0 1 0.0 3.5 7 14
- 80 0 2 0.0 5.25 8.75 15.75
- 81 0 1 0.0 3.5 10.5 0
- 82 0 1 0.0 5.25 12.25 1.75
- 83 0 1 0.0 3.5 10.5 3.5
- 84 0 1 0.0 5.25 12.25 5.25
- 85 0 1 0.0 3.5 10.5 7
- 86 0 1 0.0 5.25 12.25 8.75
- 87 0 1 0.0 3.5 10.5 10.5
- 88 0 1 0.0 5.25 12.25 12.25
- 89 0 1 0.0 3.5 10.5 14
- 90 0 2 0.0 5.25 12.25 15.75
- 91 0 1 0.0 3.5 14 0
- 92 0 1 0.0 5.25 15.75 1.75
- 93 0 1 0.0 3.5 14 3.5
- 94 0 1 0.0 5.25 15.75 5.25
- 95 0 1 0.0 3.5 14 7
- 96 0 2 0.0 5.25 15.75 8.75
- 97 0 1 0.0 3.5 14 10.5
- 98 0 1 0.0 5.25 15.75 12.25
- 99 0 1 0.0 3.5 14 14
- 100 0 2 0.0 5.25 15.75 15.75
- 101 0 2 0.0 7 0 0
- 102 0 2 0.0 8.75 1.75 1.75
- 103 0 2 0.0 7 0 3.5
- 104 0 2 0.0 8.75 1.75 5.25
- 105 0 1 0.0 7 0 7
- 106 0 1 0.0 8.75 1.75 8.75
- 107 0 1 0.0 7 0 10.5
- 108 0 1 0.0 8.75 1.75 12.25
- 109 0 2 0.0 7 0 14
- 110 0 1 0.0 8.75 1.75 15.75
- 111 0 1 0.0 7 3.5 0
- 112 0 2 0.0 8.75 5.25 1.75
- 113 0 1 0.0 7 3.5 3.5
- 114 0 1 0.0 8.75 5.25 5.25
- 115 0 1 0.0 7 3.5 7
- 116 0 1 0.0 8.75 5.25 8.75
- 117 0 1 0.0 7 3.5 10.5
- 118 0 1 0.0 8.75 5.25 12.25
- 119 0 1 0.0 7 3.5 14
- 120 0 2 0.0 8.75 5.25 15.75
- 121 0 1 0.0 7 7 0
- 122 0 1 0.0 8.75 8.75 1.75
- 123 0 1 0.0 7 7 3.5
- 124 0 1 0.0 8.75 8.75 5.25
- 125 0 2 0.0 7 7 7
- 126 0 2 0.0 8.75 8.75 8.75
- 127 0 1 0.0 7 7 10.5
- 128 0 2 0.0 8.75 8.75 12.25
- 129 0 1 0.0 7 7 14
- 130 0 1 0.0 8.75 8.75 15.75
- 131 0 1 0.0 7 10.5 0
- 132 0 1 0.0 8.75 12.25 1.75
- 133 0 1 0.0 7 10.5 3.5
- 134 0 1 0.0 8.75 12.25 5.25
- 135 0 1 0.0 7 10.5 7
- 136 0 1 0.0 8.75 12.25 8.75
- 137 0 1 0.0 7 10.5 10.5
- 138 0 1 0.0 8.75 12.25 12.25
- 139 0 1 0.0 7 10.5 14
- 140 0 1 0.0 8.75 12.25 15.75
- 141 0 2 0.0 7 14 0
- 142 0 1 0.0 8.75 15.75 1.75
- 143 0 1 0.0 7 14 3.5
- 144 0 1 0.0 8.75 15.75 5.25
- 145 0 1 0.0 7 14 7
- 146 0 1 0.0 8.75 15.75 8.75
- 147 0 1 0.0 8.75 15.75 12.25
- 148 0 1 0.0 7 14 14
- 149 0 1 0.0 8.75 15.75 15.75
- 150 0 2 0.0 10.5 0 0
- 151 0 1 0.0 12.25 1.75 1.75
- 152 0 1 0.0 12.25 1.75 5.25
- 153 0 1 0.0 10.5 0 7
- 154 0 2 0.0 12.25 1.75 8.75
- 155 0 1 0.0 10.5 0 10.5
- 156 0 1 0.0 12.25 1.75 12.25
- 157 0 1 0.0 10.5 0 14
- 158 0 1 0.0 12.25 1.75 15.75
- 159 0 2 0.0 10.5 3.5 0
- 160 0 1 0.0 12.25 5.25 1.75
- 161 0 1 0.0 10.5 3.5 3.5
- 162 0 2 0.0 12.25 5.25 5.25
- 163 0 1 0.0 10.5 3.5 7
- 164 0 2 0.0 12.25 5.25 8.75
- 165 0 1 0.0 10.5 3.5 10.5
- 166 0 1 0.0 12.25 5.25 12.25
- 167 0 1 0.0 10.5 3.5 14
- 168 0 1 0.0 12.25 5.25 15.75
- 169 0 2 0.0 10.5 7 0
- 170 0 2 0.0 12.25 8.75 1.75
- 171 0 1 0.0 10.5 7 3.5
- 172 0 1 0.0 12.25 8.75 5.25
- 173 0 1 0.0 10.5 7 7
- 174 0 2 0.0 12.25 8.75 8.75
- 175 0 2 0.0 10.5 7 10.5
- 176 0 1 0.0 12.25 8.75 12.25
- 177 0 1 0.0 10.5 7 14
- 178 0 2 0.0 12.25 8.75 15.75
- 179 0 2 0.0 10.5 10.5 0
- 180 0 1 0.0 12.25 12.25 1.75
- 181 0 2 0.0 10.5 10.5 3.5
- 182 0 1 0.0 12.25 12.25 5.25
- 183 0 1 0.0 10.5 10.5 7
- 184 0 1 0.0 12.25 12.25 8.75
- 185 0 2 0.0 10.5 10.5 10.5
- 186 0 1 0.0 12.25 12.25 12.25
- 187 0 1 0.0 10.5 10.5 14
- 188 0 1 0.0 12.25 12.25 15.75
- 189 0 1 0.0 10.5 14 0
- 190 0 2 0.0 12.25 15.75 1.75
- 191 0 2 0.0 10.5 14 3.5
- 192 0 1 0.0 12.25 15.75 5.25
- 193 0 1 0.0 10.5 14 7
- 194 0 1 0.0 12.25 15.75 8.75
- 195 0 1 0.0 10.5 14 10.5
- 196 0 1 0.0 12.25 15.75 12.25
- 197 0 2 0.0 10.5 14 14
- 198 0 1 0.0 12.25 15.75 15.75
- 199 0 1 0.0 14 0 0
- 200 0 2 0.0 15.75 1.75 1.75
- 201 0 2 0.0 14 0 3.5
- 202 0 1 0.0 15.75 1.75 5.25
- 203 0 2 0.0 14 0 7
- 204 0 1 0.0 15.75 1.75 8.75
- 205 0 1 0.0 14 0 10.5
- 206 0 1 0.0 15.75 1.75 12.25
- 207 0 1 0.0 14 0 14
- 208 0 1 0.0 15.75 1.75 15.75
- 209 0 1 0.0 15.75 5.25 1.75
- 210 0 1 0.0 14 3.5 3.5
- 211 0 1 0.0 15.75 5.25 5.25
- 212 0 1 0.0 14 3.5 7
- 213 0 1 0.0 15.75 5.25 8.75
- 214 0 2 0.0 14 3.5 10.5
- 215 0 2 0.0 15.75 5.25 12.25
- 216 0 1 0.0 14 3.5 14
- 217 0 1 0.0 15.75 5.25 15.75
- 218 0 2 0.0 14 7 0
- 219 0 1 0.0 15.75 8.75 1.75
- 220 0 1 0.0 14 7 3.5
- 221 0 1 0.0 15.75 8.75 5.25
- 222 0 1 0.0 14 7 7
- 223 0 2 0.0 15.75 8.75 8.75
- 224 0 1 0.0 14 7 10.5
- 225 0 1 0.0 15.75 8.75 12.25
- 226 0 1 0.0 14 7 14
- 227 0 2 0.0 15.75 8.75 15.75
- 228 0 1 0.0 14 10.5 0
- 229 0 1 0.0 15.75 12.25 1.75
- 230 0 2 0.0 14 10.5 3.5
- 231 0 1 0.0 15.75 12.25 5.25
- 232 0 1 0.0 14 10.5 7
- 233 0 1 0.0 15.75 12.25 8.75
- 234 0 1 0.0 14 10.5 10.5
- 235 0 2 0.0 15.75 12.25 12.25
- 236 0 1 0.0 14 10.5 14
- 237 0 1 0.0 15.75 12.25 15.75
- 238 0 1 0.0 14 14 0
- 239 0 1 0.0 15.75 15.75 1.75
- 240 0 1 0.0 14 14 3.5
- 241 0 1 0.0 15.75 15.75 5.25
- 242 0 1 0.0 14 14 7
- 243 0 1 0.0 15.75 15.75 8.75
- 244 0 1 0.0 14 14 10.5
- 245 0 1 0.0 15.75 15.75 12.25
- 246 0 2 0.0 14 14 14
- 247 0 1 0.0 15.75 15.75 15.75
diff --git a/benchmarks/lammps-allegro/get_performance.py b/benchmarks/lammps-allegro/get_performance.py
deleted file mode 100644
index efe80b3..0000000
--- a/benchmarks/lammps-allegro/get_performance.py
+++ /dev/null
@@ -1,31 +0,0 @@
-import os
-import json
-
-out = {}
-nextline = False
-logfile = 'log.lammps'
-path = ''
-if not os.path.isfile(logfile):
-    path = 'benchmarks/lammps-allegro'
-f = open(os.path.join(path,logfile))
-for line in f.readlines():
-    if 'Performance' in line:
-        print(line)
-        out['raw1'] = line
-        parts = line.split()  # split on whitespace; the metal unit style adds an extra hours/ns field
-        out['simulation length'] = float(parts[1])
-        out['simulation length units'] = parts[2].rstrip(',')
-        out['timesteps/s'] = float(parts[-2])
-        nextline = True
-    elif nextline:
-        print(line)
-        out['raw2'] = line
-        out['utilization'] = line.split(' ')[0]
-        nextline = False
-
-f.close()
-
-with open(
-    os.path.join(path,'out.json'),'w'
-) as fl:
-    json.dump(out,fl)
\ No newline at end of file
diff --git a/benchmarks/lammps-allegro/in.lj.txt b/benchmarks/lammps-allegro/in.lj.txt
deleted file mode 100644
index 01e12ef..0000000
--- a/benchmarks/lammps-allegro/in.lj.txt
+++ /dev/null
@@ -1,30 +0,0 @@
-# 3d Lennard-Jones melt
-
-variable x index 1
-variable y index 1
-variable z index 1
-
-variable xx equal 20*$x
-variable yy equal 20*$y
-variable zz equal 20*$z
-
-units lj
-atom_style atomic
-
-lattice fcc 0.8442
-region box block 0 ${xx} 0 ${yy} 0 ${zz}
-create_box 1 box
-create_atoms 1 box
-mass 1 1.0
-
-velocity all create 1.44 87287 loop geom
-
-pair_style lj/cut 2.5
-pair_coeff 1 1 1.0 1.0 2.5
-
-neighbor 0.3 bin
-neigh_modify delay 0 every 20 check no
-
-fix 1 all nve
-
-run 100
diff --git a/benchmarks/lammps-allegro/install.sh b/benchmarks/lammps-allegro/install.sh
deleted file mode 100644
index f98d78f..0000000
--- a/benchmarks/lammps-allegro/install.sh
+++ /dev/null
@@ -1,56 +0,0 @@
-#!/bin/bash
-
-# Fail on error
-set -o pipefail
-set -e
-
-module load anaconda3
-conda create -n env python=3.9
-
-nproc=1
-
-# Download LAMMPS
-# git clone -b stable_29Sep2021_update2 --depth 1 git@github.com:lammps/lammps
-git clone -b stable_23Jun2022 https://github.com/lammps/lammps.git lammps
-
-# Download Allegro Patch
-git clone https://github.com/mir-group/pair_allegro.git
-
-# Install Allegro Patch
-cd pair_allegro
-./patch_lammps.sh ../lammps/
-cd ../
-
-# Download LibTorch
-wget https://download.pytorch.org/libtorch/cu117/libtorch-cxx11-abi-shared-with-deps-2.0.1%2Bcu117.zip
-unzip libtorch-cxx11-abi-shared-with-deps-2.0.1+cu117.zip
-
-# Install MKL
-
-# Load OpenMPI
-module load openmpi
-
-# Load CUDA
-module load cuda/11.7.1
-
-# Install OpenMP
-# spack install llvm-openmp
-# spack load llvm-openmp
-
-
-
-# Install LAMMPS
-cd lammps
-mkdir build
-cd build
-cmake ../cmake -DCMAKE_PREFIX_PATH=../../libtorch \
-    -DPKG_KOKKOS=ON -DKokkos_ENABLE_CUDA=ON -DKokkos_ENABLE_OPENMP=off \
-    -DCMAKE_CXX_COMPILER=/jet/home/anneveli/github/EEGMark/benchmarks/lammps-allegro/lammps/lib/kokkos/bin/nvcc_wrapper
-
-cmake -C ../cmake/presets/basic.cmake -D BUILD_SHARED_LIBS=on -D BUILD_MPI=on \
-    -D LAMMPS_EXCEPTIONS=on -D PKG_QEQ=on -D PKG_MEAM=on ../cmake
-
-# make -j$(nproc)
-# #SBATCH -A venkvis_gpu
-#SBATCH --gres=gpu:1
-# srun --account=venkvis_gpu --partition=gpu --gres=gpu:1 --mem=2G --time=30 --pty bash
\ No newline at end of file
diff --git a/benchmarks/lammps-allegro/potential_files/meam.app b/benchmarks/lammps-allegro/potential_files/meam.app
deleted file mode 100644
index bd6355d..0000000
--- a/benchmarks/lammps-allegro/potential_files/meam.app
+++ /dev/null
@@ -1,38 +0,0 @@
-erose_form = 2
-rc = 4.8
-delr = 0.1
-ialloy = 2
-Cmin(1,1,1) = 0.1599999964237213
-Cmax(1,1,1) = 2.799999952316284
-Cmin(1,1,2) = 0.1599999964237213
-Cmax(1,1,2) = 2.799999952316284
-Cmin(1,2,1) = 0.30250000953674316
-Cmax(1,2,1) = 1.440000057220459
-Cmin(1,2,2) = 0.30250000953674316
-Cmax(1,2,2) = 2.799999952316284
-Cmin(2,2,1) = 0.8100000023841858
-Cmax(2,2,1) = 2.799999952316284
-Cmin(2,2,2) = 0.49000000953674316
-Cmax(2,2,2) = 2.799999952316284
-lattce(1,2) = b2
-alpha(1,2) = 3.849928855895996
-re(1,2) = 2.950000047683716
-Ec(1,2) = 1.559999942779541
-augt1(1) = 0
-repuls(1,1) = 0.05000000074505806
-attrac(1,1) = 0.05000000074505806
-zbl(1,1) = 0
-nn2(1,1) = 1
-repuls(1,2) = 0.02500000037252903
-attrac(1,2) = 0.02500000037252903
-zbl(1,2) = 0
-nn2(1,2) = 1
-augt1(2) = 0
-repuls(2,1) = 0.02500000037252903
-attrac(2,1) = 0.02500000037252903
-zbl(2,1) = 0
-nn2(2,1) = 1
-repuls(2,2) = 0.0
-attrac(2,2) = 0.0
-zbl(2,2) = 0
-nn2(2,2) = 1
diff --git a/benchmarks/lammps-allegro/potential_files/meam.lib b/benchmarks/lammps-allegro/potential_files/meam.lib
deleted file mode 100644
index c9ed9bc..0000000
--- a/benchmarks/lammps-allegro/potential_files/meam.lib
+++ /dev/null
@@ -1,6 +0,0 @@
-'Li' 'bcc' 8 3 6.94
-4.110938549041748 1.649999976158142 1.0 4.0 1.0 3.4871954917907715 1.649999976158142 0.949999988079071
-1 2.299999952316284 5.0 0.5 1.0 3
-'Mg' 'hcp' 12 12 24.305
-5.574794769287109 2.299999952316284 1.0 3.0 1.0 3.200000047683716 1.5499999523162842 0.5199999809265137
-1 9.0 -2.0 9.5 1.0 3
diff --git a/benchmarks/lammps-allegro/run.LiMg2nnmeam.in b/benchmarks/lammps-allegro/run.LiMg2nnmeam.in
deleted file mode 100644
index 7a962bc..0000000
--- a/benchmarks/lammps-allegro/run.LiMg2nnmeam.in
+++ /dev/null
@@ -1,21 +0,0 @@
-units metal
-boundary p p p
-atom_style full
-read_data data.LiMg.in
-mass 1 6.94
-mass 2 24.305
-change_box all triclinic
-velocity all create 600 14322
-pair_style meam/c
-pair_coeff * * potential_files/meam.lib Li Mg potential_files/meam.app Li Mg
-neighbor 0.3 bin
-neigh_modify delay 10
-
-fix 2 all npt temp 600 600 $(100.0*dt) aniso 0 0 $(1000.0*dt)
-thermo 10000
-thermo_style custom step temp pe ke
-# dump dump_1 all custom 10000 /ocean/projects/cts180021p/anneveli/ALBENCHMARK/datasets/Li3Mg1_bcc_npt_aniso_600_prod_10ns_d10000fs/Li3Mg1_bcc_npt_aniso_600K_10ns_d10000fs_*.xyz id element x y z fx fy fz
-# dump_modify dump_1 element Li Mg
-timestep 0.001
-run 10000000
-
diff --git a/benchmarks/lammps-allegro/submit.sh b/benchmarks/lammps-allegro/submit.sh
deleted file mode 100644
index ed144f8..0000000
--- a/benchmarks/lammps-allegro/submit.sh
+++ /dev/null
@@ -1,31 +0,0 @@
-#!/usr/bin/bash
-
-#SBATCH -t 00-01:00
-#SBATCH -J lammps_benchmark
-#SBATCH -o logs/output.%j
-#SBATCH -e logs/error.%j
-#SBATCH -p GPU-shared
-#SBATCH -N 1
-#SBATCH --ntasks=1
-#SBATCH --gpus=v100-32:1
-#SBATCH --mem-per-gpu=50000
-#SBATCH --mail-user=eannevel@andrew.cmu.edu
-
-set -e; set -o pipefail
-
-# Load required modules
-# module load singularity
-
-# Build SIF, if it doesn't exist
-if [[ ! -f lammps.sif ]]; then
-    singularity build lammps.sif docker://nvcr.io/hpc/lammps:29Oct2020
-fi
-
-readonly gpus_per_node=$(( SLURM_NTASKS / SLURM_JOB_NUM_NODES ))
-
-echo "Running Lennard Jones 8x8x8 example on ${SLURM_NTASKS} GPUs..."
-# echo "Running 2NN MEAM example on ${SLURM_NTASKS} GPUs..."
-srun --mpi=pmi2 \
-singularity run --nv -B ${PWD}:/host_pwd lammps.sif \
-lmp -k on g ${gpus_per_node} -sf kk -pk kokkos cuda/aware on neigh full comm device binsize 2.8 -var x 8 -var y 8 -var z 8 -in /host_pwd/run.LiMg2nnmeam.in
-# lmp -k on g ${gpus_per_node} -sf kk -pk kokkos cuda/aware on neigh full comm device binsize 2.8 -var x 8 -var y 8 -var z 8 -in /host_pwd/in.lj.txt
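
Note on the `Performance:` parsing in the `get_performance.py` scripts above: the shape of that log line depends on the LAMMPS unit style. Under `lj` units it looks like `Performance: ... tau/day, ... timesteps/s`, while under `metal` units (used by `run.LiMg2nnmeam.in`) LAMMPS inserts an extra `hours/ns` field, so a fixed five-way unpack would fail. The scripts therefore split on whitespace and take `timesteps/s` from the second-to-last token. A minimal, self-contained sketch of that parse is below; the two sample lines and their numbers are invented for illustration, not taken from a real run:

    def parse_performance(line):
        # Token-based parse: field 1 is the simulation-length rate, field 2 its
        # units (trailing comma stripped), and the second-to-last field is
        # timesteps/s regardless of how many rate fields the unit style adds.
        parts = line.split()
        return {
            'simulation length': float(parts[1]),
            'simulation length units': parts[2].rstrip(','),
            'timesteps/s': float(parts[-2]),
        }

    # Hypothetical sample lines for the two unit styles used in these benchmarks:
    print(parse_performance('Performance: 22781.608 tau/day, 52.735 timesteps/s'))
    print(parse_performance('Performance: 1.654 ns/day, 14.515 hours/ns, 19.144 timesteps/s'))

Taking the second-to-last token keeps one parser working for both input decks, at the cost of assuming `timesteps/s` stays the final rate field, which holds for the 29Oct2020 LAMMPS build used in the container.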