QNLP v1.0
run_scaled_exp.sh
#!/bin/bash
#SBATCH -J QNLP
#SBATCH -N <NUM_NODES>
#SBATCH -p <Queue>
#SBATCH -t 1-23:59:59
#SBATCH -A <Account name>
#SBATCH --mail-user=<email address>
#SBATCH --mail-type=ALL

cd /path/to/qnlp/build

# Ensure correct modules are loaded
module load intel/2019u5 gcc

# Set up environment
source ../load_env.sh

# Ensure these numbers are powers of 2 for best performance
NUM_CORES_PER_SOCKET=<num_cores>
NUM_PROCS_PER_NODE=<NUM_CORES_PER_SOCKET * num_sockets_per_node>
NUM_PROCS=<NUM_NODES * NUM_PROCS_PER_NODE>
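# Worked example (an assumed node layout, for illustration only): with 2 sockets of
# 16 cores per node and 4 nodes, NUM_CORES_PER_SOCKET=16, NUM_PROCS_PER_NODE=32 (16 * 2),
# and NUM_PROCS=128 (4 * 32).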

# Program name:
# PROG=./QNLP_End2End_MPI.py
# The above program performs all preprocessing, encodes the preprocessed data, and compares a test state with the encoded meaning-space.
# The comparison adjusts the amplitudes of the encoded states, and the resulting distribution is sampled the requested number of times.

# PROG=./QNLP_Overlap.py
# The above program performs all preprocessing, encodes the preprocessed data, and compares a test state with the encoded meaning-space for two simulators.
# All potential (but not encoded) patterns are then assembled as test patterns; each pair of test patterns is mapped to the state as with QNLP_End2End_MPI.py,
# but the fidelities of the two distributions are then compared and returned. This allows us to determine the similarity of separate patterns encoded in
# a meaning-space determined by the given corpus. Sampling should only be performed once in this case.

PROG=./QNLP_End2End_MPI.py
NUM_SAMPLES=1000
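# To run the overlap comparison described above instead, swap in the lines below (a sketch;
# NUM_SAMPLES=1 reflects the single sampling pass recommended for QNLP_Overlap.py):
# PROG=./QNLP_Overlap.py
# NUM_SAMPLES=1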

# The corpus to analyse
CORPUS=/path/to/qnlp/corpus/AIW_PG.txt

# Define runtime parameters for the preprocessing stage (passed as environment
# variables to the Python process) and run the job
NUM_BASIS_NOUN=8 \
NUM_BASIS_VERB=4 \
BASIS_NOUN_DIST_CUTOFF=5 \
BASIS_VERB_DIST_CUTOFF=5 \
VERB_NOUN_DIST_CUTOFF=4 \
srun -n $NUM_PROCS --ntasks-per-socket=$NUM_CORES_PER_SOCKET -c 1 --cpu-bind=cores -m plane=$NUM_PROCS_PER_NODE python $PROG $CORPUS $NUM_SAMPLES
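# Optional guard (a minimal sketch): stop before archiving if the MPI run
# exited with a non-zero status.
if [ $? -ne 0 ]; then
    echo "srun failed; skipping archiving of results." >&2
    exit 1
fi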
cp slurm-"$SLURM_JOB_ID".out log_"$SLURM_JOB_ID".out
tar cvzf "$SLURM_JOB_ID".tar.gz ./*.pdf ./*.pkl log_"$SLURM_JOB_ID".out