diff --git a/2_multiple_job_steps/multiple_job_steps.sh b/2_multiple_job_steps/multiple_job_steps.sh
index 5cb6614a70a84e3a4e51842474ab097b21bc5b44..1740757667bc8788bd8f2b343dcd9ad16ca79cf4 100644
--- a/2_multiple_job_steps/multiple_job_steps.sh
+++ b/2_multiple_job_steps/multiple_job_steps.sh
@@ -9,28 +9,54 @@
 # Loading Python into the environment
 module load python/anaconda3-2024.02-3.11.7
 
-# Specify_ input file
-file=data_4.txt
-temporary_folder=/local/data1/${USER}
-working_folder=${temporary_folder}/${SLURM_JOB_ID}
-
-# Step 1 - Create a temporary folder to store data in.
-srun --cpus-per-task=1 mkdir -v -m 700 -p ${working_folder}
-
-# Step 2 - Copy indata to the temporary folder.
-srun --cpus-per-task=1 cp -v ${PWD}/../data/${file} ${working_folder}
+# Defining folders on Lundgren's local storage.
+lundgren_local_folder=/local/data1/${USER}
+lundgren_working_folder=${lundgren_local_folder}/${SLURM_JOB_ID}
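+# ${SLURM_JOB_ID} is set by Slurm to the job's numeric ID, so every job gets its own working folder.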
+
+# Setting the location and filename of the data and output files in the home folder.
+# ${PWD} = current working directory.
+# In this case:
+# The home_data_folder points to the data folder one directory above the current one.
+# The home_output_folder points to the current working directory.
+# The data file to use is data_4.txt.
+home_data_folder=${PWD}/../data
+home_output_folder=${PWD}
+data_file=data_4.txt
+
+# Setting the location and filename of the data and output files on Lundgren's local storage.
+# The folders and files set here will be contained in the lundgren_working_folder.
+# In this case:
+# The lundgren_data_folder points to the folder data in lundgren_working_folder.
+# The lundgren_output_folder points to the folder output in lundgren_working_folder.
+# The output file is set to output.csv.
+lundgren_data_folder=data
+lundgren_output_folder=output
+output_file=output.csv
+
+
+# Step 1a - Create temporary working folder.
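+# mkdir -m 700 makes the folder accessible only to the owner; -p avoids an error if it already exists.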
+srun --cpus-per-task=1 mkdir -v -m 700 -p ${lundgren_working_folder}
+
+# Step 1b - Create a temporary folder to store data in.
+srun --cpus-per-task=1 mkdir -v -m 700 -p ${lundgren_working_folder}/${lundgren_data_folder}
+
+# Step 1c - Create a temporary folder to store output in.
+srun --cpus-per-task=1 mkdir -v -m 700 -p ${lundgren_working_folder}/${lundgren_output_folder}
+
+# Step 2 - Copy data to the temporary folder.
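+# After this copy, the compute step in step 3 reads its input from local storage instead of the home folder.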
+srun --cpus-per-task=1 cp -v ${home_data_folder}/${data_file} ${lundgren_working_folder}/${lundgren_data_folder}
 
 # Step 3 - Start job stage
-srun python ../code/parallel_task.py ${working_folder}/${file} ${working_folder}/output.csv
+srun python ${PWD}/../code/parallel_task.py ${lundgren_working_folder}/${lundgren_data_folder}/${data_file} ${lundgren_working_folder}/${lundgren_output_folder}/${output_file}
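+# The first argument is the staged input file; the second is where parallel_task.py writes its csv output.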
 
-# Step 4 - Compress data all csv files.
-srun --cpus-per-task=1 tar -czvf ${working_folder}/output.tar.gz -C ${working_folder} $(cd ${working_folder} && ls *.csv)
+# Step 4 - Compress the output csv file.
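+# tar -C changes into the output folder first, so the archive stores only the file name rather than the full local path.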
+srun --cpus-per-task=1 tar -czvf ${lundgren_working_folder}/${lundgren_output_folder}/output.tar.gz -C ${lundgren_working_folder}/${lundgren_output_folder} ${output_file}
 
 # Step 5 - Move output data to home folder
-srun --cpus-per-task=1 mv -v ${working_folder}/output.tar.gz ${PWD}
+srun --cpus-per-task=1 mv -v ${lundgren_working_folder}/${lundgren_output_folder}/output.tar.gz ${home_output_folder}
 
 # Step 6a - Remove temporary files.
-srun --cpus-per-task=1 rm -rfv ${working_folder}
+srun --cpus-per-task=1 rm -rfv ${lundgren_working_folder}
 
 # Step 6b - Clear folder
-srun --cpus-per-task=1 test -n "$(ls -A "$temporary_folder")" || rmdir -v "$temporary_folder"
\ No newline at end of file
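+# The test succeeds if the folder still contains anything (for example files from another job), so rmdir only runs once the folder is empty.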
+srun --cpus-per-task=1 test -n "$(ls -A "$lundgren_local_folder")" || rmdir -v "$lundgren_local_folder"
\ No newline at end of file