From 8fcac6d1ebf4c234fc7d0bf70e977cd0e1eaf6c4 Mon Sep 17 00:00:00 2001
From: Taras Lazariv <taras.lazariv@tu-dresden.de>
Date: Thu, 11 Nov 2021 17:54:11 +0100
Subject: [PATCH] Update example for creating a JupyterHub kernel

---
 .../docs/software/big_data_frameworks_spark.md       | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/doc.zih.tu-dresden.de/docs/software/big_data_frameworks_spark.md b/doc.zih.tu-dresden.de/docs/software/big_data_frameworks_spark.md
index f9bb6e10e..84f5935a1 100644
--- a/doc.zih.tu-dresden.de/docs/software/big_data_frameworks_spark.md
+++ b/doc.zih.tu-dresden.de/docs/software/big_data_frameworks_spark.md
@@ -171,19 +171,19 @@ marie@login$ srun --pty --ntasks=1 --cpus-per-task=2 --mem-per-cpu=2500 --time=0
 When a node is allocated, install the required packages:
 
 ```console
-marie@compute$ cd
+marie@compute$ cd $HOME
 marie@compute$ mkdir jupyter-kernel
+marie@compute$ module load Python
 marie@compute$ virtualenv --system-site-packages jupyter-kernel/env  #Create virtual environment
 [...]
 marie@compute$ source jupyter-kernel/env/bin/activate    #Activate virtual environment.
-marie@compute$ pip install ipykernel
+(env) marie@compute$ pip install ipykernel
 [...]
-marie@compute$ python -m ipykernel install --user --name haswell-py3.7-spark --display-name="haswell-py3.7-spark"
+(env) marie@compute$ python -m ipykernel install --user --name haswell-py3.7-spark --display-name="haswell-py3.7-spark"
 Installed kernelspec haswell-py3.7-spark in [...]
 
-marie@compute$ pip install findspark
-
-marie@compute$ deactivate
+(env) marie@compute$ pip install findspark
+(env) marie@compute$ deactivate
 ```
 
 You are now ready to spawn a notebook with Spark.
-- 
GitLab
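
Note (not part of the patch): the `findspark` package installed above is what lets a notebook running the `haswell-py3.7-spark` kernel locate the cluster's Spark installation. Below is a minimal sketch of a first notebook cell, assuming `SPARK_HOME` is set in the environment (e.g. by the loaded Spark module) and using a placeholder master URL; the real master depends on how Spark is started inside the Slurm allocation.

```python
# Minimal sketch of a first notebook cell in the new kernel.
# Assumes SPARK_HOME is set (e.g. by the loaded Spark module).
import findspark
findspark.init()  # make the pyspark package importable from SPARK_HOME

import pyspark

# "local[2]" is a placeholder master; in a real job the master URL
# comes from the Spark cluster started within the allocation.
sc = pyspark.SparkContext(master="local[2]", appName="kernel-smoke-test")
print(sc.parallelize(range(100)).sum())  # quick check that Spark responds
sc.stop()
```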