11# How to use iPython Notebook
22
331 . Install Anaconda
4- 2 . Launch Anaconda luncher
5- 3 . Launch ipython-notebook
4+ 2 . Launch Anaconda launcher
5+ 3 . Launch ipython-notebook
664 . Create an iPython profile for use with PySpark
77 1 . Make sure you have exported PySpark classes to your python path and built Apache Spark.
88 To export PySpark classes, add the following to your ~ /.bash_profile:
@@ -34,9 +34,21 @@ vim ~/.ipython/profile_pyspark/ipython_notebook_config.py
3434 ` ` `
3535 ` ` ` bash
3636 c = get_config ()
37- c.NotebookApp.ip = ' *'
38- c.NotebookApp.open_browser = False
39- c.NotebookApp.port = 8880
37+
38+ # kernel configuration
39+ c.IPKernelApp.pylab = 'inline' # set %matplotlib inline always
40+
41+ # notebook configuration
42+ c.NotebookApp.ip = '*' # '*' == to bind on all IPs
43+ # do not open the browser at start of ipython notebook
44+ # so that we can point the ipython notebook address
45+ # in an active web browser
46+ c.NotebookApp.open_browser = False
47+
48+ # (optional) you can add a password to your notebook if desired
49+
50+ # set a fixed port number that does not conflict with other iPython profiles
51+ c.NotebookApp.port = 8880
4052 ` ` `
41536. Create PySpark Setup configuration
4254 ` ` ` bash
@@ -50,6 +62,10 @@ vim ~/.ipython/profile_pyspark/ipython_notebook_config.py
5062 # setup spark home
5163 findspark.init ()
5264 spark_home = findspark.find ()
65+
66+ # setup spark home approach #2
67+ # make sure you have already set the $SPARK_HOME environment variable
68+ # spark_home = os.environ.get('SPARK_HOME', None)
5369
5470 # add spark's home directory to path
5571 sys.path.insert(0, os.path.join(spark_home, " python" ))
@@ -72,8 +88,11 @@ vim ~/.ipython/profile_pyspark/ipython_notebook_config.py
7288 from pyspark.sql import SQLContext
7389
7490 # setup SparkContext
75- sc = SparkContext._active_spark_context
76-
91+ try:
92+ sc = SparkContext ()
93+ except:
94+ sc = SparkContext._active_spark_context
95+
7796 # setup SQLContext
7897 sqlCtx = SQLContext(sc)
7998 ` ` `
0 commit comments