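To run the Python interpreter in YARN mode, first build a conda environment that contains the Python version and packages your notes need, for example: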
name: python_env
channels:
- conda-forge
- defaults
dependencies:
- python=3.7
- pycodestyle
- numpy
- pandas
- scipy
- grpcio
- protobuf
- pandasql
- ipython
- ipykernel
- jupyter_client
- panel
- pyyaml
- seaborn
- plotnine
- hvplot
- intake
- intake-parquet
- intake-xarray
- altair
- vega_datasets
- pyarrow
# Create the conda env from the YAML spec above (assumed to be saved as python_env.yml), then pack it
conda env create -f python_env.yml
conda pack -n python_env
hadoop fs -put python_env.tar.gz /tmp
# The python conda tarball should be publicly readable, so change its permission here.
hadoop fs -chmod 644 /tmp/python_env.tar.gz
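With the tarball on HDFS and readable by YARN, configure the Python interpreter to launch in a YARN container and to ship the archive: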
%python.conf
# Set zeppelin.interpreter.launcher to yarn so that the Python interpreter runs in a YARN container;
# otherwise the Python interpreter runs as a local process on the Zeppelin server host.
zeppelin.interpreter.launcher yarn
# zeppelin.yarn.dist.archives can be either a local file or an HDFS file
zeppelin.yarn.dist.archives hdfs:///tmp/python_env.tar.gz#environment
# Conda environment name, i.e. the folder name in the working directory of the YARN container
zeppelin.interpreter.conda.env.name environment
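Once the interpreter starts with this configuration, a quick sanity check (an illustrative paragraph, not part of the original setup) is to confirm which Python binary the interpreter is actually using:

%python
import sys

# If the YARN launch worked, sys.executable points at the Python inside the
# unpacked "environment" archive in the container's working directory,
# not at a system Python on the Zeppelin server host.
print(sys.executable)
print(sys.version)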
%python
%matplotlib inline
import matplotlib.pyplot as plt
plt.plot([1,2,3,4])
plt.ylabel('some numbers')
plt.show()
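Since the archive also bundles pandas and seaborn, a paragraph along the following lines (an illustrative sketch, not part of the original) confirms that those packages are importable from the shipped environment:

%python
%matplotlib inline
import matplotlib.pyplot as plt
import pandas as pd
import seaborn as sns

# pandas and seaborn are resolved from the conda environment shipped via
# zeppelin.yarn.dist.archives.
df = pd.DataFrame({'x': [1, 2, 3, 4], 'y': [10, 20, 25, 30]})
sns.lineplot(data=df, x='x', y='y')
plt.show()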