Skip to content

Commit

Permalink
Use lazy_loader for pyspark (flyteorg#168)
Browse files Browse the repository at this point in the history
* Remove dependence on pyspark for non-spark notebooks

* Use lazy_loader for pyspark

* lint

* lint
  • Loading branch information
akhurana001 authored Aug 28, 2020
1 parent 997e722 commit d088707
Show file tree
Hide file tree
Showing 3 changed files with 6 additions and 5 deletions.
2 changes: 1 addition & 1 deletion flytekit/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,4 +2,4 @@

import flytekit.plugins # noqa: F401

-__version__ = "0.12.1"
+__version__ = "0.12.2"
8 changes: 4 additions & 4 deletions flytekit/contrib/notebook/helper.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,10 @@
import os as _os

import six as _six
-from pyspark import SparkConf, SparkContext

from flytekit.common.types.helpers import pack_python_std_map_to_literal_map as _packer
from flytekit.contrib.notebook.supported_types import notebook_types_map as _notebook_types_map
+from flytekit.plugins import pyspark as _pyspark


def record_outputs(outputs=None):
Expand Down Expand Up @@ -35,9 +35,9 @@ def get_spark_context(spark_conf):
# We run in cluster-mode in Flyte.
# Ref https://github.com/lyft/flyteplugins/blob/master/go/tasks/v1/flytek8s/k8s_resource_adds.go#L46
if "FLYTE_INTERNAL_EXECUTION_ID" in _os.environ:
-        return SparkContext()
+        return _pyspark.SparkContext()

# Add system spark-conf for local/notebook based execution.
spark_conf.add(("spark.master", "local"))
-    conf = SparkConf().setAll(spark_conf)
-    return SparkContext(conf=conf)
+    conf = _pyspark.SparkConf().setAll(spark_conf)
+    return _pyspark.SparkContext(conf=conf)
1 change: 1 addition & 0 deletions setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -50,6 +50,7 @@
"urllib3>=1.22,<2.0.0",
"wrapt>=1.0.0,<2.0.0",
"papermill>=1.2.0",
+        "ipykernel>=5.0.0",
],
extras_require=extras_require,
scripts=[
Expand Down

0 comments on commit d088707

Please sign in to comment.