Module: spark_fuse.spark

create_session

create_session(app_name: str = 'spark-fuse', *, master: Optional[str] = None, extra_configs: Optional[Dict[str, str]] = None) -> SparkSession

Create a SparkSession with Delta configs and light Azure defaults.

  • Uses local[2] when no master is provided and not on Databricks or Fabric.
  • Applies Delta extensions; works on both Databricks and local delta-spark.
  • Accepts extra_configs to inject environment-specific credentials.
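
A minimal sketch of typical usage, assuming the module is importable as spark_fuse.spark; the Azure storage account and key below are hypothetical placeholders:

from spark_fuse.spark import create_session

# No master given: on a laptop this comes up as local[2] with Delta
# extensions and loopback networking. The Azure account key entry is a
# hypothetical placeholder for any environment-specific credential.
spark = create_session(
    "my-app",
    extra_configs={
        "spark.hadoop.fs.azure.account.key.myaccount.dfs.core.windows.net": "<storage-key>",
    },
)
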
Source code in src/spark_fuse/spark.py
def create_session(
    app_name: str = "spark-fuse",
    *,
    master: Optional[str] = None,
    extra_configs: Optional[Dict[str, str]] = None,
) -> SparkSession:
    """Create a SparkSession with Delta configs and light Azure defaults.

    - Uses `local[2]` when no master is provided and not on Databricks or Fabric.
    - Applies Delta extensions; works on both Databricks and local delta-spark.
    - Accepts `extra_configs` to inject environment-specific credentials.
    """
    env = detect_environment()
    local_master = (master is None and env == "local") or (
        isinstance(master, str) and master.startswith("local")
    )

    if local_master:
        os.environ.setdefault("SPARK_LOCAL_IP", "127.0.0.1")
        os.environ.setdefault("SPARK_LOCAL_HOSTNAME", "localhost")

    python_exec = os.environ.get("PYSPARK_PYTHON", sys.executable)
    driver_python = os.environ.get("PYSPARK_DRIVER_PYTHON", python_exec)

    # Stop any active session whose worker Python differs from this
    # interpreter; getOrCreate below will then build a fresh one.
    active = SparkSession.getActiveSession()
    if active is not None:
        try:
            current_exec = active.sparkContext.pythonExec
        except Exception:
            current_exec = None
        if current_exec and os.path.realpath(current_exec) != os.path.realpath(python_exec):
            active.stop()
            active = None

    if active is None:
        _ensure_java_available()

    builder = SparkSession.builder.appName(app_name)
    if master:
        builder = builder.master(master)
    elif env == "local":
        builder = builder.master("local[2]")

    builder = builder.config("spark.pyspark.python", python_exec).config(
        "spark.pyspark.driver.python", driver_python
    )

    builder = _apply_delta_configs(builder)

    # Minimal IO friendliness. Advanced auth must come via extra_configs or cluster env.
    builder = builder.config("spark.sql.shuffle.partitions", "8")

    if local_master:
        local_defaults = {
            "spark.driver.bindAddress": "127.0.0.1",
            "spark.driver.host": "localhost",
            "spark.port.maxRetries": "64",
        }
        for k, v in local_defaults.items():
            if extra_configs and k in extra_configs:
                continue
            builder = builder.config(k, v)

    if extra_configs:
        for k, v in extra_configs.items():
            builder = builder.config(k, v)

    spark = builder.getOrCreate()
    return spark
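
Note the ordering above: local_defaults skips any key already present in extra_configs, and extra_configs is applied last, so caller-supplied values always take precedence over the loopback defaults. A small sketch (the hostname is a hypothetical placeholder):

# Caller override wins: the local default for spark.driver.host
# ("localhost") is skipped because the key appears in extra_configs.
spark = create_session(
    "my-app",
    extra_configs={"spark.driver.host": "dev-box.internal"},
)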

detect_environment

detect_environment() -> str

Detect a likely runtime environment: databricks, fabric, or local.

Heuristics only; callers should not rely on this for security decisions.
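
The markers are plain environment variables, so the heuristic is easy to exercise; for illustration (the runtime version value is arbitrary):

import os
from spark_fuse.spark import detect_environment

assert detect_environment() == "local"  # no marker variables set

os.environ["DATABRICKS_RUNTIME_VERSION"] = "14.3"  # simulate Databricks
assert detect_environment() == "databricks"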

Source code in src/spark_fuse/spark.py
def detect_environment() -> str:
    """Detect a likely runtime environment: databricks, fabric, or local.

    Heuristics only; callers should not rely on this for security decisions.
    """
    if os.environ.get("DATABRICKS_RUNTIME_VERSION") or os.environ.get("DATABRICKS_CLUSTER_ID"):
        return "databricks"
    if os.environ.get("FABRIC_ENVIRONMENT") or os.environ.get("MS_FABRIC"):
        return "fabric"
    return "local"