Struct SparkSession

pub struct SparkSession { /* private fields */ }

Main entry point for creating DataFrames and executing queries. Similar to PySpark’s SparkSession, but using Polars as the backend.

Implementations

impl SparkSession

pub fn new( app_name: Option<String>, master: Option<String>, config: HashMap<String, String>, ) -> Self

pub fn create_or_replace_temp_view(&self, name: &str, df: DataFrame)

Register a DataFrame as a temporary view (PySpark: createOrReplaceTempView). The view is session-scoped and is dropped when the session is dropped.
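
A short sketch: register a view, then resolve it by name with table (names here are illustrative).

use robin_sparkless::session::SparkSession;

let spark = SparkSession::builder().app_name("views").get_or_create();
let df = spark.create_dataframe(
    vec![
        (1, 25, "Alice".to_string()),
        (2, 30, "Bob".to_string()),
    ],
    vec!["id", "age", "name"],
)?;
spark.create_or_replace_temp_view("people", df);
// The view is now resolvable by name for the lifetime of the session.
let people = spark.table("people")?;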

pub fn create_global_temp_view(&self, name: &str, df: DataFrame)

Create a global temporary view (PySpark: createGlobalTempView). Global temp views persist across sessions within the same process.
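
A sketch of creating a global temp view and resolving it through the global_temp prefix (names are illustrative).

use robin_sparkless::session::SparkSession;

let spark = SparkSession::builder().app_name("global").get_or_create();
let df = spark.create_dataframe(vec![(1, 30, "Ann".to_string())], vec!["id", "age", "name"])?;
spark.create_global_temp_view("shared", df);
// Visible to other sessions in this process under the global_temp database.
let shared = spark.table("global_temp.shared")?;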

pub fn create_or_replace_global_temp_view(&self, name: &str, df: DataFrame)

Create or replace a global temporary view (PySpark: createOrReplaceGlobalTempView). Persists across sessions within the same process.

pub fn drop_temp_view(&self, name: &str)

Drop a temporary view by name (PySpark: catalog.dropTempView). No error if the view does not exist.

pub fn drop_global_temp_view(&self, name: &str) -> bool

Drop a global temporary view (PySpark: catalog.dropGlobalTempView). Removes from process-wide catalog.

pub fn register_table(&self, name: &str, df: DataFrame)

Register a DataFrame as a saved table (PySpark: saveAsTable). Inserts into the tables catalog only.

pub fn register_database(&self, name: &str)

Register a database/schema name (from CREATE DATABASE / CREATE SCHEMA). Persisted in session for listDatabases/databaseExists.

pub fn list_database_names(&self) -> Vec<String>

List database names: built-in “default”, “global_temp”, plus any created via CREATE DATABASE / CREATE SCHEMA.

pub fn database_exists(&self, name: &str) -> bool

True if the database name exists (default, global_temp, or created via CREATE DATABASE / CREATE SCHEMA).

pub fn get_saved_table(&self, name: &str) -> Option<DataFrame>

Get a saved table by name (tables map only). Returns None if not in saved tables (temp views not checked).

pub fn saved_table_exists(&self, name: &str) -> bool

True if the name exists in the saved-tables map (not temp views).

pub fn table_exists(&self, name: &str) -> bool

Check if a table or temp view exists (PySpark: catalog.tableExists). True if name is in temp views, saved tables, global temp, or warehouse.
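
A sketch of the existence checks: a temp view satisfies table_exists but not saved_table_exists.

use robin_sparkless::session::SparkSession;

let spark = SparkSession::builder().app_name("catalog").get_or_create();
let df = spark.create_dataframe(vec![(1, 25, "Ann".to_string())], vec!["id", "age", "name"])?;
spark.create_or_replace_temp_view("v", df);
assert!(spark.table_exists("v"));        // temp views count
assert!(!spark.saved_table_exists("v")); // but they are not saved tables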

pub fn list_global_temp_view_names(&self) -> Vec<String>

Return global temp view names (process-scoped). PySpark: catalog.listTables(dbName="global_temp").

pub fn list_temp_view_names(&self) -> Vec<String>

Return temporary view names in this session.

pub fn list_table_names(&self) -> Vec<String>

Return saved table names in this session (saveAsTable / write_delta_table).

pub fn drop_table(&self, name: &str) -> bool

Drop a saved table by name (removes from the tables catalog only). No-op if not present; returns true if the table was removed.

pub fn drop_database(&self, name: &str) -> bool

Drop a database/schema by name (from DROP SCHEMA / DROP DATABASE). Removes from the registered databases only; does not drop “default” or “global_temp”. No-op if not present (matching IF EXISTS semantics). Returns true if removed.
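
A sketch of the database lifecycle; the built-in databases survive drops.

use robin_sparkless::session::SparkSession;

let spark = SparkSession::builder().app_name("db").get_or_create();
spark.register_database("analytics");
assert!(spark.database_exists("analytics"));
assert!(spark.drop_database("analytics"));  // true: removed
assert!(!spark.drop_database("analytics")); // false: already gone
assert!(spark.database_exists("default"));  // built-ins remain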

pub fn warehouse_dir(&self) -> Option<&str>

Return spark.sql.warehouse.dir from config if set. Enables disk-backed saveAsTable.

pub fn table(&self, name: &str) -> Result<DataFrame, PolarsError>

Look up a table or temp view by name (PySpark: table(name)). Resolution order: (1) global_temp.xyz from global catalog, (2) temp view, (3) saved table, (4) warehouse.

pub fn builder() -> SparkSessionBuilder

pub fn from_config(config: &SparklessConfig) -> SparkSession

Create a session from a SparklessConfig. Equivalent to SparkSession::builder().with_config(config).get_or_create().
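
A sketch of the equivalence, assuming SparklessConfig is exported from the crate root (the actual import path may differ).

use robin_sparkless::{SparkSession, SparklessConfig};

fn session_from(cfg: &SparklessConfig) -> SparkSession {
    // Shorthand for SparkSession::builder().with_config(cfg).get_or_create()
    SparkSession::from_config(cfg)
}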

pub fn get_config(&self) -> &HashMap<String, String>

Return a reference to the session config (for catalog/conf compatibility).

pub fn is_case_sensitive(&self) -> bool

Whether column names are case-sensitive (PySpark: spark.sql.caseSensitive). Default is false (case-insensitive matching).

pub fn register_udf<F>(&self, name: &str, f: F) -> Result<(), PolarsError>
where F: Fn(&[Series]) -> Result<Series, PolarsError> + Send + Sync + 'static,

Register a Rust UDF. Session-scoped; invoke it via call_udf. PySpark analogue: spark.udf.register.
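
A minimal sketch: a UDF that doubles an i64 column. The closure signature follows the trait bound above; invoking the UDF afterwards (via call_udf) is not shown.

use polars::prelude::*;
use robin_sparkless::session::SparkSession;

let spark = SparkSession::builder().app_name("udf-demo").get_or_create();
spark.register_udf("double", |cols: &[Series]| {
    let ca = cols[0].i64()?; // errors if the column is not i64
    let doubled: Int64Chunked = ca.into_iter().map(|v| v.map(|x| x * 2)).collect();
    Ok(doubled.into_series())
})?;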

pub fn create_dataframe( &self, data: Vec<(i64, i64, String)>, column_names: Vec<&str>, ) -> Result<DataFrame, PolarsError>

Create a DataFrame from a vector of (i64, i64, String) tuples.

Example
use robin_sparkless::session::SparkSession;

let spark = SparkSession::builder().app_name("test").get_or_create();
let df = spark.create_dataframe(
    vec![
        (1, 25, "Alice".to_string()),
        (2, 30, "Bob".to_string()),
    ],
    vec!["id", "age", "name"],
)?;

pub fn create_dataframe_engine( &self, data: Vec<(i64, i64, String)>, column_names: Vec<&str>, ) -> Result<DataFrame, EngineError>

Same as create_dataframe but returns EngineError. Intended for language bindings that should not depend on Polars types.

pub fn create_dataframe_from_polars(&self, df: PlDataFrame) -> DataFrame

Create a DataFrame from a Polars DataFrame.

pub fn infer_schema_from_json_rows( rows: &[Vec<JsonValue>], names: &[String], ) -> Vec<(String, String)>

Infer a schema (name, dtype_str) from JSON rows by scanning the first non-null value per column. Used by createDataFrame(data, schema=None) when the schema is omitted or only column names are given.
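
A sketch, assuming JsonValue is a re-export of serde_json::Value (the actual alias may differ).

use robin_sparkless::session::SparkSession;
use serde_json::{json, Value as JsonValue};

let rows: Vec<Vec<JsonValue>> = vec![
    vec![json!(1), json!("Alice")],
    vec![JsonValue::Null, json!("Bob")],
];
let names = vec!["id".to_string(), "name".to_string()];
// Scans the first non-null value per column; here the likely result is
// [("id", "bigint"), ("name", "string")].
let schema = SparkSession::infer_schema_from_json_rows(&rows, &names);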

pub fn create_dataframe_from_rows( &self, rows: Vec<Vec<JsonValue>>, schema: Vec<(String, String)>, ) -> Result<DataFrame, PolarsError>

Create a DataFrame from rows and a schema (arbitrary column count and types).

  • rows: each inner vec is one row; its length must match the schema length. Values are JSON-like (i64, f64, string, bool, null, object, array).
  • schema: a list of (column_name, dtype_string), e.g. [("id", "bigint"), ("name", "string")]. Supported dtype strings: bigint, int, long, double, float, string, str, varchar, boolean, bool, date, timestamp, datetime, list, array, array<element_type>, struct<field:type,...>.

When rows is empty and schema is non-empty, returns an empty DataFrame with that schema (issue #519). This enables write.format("parquet").saveAsTable(...) followed by appends, where PySpark would fail with “can not infer schema from empty dataset”.
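
A sketch, again assuming JsonValue is a re-export of serde_json::Value.

use robin_sparkless::session::SparkSession;
use serde_json::{json, Value as JsonValue};

let spark = SparkSession::builder().app_name("rows").get_or_create();
let rows: Vec<Vec<JsonValue>> = vec![
    vec![json!(1), json!("Alice")],
    vec![json!(2), json!("Bob")],
];
let schema = vec![
    ("id".to_string(), "bigint".to_string()),
    ("name".to_string(), "string".to_string()),
];
let df = spark.create_dataframe_from_rows(rows, schema)?;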

pub fn create_dataframe_from_rows_engine( &self, rows: Vec<Vec<JsonValue>>, schema: Vec<(String, String)>, ) -> Result<DataFrame, EngineError>

Same as create_dataframe_from_rows but returns EngineError. Intended for language bindings that should not depend on Polars types.

pub fn range( &self, start: i64, end: i64, step: i64, ) -> Result<DataFrame, PolarsError>

Create a DataFrame with a single column id (bigint) containing values from start to end (exclusive) with step. PySpark: spark.range(end) or spark.range(start, end, step).

  • range(end) → 0 to end-1, step 1
  • range(start, end) → start to end-1, step 1
  • range(start, end, step) → start, start+step, … up to but not including end
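
For example, the three-argument form:

use robin_sparkless::session::SparkSession;

let spark = SparkSession::builder().app_name("range").get_or_create();
// ids 0, 2, 4, 6, 8 (like PySpark's spark.range(0, 10, 2))
let df = spark.range(0, 10, 2)?;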

pub fn read_csv(&self, path: impl AsRef<Path>) -> Result<DataFrame, PolarsError>

Read a CSV file.

Uses Polars’ CSV reader with default options:

  • A header row is assumed (default: true)
  • Schema is inferred from first 100 rows
Example
use robin_sparkless::SparkSession;

let spark = SparkSession::builder().app_name("test").get_or_create();
let df_result = spark.read_csv("data.csv");
// Handle the Result as appropriate in your application

pub fn read_csv_engine( &self, path: impl AsRef<Path>, ) -> Result<DataFrame, EngineError>

Same as read_csv but returns EngineError. Intended for language bindings that should not depend on Polars types.

pub fn read_parquet( &self, path: impl AsRef<Path>, ) -> Result<DataFrame, PolarsError>

Read a Parquet file.

Uses Polars’ Parquet reader. Parquet files have an embedded schema, so schema inference is automatic.

Example
use robin_sparkless::SparkSession;

let spark = SparkSession::builder().app_name("test").get_or_create();
let df_result = spark.read_parquet("data.parquet");
// Handle the Result as appropriate in your application

pub fn read_parquet_engine( &self, path: impl AsRef<Path>, ) -> Result<DataFrame, EngineError>

Same as read_parquet but returns EngineError. Intended for language bindings that should not depend on Polars types.

pub fn read_json( &self, path: impl AsRef<Path>, ) -> Result<DataFrame, PolarsError>

Read a JSON file (JSONL format: one JSON object per line).

Uses Polars’ JSONL reader with default options:

  • Schema is inferred from first 100 rows
Example
use robin_sparkless::SparkSession;

let spark = SparkSession::builder().app_name("test").get_or_create();
let df_result = spark.read_json("data.json");
// Handle the Result as appropriate in your application

pub fn read_json_engine( &self, path: impl AsRef<Path>, ) -> Result<DataFrame, EngineError>

Same as read_json but returns EngineError. Intended for language bindings that should not depend on Polars types.

pub fn sql(&self, query: &str) -> Result<DataFrame, PolarsError>

Execute a SQL query (SELECT only). Tables must be registered with create_or_replace_temp_view. Requires the sql feature. Supports:

  • SELECT (columns or *)
  • FROM (single table or JOIN)
  • WHERE (basic predicates)
  • GROUP BY + aggregates
  • ORDER BY
  • LIMIT
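
A sketch of the full flow (requires the sql feature):

use robin_sparkless::session::SparkSession;

let spark = SparkSession::builder().app_name("sql-demo").get_or_create();
let df = spark.create_dataframe(
    vec![
        (1, 25, "Alice".to_string()),
        (2, 30, "Bob".to_string()),
    ],
    vec!["id", "age", "name"],
)?;
spark.create_or_replace_temp_view("people", df);
let adults = spark.sql("SELECT name, age FROM people WHERE age >= 30")?;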

pub fn table_engine(&self, name: &str) -> Result<DataFrame, EngineError>

Same as table but returns EngineError. Intended for language bindings that should not depend on Polars types.

pub fn read_delta(&self, name_or_path: &str) -> Result<DataFrame, PolarsError>

Stub when delta feature is disabled. Still supports reading by table name.

pub fn read_delta_with_version( &self, name_or_path: &str, version: Option<i64>, ) -> Result<DataFrame, PolarsError>

pub fn read_delta_from_path( &self, _path: impl AsRef<Path>, ) -> Result<DataFrame, PolarsError>

pub fn stop(&self)

Stop the session and clean up resources.

impl SparkSession

pub fn read(&self) -> DataFrameReader

Get a DataFrameReader for reading files.

Trait Implementations

impl Clone for SparkSession

fn clone(&self) -> SparkSession

Returns a duplicate of the value.

fn clone_from(&mut self, source: &Self)

Performs copy-assignment from source.

impl Default for SparkSession

fn default() -> Self

Returns the “default value” for a type.
