pub struct ClickHouseSessionContext { /* private fields */ }
Wrapper for SessionContext which allows running arbitrary ClickHouse functions.
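A minimal usage sketch (not taken from the crate's own examples): it assumes ClickHouseSessionContext is imported from this crate and that the code runs inside an async function returning datafusion::error::Result<()>.
use datafusion::prelude::*;
// `ClickHouseSessionContext` is imported from this crate.

// Wrap a plain SessionContext; no extra extension planners are supplied here.
let ctx = ClickHouseSessionContext::new(SessionContext::new(), None);

// SQL (including ClickHouse-style functions) is planned and executed through the wrapper.
let df = ctx.sql("SELECT 1").await?;
let _results = df.collect().await?;

// Deref exposes the underlying SessionContext methods, and the wrapper
// can be unwrapped again when the plain context is needed.
let _inner: SessionContext = ctx.into_session_context();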
Implementations§
impl ClickHouseSessionContext
pub fn new( ctx: SessionContext, extension_planners: Option<Vec<Arc<dyn ExtensionPlanner + Send + Sync>>>, ) -> Self
pub fn with_expr_planner(self, expr_planner: Arc<dyn ExprPlanner>) -> Self
pub fn into_session_context(self) -> SessionContext
pub async fn sql(&self, sql: &str) -> Result<DataFrame>
§Errors
Returns an error if the SQL query is invalid or if the query execution fails.
pub async fn sql_with_options( &self, sql: &str, options: SQLOptions, ) -> Result<DataFrame>
§Errors
Returns an error if the SQL query is invalid or if the query execution fails.
pub async fn statement_to_plan( &self, state: &SessionState, statement: Statement, ) -> Result<LogicalPlan>
§Errors
Returns an error if the SQL query is invalid or if the query execution fails.
Methods from Deref<Target = SessionContext>§
pub async fn read_csv<P>( &self, table_paths: P, options: CsvReadOptions<'_>, ) -> Result<DataFrame, DataFusionError> where P: DataFilePaths,
Creates a DataFrame for reading a CSV data source.
For more control such as reading multiple files, you can use read_table with a super::ListingTable.
Example usage is given below:
use datafusion::prelude::*;
let ctx = SessionContext::new();
// You can read a single file using `read_csv`
let df = ctx.read_csv("tests/data/example.csv", CsvReadOptions::new()).await?;
// you can also read multiple files:
let df = ctx.read_csv(vec!["tests/data/example.csv", "tests/data/example.csv"], CsvReadOptions::new()).await?;
pub async fn register_csv( &self, table_ref: impl Into<TableReference>, table_path: impl AsRef<str>, options: CsvReadOptions<'_>, ) -> Result<(), DataFusionError>
Registers a CSV file as a table which can be referenced from SQL statements executed against this context.
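A short sketch (the table name and the example file path reused from read_csv above are illustrative, and an async Result-returning context is assumed):
use datafusion::prelude::*;
let ctx = SessionContext::new();
// Register the CSV file under a table name, then query it with SQL.
ctx.register_csv("example", "tests/data/example.csv", CsvReadOptions::new()).await?;
let df = ctx.sql("SELECT * FROM example LIMIT 5").await?;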
pub async fn write_csv( &self, plan: Arc<dyn ExecutionPlan>, path: impl AsRef<str>, ) -> Result<(), DataFusionError>
Executes a query and writes the results to a partitioned CSV file.
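A hedged sketch of one way to obtain an ExecutionPlan and write it out; "out_dir" is an illustrative output location, and DataFrame::create_physical_plan is used only as one way to produce the plan:
use datafusion::prelude::*;
let ctx = SessionContext::new();
ctx.register_csv("example", "tests/data/example.csv", CsvReadOptions::new()).await?;
// Build an ExecutionPlan from a query, then write its output as partitioned CSV.
let plan = ctx.sql("SELECT * FROM example").await?.create_physical_plan().await?;
ctx.write_csv(plan, "out_dir").await?;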
pub async fn read_json<P>( &self, table_paths: P, options: NdJsonReadOptions<'_>, ) -> Result<DataFrame, DataFusionError> where P: DataFilePaths,
Creates a DataFrame for reading a JSON data source.
For more control such as reading multiple files, you can use read_table with a super::ListingTable.
For an example, see read_csv.
pub async fn register_json( &self, table_ref: impl Into<TableReference>, table_path: impl AsRef<str>, options: NdJsonReadOptions<'_>, ) -> Result<(), DataFusionError>
Registers a JSON file as a table that can be referenced from SQL statements executed against this context.
pub async fn write_json( &self, plan: Arc<dyn ExecutionPlan>, path: impl AsRef<str>, ) -> Result<(), DataFusionError>
Executes a query and writes the results to a partitioned JSON file.
pub async fn read_parquet<P>( &self, table_paths: P, options: ParquetReadOptions<'_>, ) -> Result<DataFrame, DataFusionError> where P: DataFilePaths,
Creates a DataFrame for reading a Parquet data source.
For more control such as reading multiple files, you can use read_table with a super::ListingTable.
For an example, see read_csv.
§Note: Statistics
By default, statistics are collected when reading Parquet files. This can slow down the initial DataFrame creation while greatly accelerating queries with certain filters.
To disable statistics collection, set the config option datafusion.execution.collect_statistics to false. See ConfigOptions and ExecutionOptions::collect_statistics for more details.
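A sketch of disabling the option when building the context; SessionConfig::set_bool with this key is assumed to be available in the DataFusion version in use:
use datafusion::prelude::*;
// Build a config with Parquet statistics collection disabled, then a context from it.
let config = SessionConfig::new().set_bool("datafusion.execution.collect_statistics", false);
let ctx = SessionContext::new_with_config(config);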
pub async fn register_parquet( &self, table_ref: impl Into<TableReference>, table_path: impl AsRef<str>, options: ParquetReadOptions<'_>, ) -> Result<(), DataFusionError>
Registers a Parquet file as a table that can be referenced from SQL statements executed against this context.
§Note: Statistics
Statistics are not collected by default. See read_parquet for more details and how to enable them.
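A minimal sketch (the file path and table name are illustrative):
use datafusion::prelude::*;
let ctx = SessionContext::new();
ctx.register_parquet("my_table", "tests/data/example.parquet", ParquetReadOptions::default()).await?;
let df = ctx.sql("SELECT COUNT(*) FROM my_table").await?;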
pub async fn write_parquet( &self, plan: Arc<dyn ExecutionPlan>, path: impl AsRef<str>, writer_properties: Option<WriterProperties>, ) -> Result<(), DataFusionError>
Executes a query and writes the results to a partitioned Parquet file.
pub async fn refresh_catalogs(&self) -> Result<(), DataFusionError>
Finds any ListingSchemaProviders and instructs them to reload tables from “disk”.
pub fn session_start_time(&self) -> DateTime<Utc>
Returns the time this SessionContext was created.
pub fn add_optimizer_rule( &self, optimizer_rule: Arc<dyn OptimizerRule + Send + Sync>, )
Adds an optimizer rule to the end of the existing rules.
See SessionState for more control of when the rule is applied.
pub fn add_analyzer_rule( &self, analyzer_rule: Arc<dyn AnalyzerRule + Send + Sync>, )
Adds an analyzer rule to the end of the existing rules.
See SessionState for more control of when the rule is applied.
pub fn register_object_store( &self, url: &Url, object_store: Arc<dyn ObjectStore>, ) -> Option<Arc<dyn ObjectStore>>
Registers an ObjectStore to be used with a specific URL prefix.
See RuntimeEnv::register_object_store for more details.
§Example: register a local object store for the “file://” URL prefix
let object_store_url = ObjectStoreUrl::parse("file://").unwrap();
let object_store = object_store::local::LocalFileSystem::new();
let ctx = SessionContext::new();
// All files with the file:// url prefix will be read from the local file system
ctx.register_object_store(object_store_url.as_ref(), Arc::new(object_store));
pub fn register_batch( &self, table_name: &str, batch: RecordBatch, ) -> Result<Option<Arc<dyn TableProvider>>, DataFusionError>
Registers the RecordBatch as the specified table name.
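A self-contained sketch that builds a single-column RecordBatch and registers it under an illustrative name:
use std::sync::Arc;
use datafusion::arrow::array::Int32Array;
use datafusion::arrow::datatypes::{DataType, Field, Schema};
use datafusion::arrow::record_batch::RecordBatch;
use datafusion::prelude::*;

// One Int32 column named "a" with three rows.
let schema = Arc::new(Schema::new(vec![Field::new("a", DataType::Int32, false)]));
let batch = RecordBatch::try_new(schema, vec![Arc::new(Int32Array::from(vec![1, 2, 3]))])?;

let ctx = SessionContext::new();
ctx.register_batch("my_batch", batch)?;
let df = ctx.sql("SELECT a FROM my_batch").await?;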
pub fn runtime_env(&self) -> Arc<RuntimeEnv>
Return the RuntimeEnv used to run queries with this SessionContext
pub fn session_id(&self) -> String
Returns an id that uniquely identifies this SessionContext.
pub fn table_factory( &self, file_type: &str, ) -> Option<Arc<dyn TableProviderFactory>>
Return the TableProviderFactory that is registered for the specified file type, if any.
pub fn enable_ident_normalization(&self) -> bool
Return the enable_ident_normalization of this Session.
pub fn copied_config(&self) -> SessionConfig
Return a copied version of config for this Session
pub fn copied_table_options(&self) -> TableOptions
Return a copied version of table options for this Session
pub async fn sql(&self, sql: &str) -> Result<DataFrame, DataFusionError>
Creates a DataFrame from SQL query text.
Note: This API implements DDL statements such as CREATE TABLE and CREATE VIEW and DML statements such as INSERT INTO with in-memory default implementations. See Self::sql_with_options.
§Example: Running SQL queries
See the example on Self
§Example: Creating a Table with SQL
use datafusion::prelude::*;
let ctx = SessionContext::new();
ctx
.sql("CREATE TABLE foo (x INTEGER)")
.await?
.collect()
.await?;
assert!(ctx.table_exist("foo").unwrap());
pub async fn sql_with_options( &self, sql: &str, options: SQLOptions, ) -> Result<DataFrame, DataFusionError>
Creates a DataFrame from SQL query text, first validating that the queries are allowed by options.
§Example: Preventing Creating a Table with SQL
If you want to avoid creating tables, or modifying data or the session, set SQLOptions appropriately:
use datafusion::prelude::*;
let ctx = SessionContext::new();
let options = SQLOptions::new()
.with_allow_ddl(false);
let err = ctx.sql_with_options("CREATE TABLE foo (x INTEGER)", options)
.await
.unwrap_err();
assert!(
err.to_string().starts_with("Error during planning: DDL not supported: CreateMemoryTable")
);
pub fn parse_sql_expr( &self, sql: &str, df_schema: &DFSchema, ) -> Result<Expr, DataFusionError>
Creates logical expressions from SQL query text.
§Example: Parsing SQL queries
use datafusion::arrow::datatypes::{DataType, Field, Schema};
use datafusion::common::DFSchema;
use datafusion::prelude::*;

// datafusion will parse number as i64 first.
let sql = "a > 10";
let expected = col("a").gt(lit(10 as i64));
// provide type information that `a` is an Int32
let schema = Schema::new(vec![Field::new("a", DataType::Int32, true)]);
let df_schema = DFSchema::try_from(schema).unwrap();
let expr = SessionContext::new().parse_sql_expr(sql, &df_schema)?;
assert_eq!(expected, expr);
pub async fn execute_logical_plan( &self, plan: LogicalPlan, ) -> Result<DataFrame, DataFusionError>
Execute the LogicalPlan and return a DataFrame. This API is not feature-limited (so all SQL such as CREATE TABLE and COPY will be run).
If you wish to limit the type of plan that can be run from SQL, see Self::sql_with_options and SQLOptions::verify_plan.
pub fn create_physical_expr( &self, expr: Expr, df_schema: &DFSchema, ) -> Result<Arc<dyn PhysicalExpr>, DataFusionError>
Create a PhysicalExpr from an Expr after applying type coercion and function rewrites.
Note: The expression is not simplified or otherwise optimized: a = 1 + 2 will not be simplified to a = 3 as this is a more involved process. See the expr_api example for how to simplify expressions.
§Example
use datafusion::arrow::datatypes::{DataType, Field, Schema};
use datafusion::common::DFSchema;
use datafusion::prelude::*;

// a = 1 (i64)
let expr = col("a").eq(lit(1i64));
// provide type information that `a` is an Int32
let schema = Schema::new(vec![Field::new("a", DataType::Int32, true)]);
let df_schema = DFSchema::try_from(schema).unwrap();
// Create a PhysicalExpr. Note DataFusion automatically coerces (casts) `1i64` to `1i32`
let physical_expr = SessionContext::new()
    .create_physical_expr(expr, &df_schema).unwrap();
§See Also
SessionState::create_physical_expr for a lower level API.
pub fn register_variable( &self, variable_type: VarType, provider: Arc<dyn VarProvider + Send + Sync>, )
Registers a variable provider within this context.
pub fn register_udtf(&self, name: &str, fun: Arc<dyn TableFunctionImpl>)
Register a table UDF with this context
pub fn register_udf(&self, f: ScalarUDF)
Registers a scalar UDF within this context.
Note in SQL queries, function names are looked up using lowercase unless the query uses quotes. For example,
- SELECT MY_FUNC(x)... will look for a function named "my_func"
- SELECT "my_FUNC"(x) will look for a function named "my_FUNC"
Any functions registered with the udf name or its aliases will be overwritten with this new function.
pub fn register_udaf(&self, f: AggregateUDF)
Registers an aggregate UDF within this context.
Note in SQL queries, aggregate names are looked up using lowercase unless the query uses quotes. For example,
- SELECT MY_UDAF(x)... will look for an aggregate named "my_udaf"
- SELECT "my_UDAF"(x) will look for an aggregate named "my_UDAF"
pub fn register_udwf(&self, f: WindowUDF)
Registers a window UDF within this context.
Note in SQL queries, window function names are looked up using lowercase unless the query uses quotes. For example,
- SELECT MY_UDWF(x)... will look for a window function named "my_udwf"
- SELECT "my_UDWF"(x) will look for a window function named "my_UDWF"
pub fn deregister_udf(&self, name: &str)
Deregisters a UDF within this context.
pub fn deregister_udaf(&self, name: &str)
Deregisters a UDAF within this context.
pub fn deregister_udwf(&self, name: &str)
Deregisters a UDWF within this context.
pub fn deregister_udtf(&self, name: &str)
Deregisters a UDTF within this context.
pub async fn read_arrow<P>( &self, table_paths: P, options: ArrowReadOptions<'_>, ) -> Result<DataFrame, DataFusionError> where P: DataFilePaths,
Creates a DataFrame for reading an Arrow data source.
For more control such as reading multiple files, you can use read_table with a ListingTable.
For an example, see read_csv.
pub fn read_empty(&self) -> Result<DataFrame, DataFusionError>
Creates an empty DataFrame.
pub fn read_table( &self, provider: Arc<dyn TableProvider>, ) -> Result<DataFrame, DataFusionError>
Creates a DataFrame for a TableProvider such as a ListingTable or a custom user defined provider.
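A sketch using the in-memory MemTable provider as the TableProvider (chosen here only for illustration; any provider works):
use std::sync::Arc;
use datafusion::arrow::array::Int32Array;
use datafusion::arrow::datatypes::{DataType, Field, Schema};
use datafusion::arrow::record_batch::RecordBatch;
use datafusion::datasource::MemTable;
use datafusion::prelude::*;

// Build a tiny in-memory table with a single Int32 column.
let schema = Arc::new(Schema::new(vec![Field::new("a", DataType::Int32, false)]));
let batch = RecordBatch::try_new(schema.clone(), vec![Arc::new(Int32Array::from(vec![1, 2, 3]))])?;
let provider = MemTable::try_new(schema, vec![vec![batch]])?;

let ctx = SessionContext::new();
let df = ctx.read_table(Arc::new(provider))?;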
pub fn read_batch( &self, batch: RecordBatch, ) -> Result<DataFrame, DataFusionError>
Creates a DataFrame for reading a RecordBatch.
pub fn read_batches( &self, batches: impl IntoIterator<Item = RecordBatch>, ) -> Result<DataFrame, DataFusionError>
Create a DataFrame for reading a Vec<RecordBatch>.
pub async fn register_listing_table( &self, table_ref: impl Into<TableReference>, table_path: impl AsRef<str>, options: ListingOptions, provided_schema: Option<Arc<Schema>>, sql_definition: Option<String>, ) -> Result<(), DataFusionError>
Registers a ListingTable that can assemble multiple files from locations in an ObjectStore instance into a single table.
This method is async because it might need to resolve the schema.
pub async fn register_arrow( &self, name: &str, table_path: &str, options: ArrowReadOptions<'_>, ) -> Result<(), DataFusionError>
Registers an Arrow file as a table that can be referenced from SQL statements executed against this context.
pub fn register_catalog( &self, name: impl Into<String>, catalog: Arc<dyn CatalogProvider>, ) -> Option<Arc<dyn CatalogProvider>>
Registers a named catalog using a custom CatalogProvider so that it can be referenced from SQL statements executed against this context.
Returns the CatalogProvider previously registered for this name, if any.
pub fn catalog_names(&self) -> Vec<String>
Retrieves the list of available catalog names.
pub fn catalog(&self, name: &str) -> Option<Arc<dyn CatalogProvider>>
Retrieves a CatalogProvider instance by name.
pub fn register_table( &self, table_ref: impl Into<TableReference>, provider: Arc<dyn TableProvider>, ) -> Result<Option<Arc<dyn TableProvider>>, DataFusionError>
Registers a TableProvider as a table that can be referenced from SQL statements executed against this context.
If a table of the same name was already registered, returns a “Table already exists” error.
pub fn deregister_table( &self, table_ref: impl Into<TableReference>, ) -> Result<Option<Arc<dyn TableProvider>>, DataFusionError>
Deregisters the given table.
Returns the registered provider, if any
pub fn table_exist( &self, table_ref: impl Into<TableReference>, ) -> Result<bool, DataFusionError>
Return true if the specified table exists in the schema provider.
pub async fn table( &self, table_ref: impl Into<TableReference>, ) -> Result<DataFrame, DataFusionError>
Retrieves a DataFrame representing a table previously registered by calling the register_table function.
Returns an error if no table has been registered with the provided reference.
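A short sketch (the table name and file path are illustrative):
use datafusion::prelude::*;
let ctx = SessionContext::new();
ctx.register_csv("example", "tests/data/example.csv", CsvReadOptions::new()).await?;
// Retrieve the registered table as a DataFrame and materialize it.
let df = ctx.table("example").await?;
let _batches = df.collect().await?;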
pub fn table_function( &self, name: &str, ) -> Result<Arc<TableFunction>, DataFusionError>
Retrieves a TableFunction reference by name.
Returns an error if no table function has been registered with the provided name.
pub async fn table_provider( &self, table_ref: impl Into<TableReference>, ) -> Result<Arc<dyn TableProvider>, DataFusionError>
Return a TableProvider for the specified table.
pub fn task_ctx(&self) -> Arc<TaskContext>
Get a new TaskContext to run in this session
pub fn state(&self) -> SessionState
Return a new SessionState suitable for executing a single query.
Notes:
- query_execution_start_time is set to the current time for the returned state.
- The returned state is not shared with the current session state, and thus changes to the returned SessionState, such as changing ConfigOptions, will not be reflected in this SessionContext.
pub fn state_ref(&self) -> Arc<RwLock<RawRwLock, SessionState>>
Get reference to SessionState
pub fn state_weak_ref(&self) -> Weak<RwLock<RawRwLock, SessionState>>
Get weak reference to SessionState
pub fn register_catalog_list(&self, catalog_list: Arc<dyn CatalogProviderList>)
Register CatalogProviderList in SessionState.
pub fn register_table_options_extension<T>(&self, extension: T) where T: ConfigExtension,
Registers a ConfigExtension as a table option extension that can be referenced from SQL statements executed against this context.
Trait Implementations§
impl Clone for ClickHouseSessionContext
fn clone(&self) -> ClickHouseSessionContext
fn clone_from(&mut self, source: &Self)
Performs copy-assignment from source.
impl Deref for ClickHouseSessionContext
impl From<&SessionContext> for ClickHouseSessionContext
fn from(inner: &SessionContext) -> Self
impl From<SessionContext> for ClickHouseSessionContext
fn from(inner: SessionContext) -> Self
Auto Trait Implementations§
impl Freeze for ClickHouseSessionContext
impl !RefUnwindSafe for ClickHouseSessionContext
impl Send for ClickHouseSessionContext
impl Sync for ClickHouseSessionContext
impl Unpin for ClickHouseSessionContext
impl !UnwindSafe for ClickHouseSessionContext
Blanket Implementations§
impl<T> BorrowMut<T> for T where T: ?Sized,
fn borrow_mut(&mut self) -> &mut T
impl<T> CloneToUninit for T where T: Clone,
impl<T> Instrument for T
fn instrument(self, span: Span) -> Instrumented<Self>
fn in_current_span(self) -> Instrumented<Self>
impl<T> IntoEither for T
fn into_either(self, into_left: bool) -> Either<Self, Self>
Converts self into a Left variant of Either<Self, Self> if into_left is true. Converts self into a Right variant of Either<Self, Self> otherwise.
fn into_either_with<F>(self, into_left: F) -> Either<Self, Self>
Converts self into a Left variant of Either<Self, Self> if into_left(&self) returns true. Converts self into a Right variant of Either<Self, Self> otherwise.