- serializeTimers(Collection<TimerInternals.TimerData>, TimerInternals.TimerDataCoder) - Static method in class org.apache.beam.runners.spark.stateful.SparkTimerInternals
-
- setBatchIntervalMillis(Long) - Method in interface org.apache.beam.runners.spark.SparkPipelineOptions
-
- setCheckpointDir(String) - Method in interface org.apache.beam.runners.spark.SparkPipelineOptions
-
- setCheckpointDurationMillis(Long) - Method in interface org.apache.beam.runners.spark.SparkPipelineOptions
-
- setCurrentTransform(AppliedPTransform<?, ?, ?>) - Method in class org.apache.beam.runners.spark.translation.EvaluationContext
-
- setEnableSparkMetricSinks(Boolean) - Method in interface org.apache.beam.runners.spark.SparkPipelineOptions
-
- setExpectedAssertions(Integer) - Method in interface org.apache.beam.runners.spark.TestSparkPipelineOptions
-
- setForceStreaming(boolean) - Method in interface org.apache.beam.runners.spark.TestSparkPipelineOptions
-
- setInputFile(String) - Method in interface org.apache.beam.runners.spark.examples.WordCount.WordCountOptions
-
- setListeners(List<JavaStreamingListener>) - Method in interface org.apache.beam.runners.spark.SparkContextOptions
-
- setMaxRecordsPerBatch(Long) - Method in interface org.apache.beam.runners.spark.SparkPipelineOptions
-
- setMinReadTimeMillis(Long) - Method in interface org.apache.beam.runners.spark.SparkPipelineOptions
-
- setName(String) - Method in class org.apache.beam.runners.spark.translation.BoundedDataset
-
- setName(String) - Method in interface org.apache.beam.runners.spark.translation.Dataset
-
- setName(String) - Method in class org.apache.beam.runners.spark.translation.streaming.UnboundedDataset
-
- setOutput(String) - Method in interface org.apache.beam.runners.spark.examples.WordCount.WordCountOptions
-
- setProvidedSparkContext(JavaSparkContext) - Method in interface org.apache.beam.runners.spark.SparkContextOptions
-
- setReadTimePercentage(Double) - Method in interface org.apache.beam.runners.spark.SparkPipelineOptions
-
- setSparkMaster(String) - Method in interface org.apache.beam.runners.spark.SparkPipelineOptions
-
- setStopPipelineWatermark(Long) - Method in interface org.apache.beam.runners.spark.TestSparkPipelineOptions
-
- setStorageLevel(String) - Method in interface org.apache.beam.runners.spark.SparkPipelineOptions
-
- setTimer(TimerInternals.TimerData) - Method in class org.apache.beam.runners.spark.stateful.SparkTimerInternals
-
- setTimer(StateNamespace, String, Instant, TimeDomain) - Method in class org.apache.beam.runners.spark.stateful.SparkTimerInternals
-
- setUsesProvidedSparkContext(boolean) - Method in interface org.apache.beam.runners.spark.SparkPipelineOptions
-
- shouldDefer(TransformHierarchy.Node) - Method in class org.apache.beam.runners.spark.SparkRunner.Evaluator
-
- SideInputBroadcast<T> - Class in org.apache.beam.runners.spark.util
-
Broadcast helper for side inputs.
- sideInputs - Variable in class org.apache.beam.runners.spark.translation.SparkAbstractCombineFn
-
- SinglePrimitiveOutputPTransform<T> - Class in org.apache.beam.runners.spark.util
-
- SinglePrimitiveOutputPTransform(PTransform<PInput, PCollection<T>>) - Constructor for class org.apache.beam.runners.spark.util.SinglePrimitiveOutputPTransform
-
- skipAssignWindows(Window.Assign<T>, EvaluationContext) - Static method in class org.apache.beam.runners.spark.translation.TranslationUtils
-
Checks if the window transformation should be applied or skipped.
- sortByWindows(Iterable<WindowedValue<T>>) - Static method in class org.apache.beam.runners.spark.translation.SparkAbstractCombineFn
-
- sourceName() - Method in class org.apache.beam.runners.spark.metrics.AggregatorMetricSource
-
- sourceName() - Method in class org.apache.beam.runners.spark.metrics.CompositeSource
-
- sourceName() - Method in class org.apache.beam.runners.spark.metrics.SparkBeamMetricSource
-
- SourceRDD - Class in org.apache.beam.runners.spark.io
-
Classes implementing Beam
Source RDDs.
- SourceRDD() - Constructor for class org.apache.beam.runners.spark.io.SourceRDD
-
- SourceRDD.Bounded<T> - Class in org.apache.beam.runners.spark.io
-
- SourceRDD.Unbounded<T,CheckpointMarkT extends UnboundedSource.CheckpointMark> - Class in org.apache.beam.runners.spark.io
-
- SparkAbstractCombineFn - Class in org.apache.beam.runners.spark.translation
-
- SparkAbstractCombineFn(SparkRuntimeContext, Map<TupleTag<?>, KV<WindowingStrategy<?, ?>, SideInputBroadcast<?>>>, WindowingStrategy<?, ?>) - Constructor for class org.apache.beam.runners.spark.translation.SparkAbstractCombineFn
-
- SparkAssignWindowFn<T,W extends BoundedWindow> - Class in org.apache.beam.runners.spark.translation
-
- SparkAssignWindowFn(WindowFn<? super T, W>) - Constructor for class org.apache.beam.runners.spark.translation.SparkAssignWindowFn
-
- SparkBeamMetricSource - Class in org.apache.beam.runners.spark.metrics
-
A Spark
Source that is tailored to expose a
SparkBeamMetric,
wrapping an underlying
MetricResults instance.
- SparkBeamMetricSource(String) - Constructor for class org.apache.beam.runners.spark.metrics.SparkBeamMetricSource
-
- SparkContextFactory - Class in org.apache.beam.runners.spark.translation
-
The Spark context factory.
- SparkContextOptions - Interface in org.apache.beam.runners.spark
-
A custom
PipelineOptions to work with properties related to
JavaSparkContext.
- SparkContextOptions.EmptyListenersList - Class in org.apache.beam.runners.spark
-
Returns an empty list, to avoid handling null.
- SparkGlobalCombineFn<InputT,AccumT,OutputT> - Class in org.apache.beam.runners.spark.translation
-
- SparkGlobalCombineFn(CombineWithContext.CombineFnWithContext<InputT, AccumT, OutputT>, SparkRuntimeContext, Map<TupleTag<?>, KV<WindowingStrategy<?, ?>, SideInputBroadcast<?>>>, WindowingStrategy<?, ?>) - Constructor for class org.apache.beam.runners.spark.translation.SparkGlobalCombineFn
-
- SparkGroupAlsoByWindowViaOutputBufferFn<K,InputT,W extends BoundedWindow> - Class in org.apache.beam.runners.spark.translation
-
- SparkGroupAlsoByWindowViaOutputBufferFn(WindowingStrategy<?, W>, StateInternalsFactory<K>, SystemReduceFn<K, InputT, Iterable<InputT>, Iterable<InputT>, W>, SparkRuntimeContext, Accumulator<NamedAggregators>) - Constructor for class org.apache.beam.runners.spark.translation.SparkGroupAlsoByWindowViaOutputBufferFn
-
- SparkGroupAlsoByWindowViaWindowSet - Class in org.apache.beam.runners.spark.stateful
-
- SparkGroupAlsoByWindowViaWindowSet() - Constructor for class org.apache.beam.runners.spark.stateful.SparkGroupAlsoByWindowViaWindowSet
-
- SparkKeyedCombineFn<K,InputT,AccumT,OutputT> - Class in org.apache.beam.runners.spark.translation
-
- SparkKeyedCombineFn(CombineWithContext.CombineFnWithContext<InputT, AccumT, OutputT>, SparkRuntimeContext, Map<TupleTag<?>, KV<WindowingStrategy<?, ?>, SideInputBroadcast<?>>>, WindowingStrategy<?, ?>) - Constructor for class org.apache.beam.runners.spark.translation.SparkKeyedCombineFn
-
- SparkNativePipelineVisitor - Class in org.apache.beam.runners.spark
-
Pipeline visitor for translating a Beam pipeline into equivalent Spark operations.
- SparkPCollectionView - Class in org.apache.beam.runners.spark.translation
-
SparkPCollectionView is used to pass serialized views to lambdas.
- SparkPCollectionView() - Constructor for class org.apache.beam.runners.spark.translation.SparkPCollectionView
-
- SparkPipelineOptions - Interface in org.apache.beam.runners.spark
-
Spark runner
PipelineOptions handles Spark execution-related configurations,
such as the master address, batch-interval, and other user-related knobs.
- SparkPipelineOptions.TmpCheckpointDirFactory - Class in org.apache.beam.runners.spark
-
Returns the default checkpoint directory of /tmp/${job.name}.
- SparkPipelineResult - Class in org.apache.beam.runners.spark
-
Represents a Spark pipeline execution result.
- SparkPipelineTranslator - Interface in org.apache.beam.runners.spark.translation
-
Translator to support translation between Beam transformations and Spark transformations.
- SparkRunner - Class in org.apache.beam.runners.spark
-
The SparkRunner translates operations defined on a pipeline to a representation executable by
Spark, and then submits the job to Spark to be executed.
- SparkRunner.Evaluator - Class in org.apache.beam.runners.spark
-
Evaluator on the pipeline.
- SparkRunnerDebugger - Class in org.apache.beam.runners.spark
-
Pipeline runner which translates a Beam pipeline into equivalent Spark operations, without
running them.
- SparkRunnerDebugger.DebugSparkPipelineResult - Class in org.apache.beam.runners.spark
-
- SparkRunnerRegistrar - Class in org.apache.beam.runners.spark
-
- SparkRunnerRegistrar.Options - Class in org.apache.beam.runners.spark
-
- SparkRunnerRegistrar.Runner - Class in org.apache.beam.runners.spark
-
- SparkRunnerStreamingContextFactory - Class in org.apache.beam.runners.spark.translation.streaming
-
A JavaStreamingContext factory for resilience.
- SparkRunnerStreamingContextFactory(Pipeline, SparkPipelineOptions, Checkpoint.CheckpointDir) - Constructor for class org.apache.beam.runners.spark.translation.streaming.SparkRunnerStreamingContextFactory
-
- SparkRuntimeContext - Class in org.apache.beam.runners.spark.translation
-
The SparkRuntimeContext allows us to define useful features on the client side before our
data flow program is launched.
- SparkSideInputReader - Class in org.apache.beam.runners.spark.util
-
- SparkSideInputReader(Map<TupleTag<?>, KV<WindowingStrategy<?, ?>, SideInputBroadcast<?>>>) - Constructor for class org.apache.beam.runners.spark.util.SparkSideInputReader
-
- SparkTimerInternals - Class in org.apache.beam.runners.spark.stateful
-
- SparkUnboundedSource - Class in org.apache.beam.runners.spark.io
-
- SparkUnboundedSource() - Constructor for class org.apache.beam.runners.spark.io.SparkUnboundedSource
-
- SparkUnboundedSource.Metadata - Class in org.apache.beam.runners.spark.io
-
A metadata holder for an input stream partition.
- SparkWatermarks(Instant, Instant, Instant) - Constructor for class org.apache.beam.runners.spark.util.GlobalWatermarkHolder.SparkWatermarks
-
- start() - Method in class org.apache.beam.runners.spark.io.MicrobatchSource.Reader
-
- state - Variable in class org.apache.beam.runners.spark.SparkPipelineResult
-
- StateSpecFunctions - Class in org.apache.beam.runners.spark.stateful
-
A class containing StateSpec mappingFunctions.
- StateSpecFunctions() - Constructor for class org.apache.beam.runners.spark.stateful.StateSpecFunctions
-
- stop() - Method in class org.apache.beam.runners.spark.SparkPipelineResult
-
- stop() - Method in class org.apache.beam.runners.spark.SparkRunnerDebugger.DebugSparkPipelineResult
-
- stopSparkContext(JavaSparkContext) - Static method in class org.apache.beam.runners.spark.translation.SparkContextFactory
-
- StorageLevelPTransform - Class in org.apache.beam.runners.spark.translation
-
Get RDD storage level for the input PCollection (mostly used for testing purpose).
- StorageLevelPTransform() - Constructor for class org.apache.beam.runners.spark.translation.StorageLevelPTransform
-
- StreamingTransformTranslator - Class in org.apache.beam.runners.spark.translation.streaming
-
Supports translation between a Beam transform, and Spark's operations on DStreams.
- StreamingTransformTranslator.Translator - Class in org.apache.beam.runners.spark.translation.streaming
-
Translator matches Beam transformation with the appropriate evaluator.