Class ProtobufTable
java.lang.Object
org.apache.spark.sql.execution.datasources.v2.FileTable
com.here.platform.data.client.spark.datasources.protobuf.ProtobufTable
- All Implemented Interfaces:
Serializable, org.apache.spark.sql.connector.catalog.SupportsRead, org.apache.spark.sql.connector.catalog.SupportsWrite, org.apache.spark.sql.connector.catalog.Table, scala.Equals, scala.Product
public class ProtobufTable
extends org.apache.spark.sql.execution.datasources.v2.FileTable
implements scala.Product, Serializable
- See Also:
-
Constructor Summary
Constructor: ProtobufTable(String name, org.apache.spark.sql.SparkSession sparkSession, org.apache.spark.sql.util.CaseInsensitiveStringMap options, scala.collection.immutable.Seq<String> paths, scala.Option<org.apache.spark.sql.types.StructType> userSpecifiedSchema, Class<? extends org.apache.spark.sql.execution.datasources.FileFormat> fallbackFileFormat)
Method Summary
Modifier and Type — Method:
- abstract static R apply(T1 v1, T2 v2, T3 v3, T4 v4, T5 v5, T6 v6)
- Class<? extends org.apache.spark.sql.execution.datasources.FileFormat> fallbackFileFormat()
- scala.Option<org.apache.spark.sql.types.StructType> inferSchema(scala.collection.immutable.Seq<org.apache.hadoop.fs.FileStatus> files)
- String name()
- org.apache.spark.sql.connector.read.ScanBuilder newScanBuilder(org.apache.spark.sql.util.CaseInsensitiveStringMap options)
- org.apache.spark.sql.connector.write.WriteBuilder newWriteBuilder(org.apache.spark.sql.connector.write.LogicalWriteInfo info)
- org.apache.spark.sql.util.CaseInsensitiveStringMap options()
- scala.collection.immutable.Seq<String> paths()
- org.apache.spark.sql.SparkSession sparkSession()
- static String toString()
- scala.Option<org.apache.spark.sql.types.StructType> userSpecifiedSchema()

Methods inherited from class org.apache.spark.sql.execution.datasources.v2.FileTable:
capabilities, columns, dataSchema, fileIndex, mergedOptions, mergedWriteInfo, partitioning, properties, schema, supportsDataType

Methods inherited from class java.lang.Object:
equals, getClass, hashCode, notify, notifyAll, toString, wait, wait, wait

Methods inherited from interface scala.Equals:
canEqual, equals

Methods inherited from interface scala.Product:
productArity, productElement, productElementName, productElementNames, productIterator, productPrefix
-
Constructor Details
-
ProtobufTable
public ProtobufTable(String name, org.apache.spark.sql.SparkSession sparkSession, org.apache.spark.sql.util.CaseInsensitiveStringMap options, scala.collection.immutable.Seq<String> paths, scala.Option<org.apache.spark.sql.types.StructType> userSpecifiedSchema, Class<? extends org.apache.spark.sql.execution.datasources.FileFormat> fallbackFileFormat)
-
-
Method Details
-
apply
public abstract static R apply(T1 v1, T2 v2, T3 v3, T4 v4, T5 v5, T6 v6) -
toString
-
name
- Specified by:
name in interface org.apache.spark.sql.connector.catalog.Table
-
sparkSession
public org.apache.spark.sql.SparkSession sparkSession() -
options
public org.apache.spark.sql.util.CaseInsensitiveStringMap options() -
paths
-
userSpecifiedSchema
public scala.Option<org.apache.spark.sql.types.StructType> userSpecifiedSchema() -
fallbackFileFormat
- Specified by:
fallbackFileFormat in class org.apache.spark.sql.execution.datasources.v2.FileTable
-
inferSchema
public scala.Option<org.apache.spark.sql.types.StructType> inferSchema(scala.collection.immutable.Seq<org.apache.hadoop.fs.FileStatus> files) - Specified by:
inferSchema in class org.apache.spark.sql.execution.datasources.v2.FileTable
-
newScanBuilder
public org.apache.spark.sql.connector.read.ScanBuilder newScanBuilder(org.apache.spark.sql.util.CaseInsensitiveStringMap options) - Specified by:
newScanBuilder in interface org.apache.spark.sql.connector.catalog.SupportsRead
-
newWriteBuilder
public org.apache.spark.sql.connector.write.WriteBuilder newWriteBuilder(org.apache.spark.sql.connector.write.LogicalWriteInfo info) - Specified by:
newWriteBuilder in interface org.apache.spark.sql.connector.catalog.SupportsWrite
-
formatName
- Specified by:
formatName in class org.apache.spark.sql.execution.datasources.v2.FileTable
-