public class RowWriteSupport extends parquet.hadoop.api.WriteSupport<Row> implements Logging
A parquet.hadoop.api.WriteSupport for Row objects.

| Constructor and Description |
| --- |
| RowWriteSupport() |
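In normal use this write support is wired into Parquet's Hadoop output path rather than invoked directly. A minimal sketch of that wiring, assuming the class lives in org.apache.spark.sql.parquet (as in the Spark source tree) and that the standard parquet-hadoop hook ParquetOutputFormat.setWriteSupportClass is available; neither assumption comes from this page:

```java
import org.apache.hadoop.mapreduce.Job;
import org.apache.spark.sql.parquet.RowWriteSupport;
import parquet.hadoop.ParquetOutputFormat;

public class RegisterRowWriteSupportSketch {
  public static void main(String[] args) throws Exception {
    Job job = Job.getInstance();
    // Tell ParquetOutputFormat which WriteSupport turns Row records into Parquet calls.
    ParquetOutputFormat.setWriteSupportClass(job, RowWriteSupport.class);
    job.setOutputFormatClass(ParquetOutputFormat.class);
    // The Catalyst schema still has to be published separately; see setSchema(...) below.
  }
}
```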
| Modifier and Type | Method and Description |
| --- | --- |
| org.apache.spark.sql.catalyst.expressions.Attribute[] | attributes() |
| static scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> | getSchema(org.apache.hadoop.conf.Configuration configuration) |
| parquet.hadoop.api.WriteSupport.WriteContext | init(org.apache.hadoop.conf.Configuration configuration) |
| void | prepareForWrite(parquet.io.api.RecordConsumer recordConsumer) |
| static void | setSchema(scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> schema, org.apache.hadoop.conf.Configuration configuration) |
| static String | SPARK_ROW_SCHEMA() |
| void | write(Row record) |
| void | writeArray(ArrayType schema, scala.collection.Seq<Object> array) |
| void | writeDecimal(Decimal decimal, int precision) |
| void | writeMap(MapType schema, scala.collection.immutable.Map<?,Object> map) |
| void | writePrimitive(DataType schema, Object value) |
| parquet.io.api.RecordConsumer | writer() |
| void | writeStruct(StructType schema, Row struct) |
| void | writeTimestamp(java.sql.Timestamp ts) |
| void | writeValue(DataType schema, Object value) |
Methods inherited from class java.lang.Object: equals, getClass, hashCode, notify, notifyAll, toString, wait, wait, wait

Methods inherited from interface Logging: initializeIfNecessary, initializeLogging, isTraceEnabled, log_, log, logDebug, logDebug, logError, logError, logInfo, logInfo, logName, logTrace, logTrace, logWarning, logWarning
public static String SPARK_ROW_SCHEMA()
public static scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> getSchema(org.apache.hadoop.conf.Configuration configuration)
public static void setSchema(scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> schema, org.apache.hadoop.conf.Configuration configuration)
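The two static schema helpers are a matched pair: setSchema publishes the Catalyst attributes into a Hadoop Configuration (presumably under the key returned by SPARK_ROW_SCHEMA()), and getSchema reads them back, typically on the task side inside init. A minimal round-trip sketch, assuming the attribute Seq comes from a Catalyst plan elsewhere and using the same package assumption as the earlier example:

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.spark.sql.catalyst.expressions.Attribute;
import org.apache.spark.sql.parquet.RowWriteSupport;
import scala.collection.Seq;

class SchemaRoundTripSketch {
  // Store the schema in the job configuration, then read it back the way an
  // executor-side RowWriteSupport.init(conf) presumably does.
  static Seq<Attribute> roundTrip(Seq<Attribute> attributes) {
    Configuration conf = new Configuration();
    RowWriteSupport.setSchema(attributes, conf);
    return RowWriteSupport.getSchema(conf);
  }
}
```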
public parquet.io.api.RecordConsumer writer()
public org.apache.spark.sql.catalyst.expressions.Attribute[] attributes()
public parquet.hadoop.api.WriteSupport.WriteContext init(org.apache.hadoop.conf.Configuration configuration)
Specified by: init in class parquet.hadoop.api.WriteSupport<Row>
public void prepareForWrite(parquet.io.api.RecordConsumer recordConsumer)
Specified by: prepareForWrite in class parquet.hadoop.api.WriteSupport<Row>
public void write(Row record)
Specified by: write in class parquet.hadoop.api.WriteSupport<Row>
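Together, init, prepareForWrite and write follow the parquet.hadoop.api.WriteSupport contract: init is called once with the job configuration, prepareForWrite binds the low-level RecordConsumer, and write is called once per record. A driver-style sketch of that call order (in practice parquet-hadoop makes these calls, not user code; the schema, consumer and row arguments are placeholders, and the import for the Row type used throughout this page is omitted):

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.spark.sql.catalyst.expressions.Attribute;
import org.apache.spark.sql.parquet.RowWriteSupport;
import parquet.io.api.RecordConsumer;
import scala.collection.Seq;

class WriteLifecycleSketch {
  static void writeOneRow(Seq<Attribute> schema, RecordConsumer consumer, Row row) {
    Configuration conf = new Configuration();
    RowWriteSupport.setSchema(schema, conf);  // publish the schema for init() to pick up
    RowWriteSupport support = new RowWriteSupport();
    support.init(conf);                       // 1. read the configuration, build the WriteContext
    support.prepareForWrite(consumer);        // 2. bind the Parquet RecordConsumer
    support.write(row);                       // 3. emit one Row as a Parquet record
  }
}
```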
public void writeValue(DataType schema, Object value)
public void writePrimitive(DataType schema, Object value)
public void writeStruct(StructType schema, Row struct)
public void writeArray(ArrayType schema, scala.collection.Seq<Object> array)
public void writeMap(MapType schema, scala.collection.immutable.Map<?,Object> map)
public void writeDecimal(Decimal decimal, int precision)
public void writeTimestamp(java.sql.Timestamp ts)
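The remaining write* methods form a small family keyed by the Catalyst DataType: writeValue plausibly dispatches struct, array and map schemas to writeStruct, writeArray and writeMap, and everything else to writePrimitive, with writeDecimal and writeTimestamp covering two encodings that need special handling. A hedged sketch of that dispatch, illustrative only and not the actual implementation; imports are omitted since this page leaves the type names (DataType, StructType, ArrayType, MapType, Row) unqualified:

```java
class WriteValueDispatchSketch {
  // Illustrative only: approximates how the per-type writers on this page relate.
  @SuppressWarnings("unchecked")
  static void writeValueSketch(RowWriteSupport support, DataType schema, Object value) {
    if (value == null) {
      return;                                  // nulls are simply skipped in this sketch
    } else if (schema instanceof StructType) {
      support.writeStruct((StructType) schema, (Row) value);
    } else if (schema instanceof ArrayType) {
      support.writeArray((ArrayType) schema, (scala.collection.Seq<Object>) value);
    } else if (schema instanceof MapType) {
      support.writeMap((MapType) schema, (scala.collection.immutable.Map<?, Object>) value);
    } else {
      support.writePrimitive(schema, value);   // ints, strings, doubles, ...
    }
  }
}
```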