Modifier and Type | Method and Description
---|---
InputRow | BenchmarkDataGenerator.nextRow()
Modifier and Type | Class and Description
---|---
class | MapBasedInputRow
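MapBasedInputRow is the most common concrete InputRow: an epoch timestamp paired with a plain map of column values. A minimal construction sketch, assuming the constructor that takes a millis timestamp, a dimension list, and the raw event map; the event fields here are illustrative, not taken from this page:

```java
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import io.druid.data.input.InputRow;
import io.druid.data.input.MapBasedInputRow;

public class MapBasedInputRowExample
{
  public static void main(String[] args)
  {
    Map<String, Object> event = new HashMap<>();
    event.put("page", "druid_wiki");
    event.put("language", "en");
    event.put("added", 57L);

    List<String> dimensions = Arrays.asList("page", "language");

    // Timestamp in millis plus the raw event map; "added" stays reachable
    // for aggregators via getRaw()/getLongMetric() even though it is not a dimension.
    InputRow row = new MapBasedInputRow(System.currentTimeMillis(), dimensions, event);

    System.out.println(row.getTimestamp() + " " + row.getDimension("page"));
  }
}
```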
Modifier and Type | Method and Description
---|---
InputRow | FirehoseV2.currRow()
InputRow | Firehose.nextRow(): The next row available. (See the drain sketch after this table.)
InputRow | AvroStreamInputRowParser.parse(ByteBuffer input)
InputRow | ProtoBufInputRowParser.parse(ByteBuffer input)
InputRow | AvroHadoopInputRowParser.parse(org.apache.avro.generic.GenericRecord record)
protected static InputRow | AvroStreamInputRowParser.parseGenericRecord(org.apache.avro.generic.GenericRecord record, ParseSpec parseSpec, List<String> dimensions, boolean fromPigAvroStorage, boolean binaryAsString)
static InputRow | Rows.toCaseInsensitiveInputRow(Row row, List<String> dimensions)
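Firehose.nextRow() is consumed in a hasMore()/nextRow() loop. A hedged sketch of the usual drain-and-commit pattern; how the Firehose is obtained (normally from a FirehoseFactory) is outside this page:

```java
import java.io.IOException;

import io.druid.data.input.Firehose;
import io.druid.data.input.InputRow;

public class FirehoseDrain
{
  static void drain(Firehose firehose) throws IOException
  {
    try {
      while (firehose.hasMore()) {
        InputRow row = firehose.nextRow();  // the next row available
        if (row == null) {
          continue;  // some firehoses surface unparseable input as null
        }
        System.out.println(row.getTimestampFromEpoch() + " " + row.getDimensions());
      }
      // commit() hands back a Runnable that marks the drained data as committed.
      firehose.commit().run();
    }
    finally {
      firehose.close();
    }
  }
}
```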
Modifier and Type | Method and Description
---|---
static List<Object> | Rows.toGroupKey(long timeStamp, InputRow inputRow)
Modifier and Type | Method and Description
---|---
InputRow | FileIteratingFirehose.nextRow()
InputRow | StringInputRowParser.parse(ByteBuffer input)
InputRow | NoopInputRowParser.parse(InputRow input)
InputRow | MapInputRowParser.parse(Map<String,Object> theMap)
InputRow | StringInputRowParser.parse(String input)
InputRow | InputRowParser.parse(T input)
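The parse overloads above all map raw input to an InputRow. A sketch of StringInputRowParser over a JSON ParseSpec; the spec values are illustrative, and constructor shapes for these classes varied across Druid versions, so treat the exact arguments as assumptions:

```java
import java.util.Arrays;

import io.druid.data.input.InputRow;
import io.druid.data.input.impl.DimensionsSpec;
import io.druid.data.input.impl.JSONParseSpec;
import io.druid.data.input.impl.StringInputRowParser;
import io.druid.data.input.impl.TimestampSpec;

public class ParserExample
{
  public static void main(String[] args)
  {
    // Assumes a StringInputRowParser constructor taking just a ParseSpec and
    // a two-argument JSONParseSpec; both gained extra parameters in later versions.
    StringInputRowParser parser = new StringInputRowParser(
        new JSONParseSpec(
            new TimestampSpec("ts", "iso", null),
            new DimensionsSpec(
                DimensionsSpec.getDefaultSchemas(Arrays.asList("page", "language")),
                null,
                null
            )
        )
    );

    InputRow row = parser.parse("{\"ts\":\"2017-01-01T00:00:00Z\",\"page\":\"x\",\"language\":\"en\"}");
    System.out.println(row.getDimension("page"));
  }
}
```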
Modifier and Type | Method and Description
---|---
InputRow | NoopInputRowParser.parse(InputRow input)
Modifier and Type | Method and Description
---|---
InputRow | OrcHadoopInputRowParser.parse(org.apache.hadoop.hive.ql.io.orc.OrcStruct input)
Modifier and Type | Method and Description
---|---
InputRow | ParquetHadoopInputRowParser.parse(org.apache.avro.generic.GenericRecord record)
Modifier and Type | Method and Description
---|---
InputRow | ThriftInputRowParser.parse(Object input)
Modifier and Type | Method and Description
---|---
static InputRow | InputRowSerde.fromBytes(byte[] data, AggregatorFactory[] aggs)
InputRow | HadoopyStringInputRowParser.parse(Object input)
static InputRow | HadoopDruidIndexerMapper.parseInputRow(Object value, InputRowParser parser)
Modifier and Type | Method and Description
---|---
com.google.common.base.Optional<Bucket> | HadoopDruidIndexerConfig.getBucket(InputRow inputRow): Get the proper bucket for some input row.
protected void | IndexGeneratorJob.IndexGeneratorMapper.innerMap(InputRow inputRow, Object value, org.apache.hadoop.mapreduce.Mapper.Context context, boolean reportParseExceptions)
protected void | DeterminePartitionsJob.DeterminePartitionsGroupByMapper.innerMap(InputRow inputRow, Object value, org.apache.hadoop.mapreduce.Mapper.Context context, boolean reportParseExceptions)
protected void | DeterminePartitionsJob.DeterminePartitionsDimSelectionAssumeGroupedMapper.innerMap(InputRow inputRow, Object value, org.apache.hadoop.mapreduce.Mapper.Context context, boolean reportParseExceptions)
protected abstract void | HadoopDruidIndexerMapper.innerMap(InputRow inputRow, Object value, org.apache.hadoop.mapreduce.Mapper.Context context, boolean reportParseExceptions)
protected void | DetermineHashedPartitionsJob.DetermineCardinalityMapper.innerMap(InputRow inputRow, Object value, org.apache.hadoop.mapreduce.Mapper.Context context, boolean reportParseExceptions)
static byte[] | InputRowSerde.toBytes(InputRow row, AggregatorFactory[] aggs, boolean reportParseExceptions)
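InputRowSerde.toBytes and fromBytes round-trip a row through the Hadoop shuffle, using the signatures listed above. A hedged sketch of the round trip; the aggregator array and the row contents are illustrative:

```java
import java.util.Arrays;

import com.google.common.collect.ImmutableMap;

import io.druid.data.input.InputRow;
import io.druid.data.input.MapBasedInputRow;
import io.druid.indexer.InputRowSerde;
import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.LongSumAggregatorFactory;

public class SerdeRoundTrip
{
  public static void main(String[] args)
  {
    AggregatorFactory[] aggs = {new LongSumAggregatorFactory("added", "added")};

    InputRow in = new MapBasedInputRow(
        System.currentTimeMillis(),
        Arrays.asList("page"),
        ImmutableMap.<String, Object>of("page", "x", "added", 57L)
    );

    // Serialize for the shuffle, then reconstruct; metric columns are
    // read back through the aggregators passed to both calls.
    byte[] bytes = InputRowSerde.toBytes(in, aggs, true);
    InputRow out = InputRowSerde.fromBytes(bytes, aggs);

    System.out.println(out.getDimension("page"));
  }
}
```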
Modifier and Type | Class and Description
---|---
class | SegmentInputRow: Marks InputRow instances that have already been combined; their columns appear as they do in the segment after ingestion, not as they appear in the raw data.
Modifier and Type | Method and Description
---|---
InputRow | DatasourceRecordReader.getCurrentValue()
InputRow | SegmentInputRow.getDelegate()
Modifier and Type | Method and Description
---|---
org.apache.hadoop.mapreduce.RecordReader<org.apache.hadoop.io.NullWritable,InputRow> | DatasourceInputFormat.createRecordReader(org.apache.hadoop.mapreduce.InputSplit split, org.apache.hadoop.mapreduce.TaskAttemptContext context)
Constructor and Description
---
SegmentInputRow(InputRow delegate)
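SegmentInputRow simply wraps the row read back from an existing segment, as the constructor above shows. A sketch of how a consumer might unwrap the marker during re-ingestion; the branching logic is illustrative:

```java
import io.druid.data.input.InputRow;
import io.druid.indexer.hadoop.SegmentInputRow;

public class ReingestCheck
{
  static InputRow unwrap(InputRow row)
  {
    // Rows reread from segments are already rolled up; a caller that needs
    // the underlying row can unwrap the marker via getDelegate().
    if (row instanceof SegmentInputRow) {
      return ((SegmentInputRow) row).getDelegate();
    }
    return row;
  }
}
```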
Modifier and Type | Method and Description
---|---
InputRow | SpatialDimensionRowTransformer.apply(InputRow row)
InputRow | IncrementalIndex.formatRow(InputRow row)
Modifier and Type | Method and Description
---|---
int | IncrementalIndex.add(InputRow row): Adds a new row. (See the sketch after this table.)
protected Integer | OnheapIncrementalIndex.addToFacts(AggregatorFactory[] metrics, boolean deserializeComplexMetrics, boolean reportParseExceptions, InputRow row, AtomicInteger numEntries, IncrementalIndex.TimeAndDims key, ThreadLocal<InputRow> rowContainer, com.google.common.base.Supplier<InputRow> rowSupplier)
protected Integer | OffheapIncrementalIndex.addToFacts(AggregatorFactory[] metrics, boolean deserializeComplexMetrics, boolean reportParseExceptions, InputRow row, AtomicInteger numEntries, IncrementalIndex.TimeAndDims key, ThreadLocal<InputRow> rowContainer, com.google.common.base.Supplier<InputRow> rowSupplier)
protected abstract Integer | IncrementalIndex.addToFacts(AggregatorFactory[] metrics, boolean deserializeComplexMetrics, boolean reportParseExceptions, InputRow row, AtomicInteger numEntries, IncrementalIndex.TimeAndDims key, ThreadLocal<InputRow> rowContainer, com.google.common.base.Supplier<InputRow> rowSupplier)
InputRow | SpatialDimensionRowTransformer.apply(InputRow row)
InputRow | IncrementalIndex.formatRow(InputRow row)
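IncrementalIndex.add(InputRow) is the public ingestion entry point; the addToFacts overloads are the per-implementation hooks it dispatches to. A sketch of feeding a row into an OnheapIncrementalIndex; constructor and granularity class names shifted across Druid versions, so treat the exact arguments as assumptions:

```java
import java.util.Arrays;

import com.google.common.collect.ImmutableMap;

import io.druid.data.input.MapBasedInputRow;
import io.druid.granularity.QueryGranularities;
import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.CountAggregatorFactory;
import io.druid.segment.incremental.OnheapIncrementalIndex;

public class IndexAddExample
{
  public static void main(String[] args) throws Exception
  {
    // Assumes the (minTimestamp, queryGranularity, metrics, maxRowCount)
    // constructor; other versions expect an IncrementalIndexSchema instead.
    OnheapIncrementalIndex index = new OnheapIncrementalIndex(
        0L,
        QueryGranularities.MINUTE,
        new AggregatorFactory[]{new CountAggregatorFactory("count")},
        10_000
    );

    // add() returns the row count after the insert; rows sharing a truncated
    // timestamp and dimension values roll up into one fact.
    int numRows = index.add(
        new MapBasedInputRow(
            System.currentTimeMillis(),
            Arrays.asList("page"),
            ImmutableMap.<String, Object>of("page", "x")
        )
    );
    System.out.println("rows after add: " + numRows);
  }
}
```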
Modifier and Type | Method and Description
---|---
protected Integer | OnheapIncrementalIndex.addToFacts(AggregatorFactory[] metrics, boolean deserializeComplexMetrics, boolean reportParseExceptions, InputRow row, AtomicInteger numEntries, IncrementalIndex.TimeAndDims key, ThreadLocal<InputRow> rowContainer, com.google.common.base.Supplier<InputRow> rowSupplier)
protected Integer | OffheapIncrementalIndex.addToFacts(AggregatorFactory[] metrics, boolean deserializeComplexMetrics, boolean reportParseExceptions, InputRow row, AtomicInteger numEntries, IncrementalIndex.TimeAndDims key, ThreadLocal<InputRow> rowContainer, com.google.common.base.Supplier<InputRow> rowSupplier)
protected abstract Integer | IncrementalIndex.addToFacts(AggregatorFactory[] metrics, boolean deserializeComplexMetrics, boolean reportParseExceptions, InputRow row, AtomicInteger numEntries, IncrementalIndex.TimeAndDims key, ThreadLocal<InputRow> rowContainer, com.google.common.base.Supplier<InputRow> rowSupplier)
protected Aggregator[] | OnheapIncrementalIndex.initAggs(AggregatorFactory[] metrics, com.google.common.base.Supplier<InputRow> rowSupplier, boolean deserializeComplexMetrics)
protected BufferAggregator[] | OffheapIncrementalIndex.initAggs(AggregatorFactory[] metrics, com.google.common.base.Supplier<InputRow> rowSupplier, boolean deserializeComplexMetrics)
protected abstract AggregatorType[] | IncrementalIndex.initAggs(AggregatorFactory[] metrics, com.google.common.base.Supplier<InputRow> rowSupplier, boolean deserializeComplexMetrics)
protected ColumnSelectorFactory | IncrementalIndex.makeColumnSelectorFactory(AggregatorFactory agg, com.google.common.base.Supplier<InputRow> in, boolean deserializeComplexMetrics)
static ColumnSelectorFactory | IncrementalIndex.makeColumnSelectorFactory(VirtualColumns virtualColumns, AggregatorFactory agg, com.google.common.base.Supplier<InputRow> in, boolean deserializeComplexMetrics): Column selector used at ingestion time for inputs to aggregators.
Modifier and Type | Method and Description
---|---
SegmentIdentifier | FiniteAppenderatorDriver.add(InputRow row, String sequenceName, com.google.common.base.Supplier<Committer> committerSupplier): Add a row. (See the sketch after this table.)
int | AppenderatorPlumber.add(InputRow row, com.google.common.base.Supplier<Committer> committerSupplier)
int | AppenderatorImpl.add(SegmentIdentifier identifier, InputRow row, com.google.common.base.Supplier<Committer> committerSupplier)
int | Appenderator.add(SegmentIdentifier identifier, InputRow row, com.google.common.base.Supplier<Committer> committerSupplier): Add a row.
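Appenderator.add and FiniteAppenderatorDriver.add differ in who picks the segment: the Appenderator takes an explicit SegmentIdentifier, while the driver resolves one from a sequence name. A hedged sketch against the driver API; the firehose, driver, and committer supplier are assumed to be wired up elsewhere:

```java
import com.google.common.base.Supplier;

import io.druid.data.input.Committer;
import io.druid.data.input.Firehose;
import io.druid.data.input.InputRow;
import io.druid.segment.realtime.appenderator.FiniteAppenderatorDriver;
import io.druid.segment.realtime.appenderator.SegmentIdentifier;

public class DriverLoop
{
  static void run(Firehose firehose, FiniteAppenderatorDriver driver, Supplier<Committer> committerSupplier)
      throws Exception
  {
    while (firehose.hasMore()) {
      InputRow row = firehose.nextRow();
      // The driver resolves a segment from the sequence name and returns its
      // identifier; a null return is assumed here to mean no segment could
      // be allocated for the row's timestamp.
      SegmentIdentifier id = driver.add(row, "sequence-0", committerSupplier);
      if (id == null) {
        throw new IllegalStateException("Could not allocate segment for: " + row);
      }
    }
  }
}
```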
Modifier and Type | Method and Description
---|---
InputRow | IrcDecoder.decodeMessage(org.joda.time.DateTime timestamp, String channel, String msg)
InputRow | TimedShutoffFirehoseFactory.TimedShutoffFirehose.nextRow()
InputRow | ReplayableFirehoseFactory.ReplayableFirehose.nextRow()
InputRow | PredicateFirehose.nextRow()
InputRow | IngestSegmentFirehose.nextRow()
InputRow | EventReceiverFirehoseFactory.EventReceiverFirehose.nextRow()
InputRow | CombiningFirehoseFactory.CombiningFirehose.nextRow()
InputRow | IrcInputRowParser.parse(Pair<org.joda.time.DateTime,com.ircclouds.irc.api.domain.messages.ChannelPrivMsg> msg)
Modifier and Type | Method and Description
---|---
void | EventReceiverFirehoseFactory.EventReceiverFirehose.addRows(Iterable<InputRow> rows)
Constructor and Description
---
PredicateFirehose(Firehose firehose, com.google.common.base.Predicate<InputRow> predicate)
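PredicateFirehose wraps another Firehose and drops rows that fail the predicate, per the constructor above. A sketch that keeps only rows with a particular dimension value; the predicate itself is illustrative:

```java
import com.google.common.base.Predicate;

import io.druid.data.input.Firehose;
import io.druid.data.input.InputRow;
import io.druid.segment.realtime.firehose.PredicateFirehose;

public class FilteredFirehoseExample
{
  static Firehose onlyEnglish(Firehose delegate)
  {
    // Keep only rows whose "language" dimension contains "en".
    return new PredicateFirehose(
        delegate,
        new Predicate<InputRow>()
        {
          @Override
          public boolean apply(InputRow row)
          {
            return row != null && row.getDimension("language").contains("en");
          }
        }
    );
  }
}
```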
Modifier and Type | Method and Description
---|---
int | Sink.add(InputRow row)
int | RealtimePlumber.add(InputRow row, com.google.common.base.Supplier<Committer> committerSupplier)
int | Plumber.add(InputRow row, com.google.common.base.Supplier<Committer> committerSupplier)
Modifier and Type | Method and Description
---|---
Object | ComplexMetricExtractor.extractValue(InputRow inputRow, String metricName)
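ComplexMetricExtractor.extractValue pulls a complex metric's raw input out of an InputRow at ingestion time. A minimal implementation sketch; the serde registration that would make Druid call it is omitted, and the pass-through value handling is illustrative:

```java
import io.druid.data.input.InputRow;
import io.druid.segment.serde.ComplexMetricExtractor;

public class RawObjectExtractor implements ComplexMetricExtractor
{
  @Override
  public Class<?> extractedClass()
  {
    return Object.class;
  }

  @Override
  public Object extractValue(InputRow inputRow, String metricName)
  {
    // getRaw() preserves the original event value, unlike the
    // string-coercing getDimension().
    return inputRow.getRaw(metricName);
  }
}
```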
Modifier and Type | Method and Description
---|---
ShardSpec | ShardSpecLookup.getShardSpec(long timestamp, InputRow row)
protected int | HashBasedNumberedShardSpec.hash(long timestamp, InputRow inputRow)
boolean | ShardSpec.isInChunk(long timestamp, InputRow inputRow)
boolean | NoneShardSpec.isInChunk(long timestamp, InputRow inputRow)
boolean | SingleDimensionShardSpec.isInChunk(long timestamp, InputRow inputRow)
boolean | NumberedShardSpec.isInChunk(long timestamp, InputRow inputRow)
boolean | LinearShardSpec.isInChunk(long timestamp, InputRow inputRow)
boolean | HashBasedNumberedShardSpec.isInChunk(long timestamp, InputRow inputRow)
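The isInChunk variants decide whether a row belongs to a given partition, and ShardSpecLookup routes a row to its spec. A hedged routing sketch, assuming ShardSpec exposes a getLookup(List) factory as it did in this era; the spec list is supplied by the caller:

```java
import java.util.List;

import io.druid.data.input.InputRow;
import io.druid.timeline.partition.ShardSpec;
import io.druid.timeline.partition.ShardSpecLookup;

public class ShardRouting
{
  static ShardSpec route(List<ShardSpec> specs, long timestamp, InputRow row)
  {
    // Each spec family builds its own lookup over the full partition set;
    // the lookup's choice must agree with isInChunk on the chosen spec.
    ShardSpecLookup lookup = specs.get(0).getLookup(specs);
    ShardSpec chosen = lookup.getShardSpec(timestamp, row);

    assert chosen.isInChunk(timestamp, row);
    return chosen;
  }
}
```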
Copyright © 2011–2017. All rights reserved.