**Packages that use IndexSpec**

Package | Description |
---|---|
io.druid.indexer | |
io.druid.indexer.updater | |
io.druid.indexing.common.task | |
io.druid.segment | |
io.druid.segment.indexing | |

**Methods in io.druid.indexer that return IndexSpec**

Modifier and Type | Method and Description |
---|---|
IndexSpec | HadoopTuningConfig.getIndexSpec() |
IndexSpec | HadoopDruidIndexerConfig.getIndexSpec() |

**Constructors in io.druid.indexer with parameters of type IndexSpec**

Constructor and Description |
---|
HadoopTuningConfig(String workingPath, String version, PartitionsSpec partitionsSpec, Map<org.joda.time.DateTime,List<HadoopyShardSpec>> shardSpecs, IndexSpec indexSpec, Integer maxRowsInMemory, boolean leaveIntermediate, Boolean cleanupOnFailure, boolean overwriteFiles, boolean ignoreInvalidRows, Map<String,String> jobProperties, boolean combineText, boolean persistInHeap, boolean ingestOffheap, Integer bufferSize, Float aggregationBufferRatio, Boolean useCombiner) |
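
This constructor takes every tuning knob positionally. The sketch below fills the signature listed above with illustrative values only; the working path, the numbers, the nulls used for the partitioning and shard-spec arguments, and the no-argument IndexSpec constructor are assumptions rather than anything stated on this page.

```java
import io.druid.indexer.HadoopTuningConfig;
import io.druid.segment.IndexSpec;

import java.util.HashMap;

public class HadoopTuningConfigExample
{
  public static HadoopTuningConfig buildTuningConfig()
  {
    // Illustrative values only; nulls are placeholders for components not covered on this page.
    return new HadoopTuningConfig(
        "/tmp/druid/hadoop-working",    // workingPath (illustrative)
        null,                           // version
        null,                           // partitionsSpec
        null,                           // shardSpecs
        new IndexSpec(),                // indexSpec (assumes a no-arg constructor for defaults)
        75000,                          // maxRowsInMemory
        false,                          // leaveIntermediate
        true,                           // cleanupOnFailure
        false,                          // overwriteFiles
        false,                          // ignoreInvalidRows
        new HashMap<String, String>(),  // jobProperties
        false,                          // combineText
        false,                          // persistInHeap
        false,                          // ingestOffheap
        null,                           // bufferSize
        null,                           // aggregationBufferRatio
        false                           // useCombiner
    );
  }
}
```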

**Methods in io.druid.indexer.updater that return IndexSpec**

Modifier and Type | Method and Description |
---|---|
IndexSpec | HadoopDruidConverterConfig.getIndexSpec() |

**Constructors in io.druid.indexer.updater with parameters of type IndexSpec**

Constructor and Description |
---|
HadoopDruidConverterConfig(String dataSource, org.joda.time.Interval interval, IndexSpec indexSpec, List<io.druid.timeline.DataSegment> segments, Boolean validate, URI distributedSuccessCache, Map<String,String> hadoopProperties, String jobPriority, String segmentOutputPath) |
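
As with the tuning config above, the converter config is built positionally. The sketch below fills the listed parameters with illustrative values; the data-source name, interval, URIs, and the no-argument IndexSpec constructor are assumptions, and nulls stand in for arguments not covered on this page.

```java
import io.druid.indexer.updater.HadoopDruidConverterConfig;
import io.druid.segment.IndexSpec;

import org.joda.time.Interval;

import java.net.URI;
import java.util.Collections;

public class ConverterConfigExample
{
  public static HadoopDruidConverterConfig buildConverterConfig()
  {
    return new HadoopDruidConverterConfig(
        "wikipedia",                                               // dataSource (illustrative)
        Interval.parse("2015-01-01/2015-02-01"),                   // interval (illustrative)
        new IndexSpec(),                                           // indexSpec (assumed no-arg constructor)
        null,                                                      // segments (placeholder)
        true,                                                      // validate
        URI.create("hdfs://namenode:8020/tmp/converter-success"),  // distributedSuccessCache (illustrative)
        Collections.<String, String>emptyMap(),                    // hadoopProperties
        null,                                                      // jobPriority
        "hdfs://namenode:8020/druid/segments"                      // segmentOutputPath (illustrative)
    );
  }
}
```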

**Methods in io.druid.indexing.common.task that return IndexSpec**

Modifier and Type | Method and Description |
---|---|
IndexSpec | IndexTask.IndexTuningConfig.getIndexSpec() |
IndexSpec | ConvertSegmentTask.getIndexSpec() |

**Methods in io.druid.indexing.common.task with parameters of type IndexSpec**

Modifier and Type | Method and Description |
---|---|
static ConvertSegmentTask | ConvertSegmentTask.create(io.druid.timeline.DataSegment segment, IndexSpec indexSpec, boolean force, boolean validate). Create a task to update the specified segment to the most recent binary version with the specified indexSpec. |
static ConvertSegmentTask | ConvertSegmentTask.create(String dataSource, org.joda.time.Interval interval, IndexSpec indexSpec, boolean force, boolean validate). Create a segment converter task to convert a segment to the most recent version, including the specified indexSpec. |
protected Iterable<Task> | HadoopConverterTask.generateSubTasks(String groupId, Iterable<io.druid.timeline.DataSegment> segments, IndexSpec indexSpec, boolean force, boolean validate) |
protected Iterable<Task> | ConvertSegmentTask.generateSubTasks(String groupId, Iterable<io.druid.timeline.DataSegment> segments, IndexSpec indexSpec, boolean force, boolean validate) |
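
As an illustration of the interval-based factory method above, the sketch below builds a conversion task for a hypothetical data source. The data source name and interval are made up, and the no-argument IndexSpec constructor is assumed.

```java
import io.druid.indexing.common.task.ConvertSegmentTask;
import io.druid.segment.IndexSpec;

import org.joda.time.Interval;

public class ConvertTaskExample
{
  public static ConvertSegmentTask rewriteSegments()
  {
    IndexSpec indexSpec = new IndexSpec(); // assumed no-arg constructor for default settings

    // Signature taken from the table above: convert segments to the most recent
    // binary version with the specified indexSpec.
    return ConvertSegmentTask.create(
        "wikipedia",                              // dataSource (illustrative)
        Interval.parse("2015-01-01/2015-02-01"),  // interval (illustrative)
        indexSpec,
        true,                                     // force
        true                                      // validate
    );
  }
}
```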

**Constructors in io.druid.indexing.common.task with parameters of type IndexSpec**

Constructor and Description |
---|
AppendTask(String id, String dataSource, List<io.druid.timeline.DataSegment> segments, IndexSpec indexSpec) |
ConvertSegmentTask.SubTask(String groupId, io.druid.timeline.DataSegment segment, IndexSpec indexSpec, Boolean force, Boolean validate) |
ConvertSegmentTask(String id, String dataSource, org.joda.time.Interval interval, io.druid.timeline.DataSegment segment, IndexSpec indexSpec, boolean force, boolean validate) |
HadoopConverterTask(String id, String dataSource, org.joda.time.Interval interval, IndexSpec indexSpec, boolean force, Boolean validate, List<String> hadoopDependencyCoordinates, URI distributedSuccessCache, String jobPriority, String segmentOutputPath, String classpathPrefix) |
IndexTask.IndexTuningConfig(int targetPartitionSize, int rowFlushBoundary, Integer numShards, IndexSpec indexSpec) |
MergeTask(String id, String dataSource, List<io.druid.timeline.DataSegment> segments, List<AggregatorFactory> aggregators, IndexSpec indexSpec) |
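
Of these, IndexTask.IndexTuningConfig has the smallest surface: a target partition size, a row flush boundary, an optional shard count, and the IndexSpec. A sketch with illustrative numbers, again assuming a no-argument IndexSpec constructor:

```java
import io.druid.indexing.common.task.IndexTask;
import io.druid.segment.IndexSpec;

public class IndexTuningConfigExample
{
  public static IndexTask.IndexTuningConfig buildTuningConfig()
  {
    // Values are illustrative, not recommendations.
    return new IndexTask.IndexTuningConfig(
        5_000_000,       // targetPartitionSize
        75_000,          // rowFlushBoundary
        null,            // numShards (no explicit shard count in this sketch)
        new IndexSpec()  // indexSpec (assumed no-arg constructor for defaults)
    );
  }
}
```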

**Methods in io.druid.segment with parameters of type IndexSpec**

Modifier and Type | Method and Description |
---|---|
static File | IndexMerger.append(List<IndexableAdapter> indexes, File outDir, IndexSpec indexSpec) |
static File | IndexMaker.append(List<IndexableAdapter> adapters, File outDir, IndexSpec indexSpec) |
static File | IndexMerger.append(List<IndexableAdapter> indexes, File outDir, IndexSpec indexSpec, ProgressIndicator progress) |
static File | IndexMaker.append(List<IndexableAdapter> adapters, File outDir, ProgressIndicator progress, IndexSpec indexSpec) |
static File | IndexMerger.convert(File inDir, File outDir, IndexSpec indexSpec) |
static File | IndexMaker.convert(File inDir, File outDir, IndexSpec indexSpec) |
static File | IndexMerger.convert(File inDir, File outDir, IndexSpec indexSpec, ProgressIndicator progress) |
static File | IndexMaker.convert(File inDir, File outDir, IndexSpec indexSpec, ProgressIndicator progress) |
static boolean | IndexIO.convertSegment(File toConvert, File converted, IndexSpec indexSpec) |
static boolean | IndexIO.convertSegment(File toConvert, File converted, IndexSpec indexSpec, boolean forceIfCurrent, boolean validate) |
static void | IndexIO.DefaultIndexIOHandler.convertV8toV9(File v8Dir, File v9Dir, IndexSpec indexSpec) |
static File | IndexMerger.merge(List<IndexableAdapter> indexes, AggregatorFactory[] metricAggs, File outDir, IndexSpec indexSpec) |
static File | IndexMaker.merge(List<IndexableAdapter> adapters, AggregatorFactory[] metricAggs, File outDir, IndexSpec indexSpec) |
static File | IndexMerger.merge(List<IndexableAdapter> indexes, AggregatorFactory[] metricAggs, File outDir, IndexSpec indexSpec, ProgressIndicator progress) |
static File | IndexMaker.merge(List<IndexableAdapter> adapters, AggregatorFactory[] metricAggs, File outDir, IndexSpec indexSpec, ProgressIndicator progress) |
static File | IndexMerger.mergeQueryableIndex(List<QueryableIndex> indexes, AggregatorFactory[] metricAggs, File outDir, IndexSpec indexSpec) |
static File | IndexMaker.mergeQueryableIndex(List<QueryableIndex> indexes, AggregatorFactory[] metricAggs, File outDir, IndexSpec indexSpec) |
static File | IndexMerger.mergeQueryableIndex(List<QueryableIndex> indexes, AggregatorFactory[] metricAggs, File outDir, IndexSpec indexSpec, ProgressIndicator progress) |
static File | IndexMaker.mergeQueryableIndex(List<QueryableIndex> indexes, AggregatorFactory[] metricAggs, File outDir, IndexSpec indexSpec, ProgressIndicator progress) |
static File | IndexMerger.persist(IncrementalIndex index, File outDir, IndexSpec indexSpec) |
static File | IndexMaker.persist(IncrementalIndex index, File outDir, IndexSpec indexSpec) |
static File | IndexMerger.persist(IncrementalIndex index, org.joda.time.Interval dataInterval, File outDir, IndexSpec indexSpec). This is *not* thread-safe; havoc will ensue if it is called while writes are still occurring on the IncrementalIndex object. |
static File | IndexMaker.persist(IncrementalIndex index, org.joda.time.Interval dataInterval, File outDir, IndexSpec indexSpec). This is *not* thread-safe; havoc will ensue if it is called while writes are still occurring on the IncrementalIndex object. |
static File | IndexMerger.persist(IncrementalIndex index, org.joda.time.Interval dataInterval, File outDir, IndexSpec indexSpec, ProgressIndicator progress) |
static File | IndexMaker.persist(IncrementalIndex index, org.joda.time.Interval dataInterval, File outDir, IndexSpec indexSpec, ProgressIndicator progress) |
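
The IndexMerger/IndexMaker and IndexIO entry points above all thread an IndexSpec through to control how the resulting segment files are written. Below is a minimal sketch that uses two of the signatures listed above; it assumes IndexSpec has a no-argument constructor and that both calls may throw IOException, and the meaning attached to the boolean result of convertSegment is an assumption rather than something documented on this page. Per the note above, the index passed to persist must not be receiving writes while the call runs.

```java
import io.druid.segment.IndexIO;
import io.druid.segment.IndexMerger;
import io.druid.segment.IndexSpec;
import io.druid.segment.incremental.IncrementalIndex;

import java.io.File;
import java.io.IOException;

public class SegmentWriteExample
{
  /**
   * Persist an in-memory index to disk and rewrite an existing segment directory,
   * both with the same IndexSpec. The caller supplies the index and directories.
   */
  public static void persistAndConvert(
      IncrementalIndex index,
      File persistDir,
      File segmentToConvert,
      File convertedDir
  ) throws IOException
  {
    IndexSpec indexSpec = new IndexSpec(); // assumed no-arg constructor for default settings

    // IndexMerger.persist(IncrementalIndex, File, IndexSpec): not thread-safe with
    // concurrent writes to the index (see the note in the table above).
    IndexMerger.persist(index, persistDir, indexSpec);

    // IndexIO.convertSegment(File, File, IndexSpec) returns a boolean, treated here
    // as "whether a conversion was performed" (an assumption).
    boolean converted = IndexIO.convertSegment(segmentToConvert, convertedDir, indexSpec);
    if (!converted) {
      System.out.println("Segment was not converted: " + segmentToConvert);
    }
  }
}
```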

**Methods in io.druid.segment.indexing that return IndexSpec**

Modifier and Type | Method and Description |
---|---|
IndexSpec | RealtimeTuningConfig.getIndexSpec() |

**Constructors in io.druid.segment.indexing with parameters of type IndexSpec**

Constructor and Description |
---|
RealtimeTuningConfig(Integer maxRowsInMemory, org.joda.time.Period intermediatePersistPeriod, org.joda.time.Period windowPeriod, File basePersistDirectory, VersioningPolicy versioningPolicy, RejectionPolicyFactory rejectionPolicyFactory, Integer maxPendingPersists, io.druid.timeline.partition.ShardSpec shardSpec, IndexSpec indexSpec, Boolean persistInHeap, Boolean ingestOffheap, Integer bufferSize, Float aggregationBufferRatio) |
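
Finally, a sketch of wiring an IndexSpec into a realtime tuning config via the constructor above. Every literal is illustrative, the nulls are assumed-acceptable placeholders for collaborators not covered on this page (versioning policy, rejection policy, shard spec), and the no-argument IndexSpec constructor is assumed.

```java
import io.druid.segment.IndexSpec;
import io.druid.segment.indexing.RealtimeTuningConfig;

import org.joda.time.Period;

import java.io.File;

public class RealtimeTuningConfigExample
{
  public static RealtimeTuningConfig buildTuningConfig()
  {
    return new RealtimeTuningConfig(
        75000,                           // maxRowsInMemory
        new Period("PT10M"),             // intermediatePersistPeriod
        new Period("PT10M"),             // windowPeriod
        new File("/tmp/druid/persist"),  // basePersistDirectory (illustrative)
        null,                            // versioningPolicy
        null,                            // rejectionPolicyFactory
        0,                               // maxPendingPersists
        null,                            // shardSpec
        new IndexSpec(),                 // indexSpec (assumed no-arg constructor)
        false,                           // persistInHeap
        false,                           // ingestOffheap
        null,                            // bufferSize
        null                             // aggregationBufferRatio
    );
  }
}
```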