public class JobHelper extends Object
Modifier and Type | Class and Description |
---|---|
static interface |
JobHelper.DataPusher
Simple interface for retry operations
|
Constructor and Description |
---|
JobHelper() |
Modifier and Type | Method and Description |
---|---|
static long |
copyFileToZipStream(File file,
ZipOutputStream zipOutputStream,
org.apache.hadoop.util.Progressable progressable) |
static org.apache.hadoop.fs.Path |
distributedClassPath(org.apache.hadoop.fs.Path base) |
static org.apache.hadoop.fs.Path |
distributedClassPath(String path) |
static void |
ensurePaths(HadoopDruidIndexerConfig config) |
static URI |
getURIFromSegment(io.druid.timeline.DataSegment dataSegment) |
static org.apache.hadoop.conf.Configuration |
injectSystemProperties(org.apache.hadoop.conf.Configuration conf) |
static void |
injectSystemProperties(org.apache.hadoop.mapreduce.Job job) |
static org.apache.hadoop.fs.Path |
makeSegmentOutputPath(org.apache.hadoop.fs.Path basePath,
org.apache.hadoop.fs.FileSystem fileSystem,
io.druid.timeline.DataSegment segment) |
static org.apache.hadoop.fs.Path |
prependFSIfNullScheme(org.apache.hadoop.fs.FileSystem fs,
org.apache.hadoop.fs.Path path) |
static ProgressIndicator |
progressIndicatorForContext(org.apache.hadoop.mapreduce.TaskAttemptContext context) |
static boolean |
runJobs(List<Jobby> jobs,
HadoopDruidIndexerConfig config) |
static io.druid.timeline.DataSegment |
serializeOutIndex(io.druid.timeline.DataSegment segmentTemplate,
org.apache.hadoop.conf.Configuration configuration,
org.apache.hadoop.util.Progressable progressable,
org.apache.hadoop.mapreduce.TaskAttemptID taskAttemptID,
File mergedBase,
org.apache.hadoop.fs.Path segmentBasePath) |
static void |
setupClasspath(org.apache.hadoop.fs.Path distributedClassPath,
org.apache.hadoop.fs.Path intermediateClassPath,
org.apache.hadoop.mapreduce.Job job)
Uploads jar files to HDFS and configures the classpath.
|
static com.google.common.base.Predicate<Throwable> |
shouldRetryPredicate() |
static long |
unzipNoGuava(org.apache.hadoop.fs.Path zip,
org.apache.hadoop.conf.Configuration configuration,
File outDir,
org.apache.hadoop.util.Progressable progressable) |
static void |
writeSegmentDescriptor(org.apache.hadoop.fs.FileSystem outputFS,
io.druid.timeline.DataSegment segment,
org.apache.hadoop.fs.Path descriptorPath,
org.apache.hadoop.util.Progressable progressable) |
static long |
zipAndCopyDir(File baseDir,
OutputStream baseOutputStream,
org.apache.hadoop.util.Progressable progressable) |
public static org.apache.hadoop.fs.Path distributedClassPath(String path)
public static org.apache.hadoop.fs.Path distributedClassPath(org.apache.hadoop.fs.Path base)
public static void setupClasspath(org.apache.hadoop.fs.Path distributedClassPath, org.apache.hadoop.fs.Path intermediateClassPath, org.apache.hadoop.mapreduce.Job job) throws IOException
Parameters:
distributedClassPath - classpath shared across multiple jobs
intermediateClassPath - classpath exclusive for this job; used to upload SNAPSHOT jar files
job - job to run
Throws:
IOException
public static final com.google.common.base.Predicate<Throwable> shouldRetryPredicate()
public static void injectSystemProperties(org.apache.hadoop.mapreduce.Job job)
public static org.apache.hadoop.conf.Configuration injectSystemProperties(org.apache.hadoop.conf.Configuration conf)
public static void ensurePaths(HadoopDruidIndexerConfig config)
public static boolean runJobs(List<Jobby> jobs, HadoopDruidIndexerConfig config)
public static io.druid.timeline.DataSegment serializeOutIndex(io.druid.timeline.DataSegment segmentTemplate, org.apache.hadoop.conf.Configuration configuration, org.apache.hadoop.util.Progressable progressable, org.apache.hadoop.mapreduce.TaskAttemptID taskAttemptID, File mergedBase, org.apache.hadoop.fs.Path segmentBasePath) throws IOException
IOException
public static void writeSegmentDescriptor(org.apache.hadoop.fs.FileSystem outputFS, io.druid.timeline.DataSegment segment, org.apache.hadoop.fs.Path descriptorPath, org.apache.hadoop.util.Progressable progressable) throws IOException
IOException
public static long zipAndCopyDir(File baseDir, OutputStream baseOutputStream, org.apache.hadoop.util.Progressable progressable) throws IOException
IOException
public static long copyFileToZipStream(File file, ZipOutputStream zipOutputStream, org.apache.hadoop.util.Progressable progressable) throws IOException
IOException
public static org.apache.hadoop.fs.Path makeSegmentOutputPath(org.apache.hadoop.fs.Path basePath, org.apache.hadoop.fs.FileSystem fileSystem, io.druid.timeline.DataSegment segment)
public static org.apache.hadoop.fs.Path prependFSIfNullScheme(org.apache.hadoop.fs.FileSystem fs, org.apache.hadoop.fs.Path path)
public static long unzipNoGuava(org.apache.hadoop.fs.Path zip, org.apache.hadoop.conf.Configuration configuration, File outDir, org.apache.hadoop.util.Progressable progressable) throws IOException
IOException
public static URI getURIFromSegment(io.druid.timeline.DataSegment dataSegment)
public static ProgressIndicator progressIndicatorForContext(org.apache.hadoop.mapreduce.TaskAttemptContext context)
Copyright © 2011–2016. All rights reserved.