Package org.apache.hadoop.io, class SequenceFile

Examples of org.apache.hadoop.io.SequenceFile.CompressionType
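SequenceFile.CompressionType is the enum that controls how a SequenceFile is compressed: NONE (no compression), RECORD (each value compressed on its own), or BLOCK (keys and values gathered into blocks and compressed together). Before the project-code fragments below, here is a minimal, self-contained sketch of writing a block-compressed SequenceFile directly, assuming the option-based SequenceFile.createWriter API of Hadoop 2.x and later; the output path, key/value classes, and codec choice are illustrative assumptions, not taken from the fragments.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.SequenceFile.CompressionType;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.DefaultCodec;
import org.apache.hadoop.util.ReflectionUtils;

public class CompressionTypeWriteExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    Path file = new Path("/tmp/compression-type-example.seq");  // illustrative path

    // Instantiate the codec reflectively, as the fragments below do, so the
    // codec picks up the configuration.
    CompressionCodec codec = ReflectionUtils.newInstance(DefaultCodec.class, conf);

    // BLOCK compression batches many key/value pairs per compressed block;
    // RECORD would compress each value individually, NONE not at all.
    try (SequenceFile.Writer writer = SequenceFile.createWriter(conf,
        SequenceFile.Writer.file(file),
        SequenceFile.Writer.keyClass(Text.class),
        SequenceFile.Writer.valueClass(IntWritable.class),
        SequenceFile.Writer.compression(CompressionType.BLOCK, codec))) {
      writer.append(new Text("key-1"), new IntWritable(1));
    }
  }
}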


          // Resolve the configured compressor class and SequenceFile
          // CompressionType, then create the record writer.
          Class<? extends CompressionCodec> codec = (Class<? extends CompressionCodec>) Class.forName(codecStr);
          FileOutputFormat.setOutputCompressorClass(jc_output, codec);
        }
        String type = conf.getCompressType();
        if (type != null && !type.trim().equals("")) {
          CompressionType style = CompressionType.valueOf(type);
          SequenceFileOutputFormat.setOutputCompressionType(jc, style);
        }
      }
      outWriter = getRecordWriter(jc_output, hiveOutputFormat, outputClass, isCompressed, tableInfo.getProperties(), outPath);


                                                         Path file, Class<?> keyClass,
                                                         Class<?> valClass,
                                                         boolean isCompressed)
    throws IOException {
    // Default to CompressionType.NONE; only consult the job's compression
    // settings when compression has been requested.
    CompressionCodec codec = null;
    CompressionType compressionType = CompressionType.NONE;
    Class codecClass = null;
    if (isCompressed) {
      compressionType = SequenceFileOutputFormat.getOutputCompressionType(jc);
      codecClass = SequenceFileOutputFormat.getOutputCompressorClass(jc, DefaultCodec.class);
      codec = (CompressionCodec)

          if (slice == DEFAULT_SLICE) {
            wname = new Path(new Path(new Path(out, segmentName), dirName), name);
          } else {
            wname = new Path(new Path(new Path(out, segmentName + "-" + slice), dirName), name);
          }
          // Use the job-level compression type, but force RECORD compression
          // when the value class is ParseText.
          CompressionType compType =
              SequenceFileOutputFormat.getOutputCompressionType(job);
          if (clazz.isAssignableFrom(ParseText.class)) {
            compType = CompressionType.RECORD;
          }
          res = new MapFile.Writer(job, fs, wname.toString(), Text.class, clazz, compType, progress);

    // Read crawl-related settings plus the job's output CompressionType, then
    // build the output paths for parse text, parse data, and crawl data.
    final int interval = job.getInt("db.fetch.interval.default", 2592000);
    final boolean ignoreExternalLinks = job.getBoolean("db.ignore.external.links", false);
    int maxOutlinksPerPage = job.getInt("db.max.outlinks.per.page", 100);
    final int maxOutlinks = (maxOutlinksPerPage < 0) ? Integer.MAX_VALUE
                                                     : maxOutlinksPerPage;
    final CompressionType compType = SequenceFileOutputFormat.getOutputCompressionType(job);
    Path out = FileOutputFormat.getOutputPath(job);

    Path text = new Path(new Path(out, ParseText.DIR_NAME), name);
    Path data = new Path(new Path(out, ParseData.DIR_NAME), name);
    Path crawl = new Path(new Path(out, CrawlDatum.PARSE_DIR_NAME), name);

      // get the path of the temporary output file
      Path file = getDefaultWorkFile(job, "");
     
      FileSystem fs = file.getFileSystem(job.getConfiguration());
      CompressionCodec codec = null;
      CompressionType compressionType = CompressionType.NONE;
      if (getCompressOutput(job)) {
        // find the kind of compression to do
        compressionType = SequenceFileOutputFormat.getOutputCompressionType(job);
       
        // find the right codec
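The fragment above uses the newer org.apache.hadoop.mapreduce output-format API (getDefaultWorkFile, job.getConfiguration()). A hedged driver-side sketch of enabling SequenceFile output compression with that API follows; the job setup is an illustrative assumption, not taken from the fragment.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.SequenceFile.CompressionType;
import org.apache.hadoop.io.compress.DefaultCodec;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;

public class NewApiCompressionConfigSketch {
  static Job newJob(Configuration conf) throws Exception {
    Job job = Job.getInstance(conf, "sequencefile-compression-sketch");
    job.setOutputFormatClass(SequenceFileOutputFormat.class);
    // With compression enabled, getCompressOutput(job) in the fragment above
    // returns true and the configured type and codec are consulted.
    FileOutputFormat.setCompressOutput(job, true);
    FileOutputFormat.setOutputCompressorClass(job, DefaultCodec.class);
    SequenceFileOutputFormat.setOutputCompressionType(job, CompressionType.BLOCK);
    return job;
  }
}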

              .forName(codecStr);
          FileOutputFormat.setOutputCompressorClass(jc_output, codec);
        }
        String type = conf.getCompressType();
        if (type != null && !type.trim().equals("")) {
          CompressionType style = CompressionType.valueOf(type);
          SequenceFileOutputFormat.setOutputCompressionType(jc, style);
        }
      }
      // Same compression setup as the first fragment, but this variant also
      // passes the Reporter through to getRecordWriter.
      return getRecordWriter(jc_output, hiveOutputFormat, outputClass,
          isCompressed, tableInfo.getProperties(), outPath, reporter);

   * @return a SequenceFile.Writer over the created sequence file
   */
  public static SequenceFile.Writer createSequenceWriter(JobConf jc, FileSystem fs, Path file,
      Class<?> keyClass, Class<?> valClass, boolean isCompressed) throws IOException {
    CompressionCodec codec = null;
    CompressionType compressionType = CompressionType.NONE;
    Class codecClass = null;
    if (isCompressed) {
      compressionType = SequenceFileOutputFormat.getOutputCompressionType(jc);
      codecClass = FileOutputFormat.getOutputCompressorClass(jc, DefaultCodec.class);
      codec = (CompressionCodec) ReflectionUtils.newInstance(codecClass, jc);
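The createSequenceWriter fragment above stops right after resolving compressionType and codec. As a hedged sketch of what typically comes next (not the continuation of the truncated code), the resolved values can be handed to one of the SequenceFile.createWriter overloads that accepts a CompressionType and a CompressionCodec; the method and parameter names below are illustrative.

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.SequenceFile.CompressionType;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.util.Progressable;

public class SequenceWriterSketch {
  // Parameter names mirror the variables that appear in the fragments above.
  static SequenceFile.Writer open(Configuration conf, FileSystem fs, Path file,
      Class<?> keyClass, Class<?> valClass, CompressionType compressionType,
      CompressionCodec codec, Progressable progress) throws IOException {
    // Older, FileSystem-based overload; deprecated in newer Hadoop releases,
    // but consistent with the mapred-era style of these examples.
    return SequenceFile.createWriter(fs, conf, file, keyClass, valClass,
        compressionType, codec, progress);
  }
}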

        if (rwonly && (!create || merge || fast)) {
          System.err.println(usage);
          System.exit(-1);
        }

        // Parse the CompressionType by name and instantiate the codec class
        // reflectively from the command-line arguments.
        CompressionType compressionType =
          CompressionType.valueOf(compressType);
        CompressionCodec codec = (CompressionCodec) ReflectionUtils.newInstance(
                                    conf.getClassByName(compressionCodec),
                                    conf);

    // As above: read the job settings (including whether the fetcher parses
    // content) and the output CompressionType, then build the parse output paths.
    final boolean ignoreExternalLinks = job.getBoolean("db.ignore.external.links", false);
    int maxOutlinksPerPage = job.getInt("db.max.outlinks.per.page", 100);
    final boolean isParsing = job.getBoolean("fetcher.parse", true);
    final int maxOutlinks = (maxOutlinksPerPage < 0) ? Integer.MAX_VALUE
                                                     : maxOutlinksPerPage;
    final CompressionType compType = SequenceFileOutputFormat.getOutputCompressionType(job);
    Path out = FileOutputFormat.getOutputPath(job);

    Path text = new Path(new Path(out, ParseText.DIR_NAME), name);
    Path data = new Path(new Path(out, ParseData.DIR_NAME), name);
    Path crawl = new Path(new Path(out, CrawlDatum.PARSE_DIR_NAME), name);

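All of these fragments read the configured compression type with SequenceFileOutputFormat.getOutputCompressionType on a JobConf. The counterpart on the job-submission side is to set it when the job is configured. A minimal driver-side sketch using the classic mapred API, the counterpart of the new-API sketch earlier; GzipCodec and BLOCK compression are illustrative choices.

import org.apache.hadoop.io.SequenceFile.CompressionType;
import org.apache.hadoop.io.compress.GzipCodec;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.SequenceFileOutputFormat;

public class OutputCompressionConfigSketch {
  static void configureOutputCompression(JobConf job) {
    // Write SequenceFiles and enable output compression.
    job.setOutputFormat(SequenceFileOutputFormat.class);
    FileOutputFormat.setCompressOutput(job, true);
    // Pick the codec; the fragments above then instantiate it reflectively.
    FileOutputFormat.setOutputCompressorClass(job, GzipCodec.class);
    // Pick the CompressionType that getOutputCompressionType later returns.
    SequenceFileOutputFormat.setOutputCompressionType(job, CompressionType.BLOCK);
  }
}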
