Examples of CopyWork


Examples of org.apache.hadoop.hive.ql.plan.CopyWork

      addPartitionDesc.setLocation(tgtPath.toString());
      LOG.debug("adding dependent CopyWork/AddPart/MoveWork for partition "
          + partSpecToString(addPartitionDesc.getPartSpec())
          + " with source location: " + srcLocation);
      String tmpURI = ctx.getExternalTmpFileURI(fromURI);
      Task<?> copyTask = TaskFactory.get(new CopyWork(srcLocation,
          tmpURI, false), conf);
      Task<?> addPartTask = TaskFactory.get(new DDLWork(getInputs(),
          getOutputs(), addPartitionDesc), conf);
      LoadTableDesc loadTableWork = new LoadTableDesc(tmpURI,
          ctx.getExternalTmpFileURI(fromURI),
View Full Code Here

Examples of org.apache.hadoop.hive.ql.plan.CopyWork

      // might seem redundant in the case
      // that the hive warehouse is also located in the local file system - but
      // that's just a test case.
      String copyURIStr = ctx.getExternalTmpFileURI(toURI);
      URI copyURI = URI.create(copyURIStr);
      rTask = TaskFactory.get(new CopyWork(fromURI.toString(), copyURIStr),
          conf);
      fromURI = copyURI;
    }

    // create final load/move work
View Full Code Here

Examples of org.apache.hadoop.hive.ql.plan.CopyWork

      if (ts.tableHandle.isPartitioned()) {
        partitions = (ts.partitions != null) ? ts.partitions : db.getPartitions(ts.tableHandle);
      }
      Path path = new Path(ctx.getLocalTmpPath(), "_metadata");
      EximUtil.createExportDump(FileSystem.getLocal(conf), path, ts.tableHandle, partitions);
      Task<? extends Serializable> rTask = TaskFactory.get(new CopyWork(
          path, new Path(toURI), false), conf);
      rootTasks.add(rTask);
      LOG.debug("_metadata file written into " + path.toString()
          + " and then copied to " + toURI.toString());
    } catch (Exception e) {
      throw new SemanticException(
          ErrorMsg.GENERIC_ERROR
              .getMsg("Exception while writing out the local file"), e);
    }

    Path parentPath = new Path(toURI);

    if (ts.tableHandle.isPartitioned()) {
      for (Partition partition : partitions) {
        Path fromPath = partition.getDataLocation();
        Path toPartPath = new Path(parentPath, partition.getName());
        Task<? extends Serializable> rTask = TaskFactory.get(
            new CopyWork(fromPath, toPartPath, false),
            conf);
        rootTasks.add(rTask);
        inputs.add(new ReadEntity(partition));
      }
    } else {
      Path fromPath = ts.tableHandle.getDataLocation();
      Path toDataPath = new Path(parentPath, "data");
      Task<? extends Serializable> rTask = TaskFactory.get(new CopyWork(
          fromPath, toDataPath, false), conf);
      rootTasks.add(rTask);
      inputs.add(new ReadEntity(ts.tableHandle));
    }
    outputs.add(new WriteEntity(parentPath, toURI.getScheme().equals("hdfs")));
View Full Code Here

Examples of org.apache.hadoop.hive.ql.plan.CopyWork

  }

  private Task<?> loadTable(URI fromURI, Table table) {
    Path dataPath = new Path(fromURI.toString(), "data");
    Path tmpPath = ctx.getExternalTmpPath(fromURI);
    Task<?> copyTask = TaskFactory.get(new CopyWork(dataPath,
       tmpPath, false), conf);
    LoadTableDesc loadTableWork = new LoadTableDesc(tmpPath,
        Utilities.getTableDesc(table), new TreeMap<String, String>(),
        false);
    Task<?> loadTableTask = TaskFactory.get(new MoveWork(getInputs(),
View Full Code Here

Examples of org.apache.hadoop.hive.ql.plan.CopyWork

      partSpec.setLocation(tgtPath.toString());
      LOG.debug("adding dependent CopyWork/AddPart/MoveWork for partition "
          + partSpecToString(partSpec.getPartSpec())
          + " with source location: " + srcLocation);
      Path tmpPath = ctx.getExternalTmpPath(fromURI);
      Task<?> copyTask = TaskFactory.get(new CopyWork(new Path(srcLocation),
          tmpPath, false), conf);
      Task<?> addPartTask = TaskFactory.get(new DDLWork(getInputs(),
          getOutputs(), addPartitionDesc), conf);
      LoadTableDesc loadTableWork = new LoadTableDesc(tmpPath,
          Utilities.getTableDesc(table),
View Full Code Here

Examples of org.apache.hadoop.hive.ql.plan.CopyWork

      // might seem redundant in the case
      // that the hive warehouse is also located in the local file system - but
      // that's just a test case.
      String copyURIStr = ctx.getExternalTmpPath(toURI).toString();
      URI copyURI = URI.create(copyURIStr);
      rTask = TaskFactory.get(new CopyWork(new Path(fromURI), new Path(copyURI)), conf);
      fromURI = copyURI;
    }

    // create final load/move work
View Full Code Here

Examples of org.apache.hadoop.hive.ql.plan.CopyWork

      // might seem redundant in the case
      // that the hive warehouse is also located in the local file system - but
      // that's just a test case.
      String copyURIStr = ctx.getExternalTmpFileURI(toURI);
      URI copyURI = URI.create(copyURIStr);
      rTask = TaskFactory.get(new CopyWork(fromURI.toString(), copyURIStr),
          conf);
      fromURI = copyURI;
    }

    // create final load/move work
View Full Code Here

Examples of org.apache.hadoop.hive.ql.plan.CopyWork

  }

  private Task<?> loadTable(URI fromURI, Table table) {
    Path dataPath = new Path(fromURI.toString(), "data");
    Path tmpPath = ctx.getExternalTmpPath(new Path(fromURI));
    Task<?> copyTask = TaskFactory.get(new CopyWork(dataPath,
       tmpPath, false), conf);
    LoadTableDesc loadTableWork = new LoadTableDesc(tmpPath,
        Utilities.getTableDesc(table), new TreeMap<String, String>(),
        false);
    Task<?> loadTableTask = TaskFactory.get(new MoveWork(getInputs(),
View Full Code Here

Examples of org.apache.hadoop.hive.ql.plan.CopyWork

      partSpec.setLocation(tgtPath.toString());
      LOG.debug("adding dependent CopyWork/AddPart/MoveWork for partition "
          + partSpecToString(partSpec.getPartSpec())
          + " with source location: " + srcLocation);
      Path tmpPath = ctx.getExternalTmpPath(new Path(fromURI));
      Task<?> copyTask = TaskFactory.get(new CopyWork(new Path(srcLocation),
          tmpPath, false), conf);
      Task<?> addPartTask = TaskFactory.get(new DDLWork(getInputs(),
          getOutputs(), addPartitionDesc), conf);
      LoadTableDesc loadTableWork = new LoadTableDesc(tmpPath,
          Utilities.getTableDesc(table),
View Full Code Here

Examples of org.apache.hadoop.hive.ql.plan.CopyWork

      if (ts.tableHandle.isPartitioned()) {
        partitions = (ts.partitions != null) ? ts.partitions : db.getPartitions(ts.tableHandle);
      }
      Path path = new Path(ctx.getLocalTmpPath(), "_metadata");
      EximUtil.createExportDump(FileSystem.getLocal(conf), path, ts.tableHandle, partitions);
      Task<? extends Serializable> rTask = TaskFactory.get(new CopyWork(
          path, new Path(toURI), false), conf);
      rootTasks.add(rTask);
      LOG.debug("_metadata file written into " + path.toString()
          + " and then copied to " + toURI.toString());
    } catch (Exception e) {
      throw new SemanticException(
          ErrorMsg.GENERIC_ERROR
              .getMsg("Exception while writing out the local file"), e);
    }

    Path parentPath = new Path(toURI);

    if (ts.tableHandle.isPartitioned()) {
      for (Partition partition : partitions) {
        Path fromPath = partition.getDataLocation();
        Path toPartPath = new Path(parentPath, partition.getName());
        Task<? extends Serializable> rTask = TaskFactory.get(
            new CopyWork(fromPath, toPartPath, false),
            conf);
        rootTasks.add(rTask);
        inputs.add(new ReadEntity(partition));
      }
    } else {
      Path fromPath = ts.tableHandle.getDataLocation();
      Path toDataPath = new Path(parentPath, "data");
      Task<? extends Serializable> rTask = TaskFactory.get(new CopyWork(
          fromPath, toDataPath, false), conf);
      rootTasks.add(rTask);
      inputs.add(new ReadEntity(ts.tableHandle));
    }
    boolean isLocal = FileUtils.isLocalFile(conf, toURI);
View Full Code Here
TOP
Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc and owned by ORACLE Inc. Contact coftware#gmail.com.