Examples of Append


Examples of org.apache.hadoop.hbase.client.Append
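
Append performs an atomic read-modify-write on the region server: the bytes supplied for each column are concatenated onto whatever value the cell already holds, and the call returns a Result with the post-append values. A minimal client-side sketch, assuming the older HTable / Append.add(byte[], byte[], byte[]) API used in the snippets below (table, family and row names are placeholders):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.client.Append;
    import org.apache.hadoop.hbase.client.HTable;
    import org.apache.hadoop.hbase.client.Result;
    import org.apache.hadoop.hbase.util.Bytes;

    public class AppendExample {
      public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        HTable table = new HTable(conf, "example_table");   // placeholder table name
        try {
          Append append = new Append(Bytes.toBytes("row1"));
          // Concatenate "-suffix" onto the current value of cf:col in row1.
          append.add(Bytes.toBytes("cf"), Bytes.toBytes("col"), Bytes.toBytes("-suffix"));
          Result result = table.append(append);
          // The returned Result carries the cell values as they are after the append.
          byte[] newValue = result.getValue(Bytes.toBytes("cf"), Bytes.toBytes("col"));
          System.out.println(Bytes.toString(newValue));
        } finally {
          table.close();
        }
      }
    }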

    }
    requestCount.incrementAndGet();
    try {
      HRegion region = getRegion(regionName);
      Integer lock = getLockFromId(append.getLockId());
      Append appVal = append;
      Result resVal;
      if (region.getCoprocessorHost() != null) {
        resVal = region.getCoprocessorHost().preAppend(appVal);
        if (resVal != null) {
          return resVal;
View Full Code Here
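
The fragment above is the region-server side of the call: before executing the append it consults any registered region coprocessor via preAppend, and a non-null Result short-circuits the operation and is returned to the client. A minimal sketch of such a hook, assuming the 0.94/0.98-era BaseRegionObserver API (the class name and rejection policy are illustrative only):

    import java.io.IOException;
    import org.apache.hadoop.hbase.DoNotRetryIOException;
    import org.apache.hadoop.hbase.client.Append;
    import org.apache.hadoop.hbase.client.Result;
    import org.apache.hadoop.hbase.coprocessor.BaseRegionObserver;
    import org.apache.hadoop.hbase.coprocessor.ObserverContext;
    import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;

    public class RejectAppendsObserver extends BaseRegionObserver {
      @Override
      public Result preAppend(ObserverContext<RegionCoprocessorEnvironment> ctx, Append append)
          throws IOException {
        // Throwing here rejects the append before the region executes it; returning a
        // non-null Result instead would short-circuit the call with that value, which is
        // the branch taken in the server fragment above.
        throw new DoNotRetryIOException("appends are disabled on this table");
      }
    }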

Examples of org.apache.hadoop.hbase.client.Append

 
      Get g = new Get(rk.getBytes());
      g.addColumn(cf.getBytes(), appendCol.getBytes());
      hTable.get(g);
 
      Append a = new Append(rk.getBytes());
      a.add(cf.getBytes(), appendCol.getBytes(), Bytes.toBytes("-APPEND"));
      hTable.append(a);
 
      Delete dOne = new Delete(rk.getBytes());
      dOne.deleteFamily(cf.getBytes());
      hTable.delete(dOne);
View Full Code Here
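
The snippet above ignores the Result that hTable.append(a) returns, even though it carries the post-append values. When the new values are not needed, the response payload can be skipped; a small sketch reusing the snippet's rk, cf, appendCol and hTable variables, assuming Append.setReturnResults is available in this client API:

    Append a = new Append(rk.getBytes());
    a.add(cf.getBytes(), appendCol.getBytes(), Bytes.toBytes("-APPEND"));
    a.setReturnResults(false);   // server still appends, but does not ship the new values back
    hTable.append(a);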

Examples of org.apache.hadoop.hbase.client.Append

    @Override
    public void run() {
      int count = 0;
      while (count < appendCounter) {
        Append app = new Append(appendRow);
        app.add(family, qualifier, CHAR);
        count++;
        try {
          region.append(app, null, true);
        } catch (IOException e) {
          e.printStackTrace();
View Full Code Here

Examples of org.apache.hadoop.hbase.client.Append

    // set the default value for equal comparison
    mutateBuilder = MutationProto.newBuilder(proto);
    mutateBuilder.setDurability(MutationProto.Durability.USE_DEFAULT);

    Append append = ProtobufUtil.toAppend(proto, null);

    // append always uses the latest timestamp,
    // add the timestamp to the original mutate
    long timestamp = append.getTimeStamp();
    mutateBuilder.setTimestamp(timestamp);
    for (ColumnValue.Builder column: mutateBuilder.getColumnValueBuilderList()) {
      for (QualifierValue.Builder qualifier:
          column.getQualifierValueBuilderList()) {
        qualifier.setTimestamp(timestamp);
View Full Code Here
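
The fragment above comes from a protobuf round-trip test. Because an Append is always applied with the latest timestamp, the decoded Append's timestamp is copied back onto the original MutationProto (and each of its qualifiers) so the two protos compare equal. A minimal round-trip sketch, assuming the ProtobufUtil helpers of this era (toMutation / toAppend):

    import org.apache.hadoop.hbase.client.Append;
    import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
    import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto;
    import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType;
    import org.apache.hadoop.hbase.util.Bytes;

    public class AppendProtoRoundTrip {
      public static void main(String[] args) throws Exception {
        Append original = new Append(Bytes.toBytes("row"));
        original.add(Bytes.toBytes("f"), Bytes.toBytes("q"), Bytes.toBytes("v"));
        // Encode to the wire format, then decode it again. A null CellScanner means the
        // cell data travels inline in the proto rather than in a separate cell block.
        MutationProto proto = ProtobufUtil.toMutation(MutationType.APPEND, original);
        Append roundTripped = ProtobufUtil.toAppend(proto, null);
        System.out.println(roundTripped.getTimeStamp());
      }
    }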

Examples of org.apache.hadoop.hbase.client.Append

      public Object run() throws Exception {
        byte[] row = Bytes.toBytes("random_row");
        byte[] qualifier = Bytes.toBytes("q");
        Put put = new Put(row);
        put.add(TEST_FAMILY, qualifier, Bytes.toBytes(1));
        Append append = new Append(row);
        append.add(TEST_FAMILY, qualifier, Bytes.toBytes(2));
        HTable t = new HTable(conf, TEST_TABLE.getTableName());
        try {
          t.put(put);
          t.append(append);
        } finally {
View Full Code Here

Examples of org.apache.hadoop.hbase.client.Append

  @Test
  public void testAppendWithReadOnlyTable() throws Exception {
    byte[] TABLE = Bytes.toBytes("readOnlyTable");
    this.region = initHRegion(TABLE, getName(), conf, true, Bytes.toBytes("somefamily"));
    boolean exceptionCaught = false;
    Append append = new Append(Bytes.toBytes("somerow"));
    append.setDurability(Durability.SKIP_WAL);
    append.add(Bytes.toBytes("somefamily"), Bytes.toBytes("somequalifier"),
        Bytes.toBytes("somevalue"));
    try {
      region.append(append);
    } catch (IOException e) {
      exceptionCaught = true;
View Full Code Here

Examples of org.apache.hadoop.hbase.client.Append

    @Override
    public void run() {
      int count = 0;
      while (count < appendCounter) {
        Append app = new Append(appendRow);
        app.add(family, qualifier, CHAR);
        count++;
        try {
          region.append(app);
        } catch (IOException e) {
          e.printStackTrace();
View Full Code Here

Examples of org.apache.hadoop.hbase.client.Append

  public static Append toAppend(final MutationProto proto, final CellScanner cellScanner)
  throws IOException {
    MutationType type = proto.getMutateType();
    assert type == MutationType.APPEND : type.name();
    byte [] row = proto.hasRow()? proto.getRow().toByteArray(): null;
    Append append = null;
    int cellCount = proto.hasAssociatedCellCount()? proto.getAssociatedCellCount(): 0;
    if (cellCount > 0) {
      // The proto has metadata only and the data is separate to be found in the cellScanner.
      if (cellScanner == null) {
        throw new DoNotRetryIOException("Cell count of " + cellCount + " but no cellScanner: " +
          toShortString(proto));
      }
      for (int i = 0; i < cellCount; i++) {
        if (!cellScanner.advance()) {
          throw new DoNotRetryIOException("Cell count of " + cellCount + " but at index " + i +
            " no cell returned: " + toShortString(proto));
        }
        Cell cell = cellScanner.current();
        if (append == null) {
          append = new Append(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength());
        }
        append.add(KeyValueUtil.ensureKeyValue(cell));
      }
    } else {
      append = new Append(row);
      for (ColumnValue column: proto.getColumnValueList()) {
        byte[] family = column.getFamily().toByteArray();
        for (QualifierValue qv: column.getQualifierValueList()) {
          byte[] qualifier = qv.getQualifier().toByteArray();
          if (!qv.hasValue()) {
            throw new DoNotRetryIOException(
              "Missing required field: qualifer value");
          }
          byte[] value = qv.getValue().toByteArray();
          append.add(family, qualifier, value);
        }
      }
    }
    append.setDurability(toDurability(proto.getDurability()));
    for (NameBytesPair attribute: proto.getAttributeList()) {
      append.setAttribute(attribute.getName(), attribute.getValue().toByteArray());
    }
    return append;
  }
View Full Code Here
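
The conversion above handles both wire encodings: when the proto carries an associated cell count, the cell data arrives out-of-band and is read from the CellScanner, with the row key taken from the first cell; otherwise the family/qualifier/value triples are decoded inline from the ColumnValue list, and a missing qualifier value is rejected. Durability and attributes are copied onto the Append in either case.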

Examples of org.apache.hadoop.hbase.client.Append

            sequence = newSequences;
        }
        try {
            sequence.getLock().lock();
            // Now that we have the lock we need, create the sequence
            Append append = sequence.createSequence(startWith, incrementBy, cacheSize, timestamp, minValue, maxValue, cycle);
            HTableInterface htable =
                    this.getTable(PhoenixDatabaseMetaData.SEQUENCE_TABLE_NAME_BYTES);
            try {
                Result result = htable.append(append);
                return sequence.createSequence(result, minValue, maxValue, cycle);
View Full Code Here

Examples of org.apache.hadoop.hbase.client.Append

            sequence = newSequences;
        }
        try {
            sequence.getLock().lock();
            // Now that we have the lock we need, create the sequence
            Append append = sequence.dropSequence(timestamp);
            HTableInterface htable = this.getTable(PhoenixDatabaseMetaData.SEQUENCE_TABLE_NAME_BYTES);
            try {
                Result result = htable.append(append);
                return sequence.dropSequence(result);
            } catch (IOException e) {
View Full Code Here
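
Both Phoenix fragments follow the same pattern: while holding the per-sequence lock, the Sequence object encodes the requested state change (create or drop) as an Append, the client submits it to the sequence metadata table (SEQUENCE_TABLE_NAME_BYTES) with htable.append, and the returned Result is handed back to the Sequence to produce the caller's value.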