Package com.linkedin.databus2.producers

Examples of com.linkedin.databus2.producers.EventCreationException
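The snippets below are taken from the Databus relay's event-producer code, which converts Oracle JDBC results into Avro records. They show the two ways the exception is typically raised: with a message alone, or with a message plus the underlying cause, usually a SQLException. A minimal, hypothetical helper that follows the same wrap-and-rethrow pattern (the class and method names here are illustrative and not part of Databus):

import java.sql.ResultSet;
import java.sql.SQLException;

import com.linkedin.databus2.producers.EventCreationException;

// Hypothetical helper: shows the wrap-and-rethrow pattern used throughout the
// examples below, where JDBC failures surface as EventCreationException.
public class EventFieldReader
{
  public long readLongField(ResultSet rs, String fieldName) throws EventCreationException
  {
    try
    {
      return rs.getLong(fieldName);
    }
    catch (SQLException e)
    {
      // Keep the original SQLException as the cause so callers can log the full trace.
      throw new EventCreationException("Error reading field " + fieldName + ": " + e.getMessage(), e);
    }
  }
}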


  // Signature reconstructed from the parameter names and types used in the body below;
  // the original fragment begins at the throws clause.
  private void putArray(GenericRecord record, String arrayFieldName, Schema schema, Array array)
    throws EventCreationException
  {
    // Make sure this is an array type
    if (schema.getType() != Type.ARRAY)
    {
      throw new EventCreationException("Not an array type. " + schema.getName());
    }

    Schema elementSchema = schema.getElementType();

    GenericArray<GenericRecord> avroArray = new GenericData.Array<GenericRecord>(0, schema);

    try
    {
      ResultSet arrayResultSet = array.getResultSet();
      try
      {
        while (arrayResultSet.next())
        {
          // Create the avro record and add it to the array
          GenericRecord elemRecord = new GenericData.Record(elementSchema);
          avroArray.add(elemRecord);

          // Get the underlying structure from the database. Oracle returns the structure in the
          // second column of the array's ResultSet
          Struct struct = (Struct) arrayResultSet.getObject(2);
          putOracleRecord(elemRecord, elementSchema, struct);
        }
      }
      finally
      {
        arrayResultSet.close();
      }
    }
    catch (SQLException e)
    {
      throw new EventCreationException("putArray error: " + e.getMessage(), e);
    }

    record.put(arrayFieldName, avroArray);
  }
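
The Type.ARRAY guard at the top of the method means only schemas whose top-level type is an Avro array are accepted, and the body further assumes the array's element type is a record. A small self-contained example of building such a schema with the Avro API (record and field names are made up for the illustration):

import java.util.Arrays;

import org.apache.avro.Schema;
import org.apache.avro.Schema.Field;
import org.apache.avro.Schema.Type;

// Builds an array-of-record schema of the shape the putArray snippet above expects.
public class ArraySchemaExample
{
  public static void main(String[] args)
  {
    Schema elementSchema = Schema.createRecord("Element", null, "com.example", false);
    elementSchema.setFields(Arrays.asList(
        new Field("name", Schema.create(Type.STRING), null, null)));

    Schema arraySchema = Schema.createArray(elementSchema);
    System.out.println(arraySchema.getType() == Type.ARRAY); // true
  }
}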


    if (structAttribValue == null)  // condition reconstructed; the fragment starts mid-method
    {
      // The field value was null. If the field is nullable then we do nothing. Otherwise this is an error.
      boolean isNullAllowedInSchema = SchemaHelper.isNullable(field.schema());
      if (!isNullAllowedInSchema)
      {
        throw new EventCreationException("Null value not allowed for field " + recordFieldName +
                                         ":" + field.schema());
      }
    }
    else
    {
      Schema recordSchema = SchemaHelper.unwindUnionSchema(field);  // == field.schema() if not a union
      Type recordFieldType = recordSchema.getType();

      switch (recordFieldType)
      {
        case BOOLEAN:
        case BYTES:
        case DOUBLE:
        case FLOAT:
        case INT:
        case LONG:
        case STRING:
        case NULL:
          putSimpleValue(fieldRecord, recordFieldName, recordFieldType, structAttribValue);
          break;
        case RECORD:
          addOracleRecordToParent(fieldRecord, recordFieldName, recordSchema, (Struct)structAttribValue);
          break;
        case ARRAY:
          putArray(fieldRecord, recordFieldName, recordSchema, (Array)structAttribValue);
          break;
        case ENUM:
        case FIXED:
        case MAP:
        case UNION:
        default:
          throw new EventCreationException("unknown struct field type: " + recordFieldName + ":" +
                                           recordFieldType);
      }
    }
  }

    try
    {
      List<Field> fields = fieldSchema.getFields();
      Object[] structAttribs = dbFieldValue.getAttributes();

      if (fields.size() != structAttribs.length)
          throw new EventCreationException("Avro field number mismatch: avro schema field# =" +
                                           fields.size() + " ; struct " +
                                           dbFieldValue.getSQLTypeName() + " field# = " +
                                           structAttribs.length);


      for (Field field: fields)
      {
        processRecordField(fieldRecord, field, structAttribs);
      }
    }
    catch (SQLException e)
    {
      throw new EventCreationException("creation of field " + fieldSchema.getFullName(), e);
    }
  }

            timestampValueMethod = timestampClass.getMethod("timestampValue");
        } catch (Exception e)
        {
            String errMsg = "Cannot convert " + databaseFieldValue.getClass()
                    + " to long for field " + schemaFieldName + ". Unable to get Oracle datatypes: " + e.getMessage();
            throw new EventCreationException(errMsg);
        }


        if(databaseFieldValue instanceof Timestamp)
        {
          long time = ((Timestamp) databaseFieldValue).getTime();
          record.put(schemaFieldName, time);
        }
        else if(databaseFieldValue instanceof Date)
        {
          long time = ((Date) databaseFieldValue).getTime();
          record.put(schemaFieldName, time);
        }
        else if(timestampClass.isInstance(databaseFieldValue))
        {
          try
          {
            Object tsc = timestampClass.cast(databaseFieldValue);
            Timestamp tsValue = (Timestamp) timestampValueMethod.invoke(tsc);
            long time = tsValue.getTime();
            record.put(schemaFieldName, time);
          }
          catch(Exception ex)
          {
            throw new EventCreationException("SQLException reading oracle.sql.TIMESTAMP value for field "
                + schemaFieldName, ex);
          }
        }
        else if(dateClass.isInstance(databaseFieldValue))
        {
          try
          {
            Object dsc = dateClass.cast(databaseFieldValue);
            Timestamp tsValue = (Timestamp) timestampValueMethod.invoke(dsc);
            long time = tsValue.getTime();
            record.put(schemaFieldName, time);
          }
          catch(Exception ex)
          {
            throw new EventCreationException("SQLException reading oracle.sql.TIMESTAMP value for field "
                + schemaFieldName, ex);
          }
        }
        /**
         * This branch must stay after the oracle.sql.TIMESTAMP handling above: that class
         * extends/implements Number and BigDecimal, so it would also match this
         * instanceof Number check. Keeping the checks in this order avoids that.
         */
        else if(databaseFieldValue instanceof Number)
        {
          long lvalue = ((Number) databaseFieldValue).longValue();
          record.put(schemaFieldName, lvalue);
        }
        else
        {
          throw new EventCreationException("Cannot convert " + databaseFieldValue.getClass()
              + " to long for field " + schemaFieldName);
        }
        break;
      case STRING:
        if(databaseFieldValue instanceof Clob)
        {
          String text = extractClobText((Clob)databaseFieldValue, schemaFieldName);
          record.put(schemaFieldName, text);
        }
        else if (databaseFieldValue instanceof SQLXML)
        {
          SQLXML xmlInst = (SQLXML) databaseFieldValue;
          try
          {
            record.put(schemaFieldName,xmlInst.getString());
          }
          catch (SQLException e)
          {
            throw new EventCreationException("Cannot convert " + databaseFieldValue.getClass() +
                                             " to string field " + schemaFieldName + " cause: " + e);
          }
        }
        else
        {
          String text = databaseFieldValue.toString();
          record.put(schemaFieldName, text);
        }
        break;
      case NULL:
        record.put(schemaFieldName, null);
        break;
      default:
        throw new EventCreationException("unknown simple type " + avroFieldType.toString() +
                                         " for field " + schemaFieldName);
    }
  }
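
The LONG branch above expects timestampClass, dateClass, and timestampValueMethod to have been resolved reflectively beforehand, so that Oracle driver types can be handled without a compile-time dependency on them. A minimal sketch of that setup, assuming the standard oracle.sql.TIMESTAMP and oracle.sql.DATE class names (the holder class is illustrative):

import java.lang.reflect.Method;

import com.linkedin.databus2.producers.EventCreationException;

// Illustrative holder for the reflective lookups the LONG branch relies on.
// The oracle.sql class names are assumptions based on the error handling shown above.
final class OracleTypeResolver
{
  final Class<?> timestampClass;
  final Class<?> dateClass;
  final Method timestampValueMethod;

  OracleTypeResolver() throws EventCreationException
  {
    try
    {
      timestampClass = Class.forName("oracle.sql.TIMESTAMP");
      dateClass = Class.forName("oracle.sql.DATE");
      // Matches the lookup in the snippet above; timestampValue() returns a java.sql.Timestamp.
      timestampValueMethod = timestampClass.getMethod("timestampValue");
    }
    catch (Exception e)
    {
      throw new EventCreationException("Unable to get Oracle datatypes: " + e.getMessage());
    }
  }
}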

    if (databaseFieldValue == null)  // condition reconstructed; the fragment starts mid-method
    {
      // The field value was null. If the field is nullable then we do nothing. Otherwise this is an error.
      boolean isNullAllowedInSchema = SchemaHelper.isNullable(field);
      if (!isNullAllowedInSchema)
      {
        throw new EventCreationException("Null value not allowed for field " + schemaFieldName);
      }
    }
    else
    {
      if (_log.isTraceEnabled())
      {
        _log.trace("record.put(\"" + schemaFieldName + "\", (" + avroFieldType + ") \"" + databaseFieldValue + "\")");
      }

      try
      {
        switch (avroFieldType)
        {
          case BOOLEAN:
          case BYTES:
          case DOUBLE:
          case FLOAT:
          case INT:
          case LONG:
          case STRING:
          case NULL:
            putSimpleValue(record, schemaFieldName, avroFieldType, databaseFieldValue);
            break;
          case RECORD:
            addOracleRecordToParent(record, schemaFieldName, fieldSchema, (Struct)databaseFieldValue);
            break;
          case ARRAY:
            putArray(record, schemaFieldName, fieldSchema, (Array)databaseFieldValue);
            break;
          case ENUM:   // exists in some Espresso schemas:  don't blindly cut and paste!
          case MAP:    // ditto
          case FIXED:
          case UNION:  // shouldn't be possible, given unwindUnionSchema() call above
          default:
            throw new EventCreationException("Don't know how to populate this type of field: " + avroFieldType);
        }
      }
      catch(ClassCastException ex)
      {
        throw new EventCreationException("Type conversion error for field name (" + field.name() +
                                         ") in source " + _sourceId + ". Value was: " + databaseFieldValue +
                                         " avro field was: " + avroFieldType, ex);
      }
    }
  }

      byte[] bytes = blob.getBytes(1, (int) blob.length());
      return ByteBuffer.wrap(bytes);
    }
    catch(SQLException ex)
    {
      throw new EventCreationException("SQLException reading BLOB value for field " + fieldName, ex);
    }
  }

        }
      }
    }
    catch(SQLException ex)
    {
      throw new EventCreationException("SQLException reading CLOB value for field " + fieldName, ex);
    }
  }
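
Only the tail of the CLOB helper is shown above. A hedged sketch of what extractClobText, the helper called from the STRING branch earlier, might look like; reading the whole CLOB in one getSubString call is an assumption, and the real implementation may stream the value instead:

import java.sql.Clob;
import java.sql.SQLException;

import com.linkedin.databus2.producers.EventCreationException;

// Sketch only: mirrors the error handling visible in the fragment above.
class ClobHelper
{
  static String extractClobText(Clob clob, String fieldName) throws EventCreationException
  {
    try
    {
      // getSubString is 1-based; this reads the entire CLOB into memory.
      return clob.getSubString(1, (int) clob.length());
    }
    catch (SQLException ex)
    {
      throw new EventCreationException("SQLException reading CLOB value for field " + fieldName, ex);
    }
  }
}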

    // Create the event producer
    String uri = pConfig.getUri();
    if(uri == null)
      throw new DatabusException("Uri is required to start the relay");
    uri = uri.trim();
    EventProducer producer = null;
    if (uri.startsWith("jdbc:")) {
      SourceType sourceType = pConfig.getReplBitSetter().getSourceType();
      if (SourceType.TOKEN.equals(sourceType))
        throw new DatabusException("Token Source-type for Replication bit setter config cannot be set for trigger-based Databus relay !!");

    LOG.info("Starting. Producers are :" + _producers);

    for (Entry<PhysicalPartition, EventProducer> entry : _producers
        .entrySet()) {
      EventProducer producer = entry.getValue();
      // now start the default DB puller thread; depending on
      // configuration setting / cmd line
      if (this.getDbPullerStart()) {
        if (producer != null) {
          LOG.info("starting db puller: " + producer.getName());
          producer.start(-1L);
          LOG.info("db puller started: " + producer.getName());
        }
      }
    }
  }

  @Override
  public void pause() {
    for (Entry<PhysicalPartition, EventProducer> entry : _producers
        .entrySet()) {
      EventProducer producer = entry.getValue();

      if (null != producer) {
        if (producer.isRunning()) {
          producer.pause();
          LOG.info("EventProducer :" + producer.getName()
              + "  pause sent");
        } else if (producer.isPaused()) {
          LOG.info("EventProducer :" + producer.getName()
              + "  already paused");
        }
      }
    }
  }
