Package: org.codehaus.jackson

Examples of org.codehaus.jackson.JsonParser


    }

    public Object convert(Object source, TypeDescriptor sourceType, TypeDescriptor targetType) {
        try {
            if (SimpleRopRequestContext.messageFormat.get() == MessageFormat.json) {//输入格式为JSON
                JsonParser jsonParser = objectMapper.getJsonFactory().createJsonParser((String) source);
                return jsonParser.readValueAs(targetType.getObjectType());
            } else {
                Unmarshaller unmarshaller = createUnmarshaller(targetType.getObjectType());
                StringReader reader = new StringReader((String) source);
                return unmarshaller.unmarshal(reader);
            }
View Full Code Here


    // from the last record
    for (int i=0;i<numColumns;i++)
      row.set(i, null);
   
    try {
      JsonParser parser = jsonFactory.createJsonParser(json.toString());

      JsonToken token = parser.nextToken();

      while (token != null) {

        if (token == JsonToken.START_OBJECT) {
          if (parser.getCurrentName() == "geometry") {
            if (geometryColumn > -1) {
              // create geometry and insert into geometry field
              Geometry geometry =  GeometryEngine.jsonToGeometry(parser).getGeometry();
              row.set(geometryColumn, GeometryUtils.geometryToEsriShapeBytesWritable(OGCGeometry.createFromEsriGeometry(geometry, null)));
            } else {
              // no geometry in select field set, don't even bother parsing
              parser.skipChildren();
            }
          } else if (parser.getCurrentName() == "attributes") {

            token = parser.nextToken();

            while (token != JsonToken.END_OBJECT && token != null) {

              // hive makes all column names in the queries column list lower case
              String name = parser.getText().toLowerCase();

              parser.nextToken();

              // figure out which column index corresponds with the attribute name
              int fieldIndex = columnNames.indexOf(name);

              if (fieldIndex >= 0) {
                setRowFieldFromParser(fieldIndex, parser);
              }

              token = parser.nextToken();
            }

            token = parser.nextToken();
          }
        }

        token = parser.nextToken();
      }

    } catch (JsonParseException e) {
      // TODO Auto-generated catch block
      e.printStackTrace();
View Full Code Here

        // Create a parser specific for this input line.  This may not be the
        // most efficient approach.
        byte[] newBytes = new byte[val.getLength()];
        System.arraycopy(val.getBytes(), 0, newBytes, 0, val.getLength());
        ByteArrayInputStream bais = new ByteArrayInputStream(newBytes);
        JsonParser p = jsonFactory.createJsonParser(bais);

        // Create the tuple we will be returning.  We create it with the right
        // number of fields, as the Tuple object is optimized for this case.
        ResourceFieldSchema[] fields = schema.getFields();
        Tuple t = tupleFactory.newTuple(fields.length);

        // Read the start object marker.  Throughout this file if the parsing
        // isn't what we expect we return a tuple with null fields rather than
        // throwing an exception.  That way a few mangled lines don't fail the
        // job.
        if (p.nextToken() != JsonToken.START_OBJECT) {
            warn("Bad record, could not find start of record " +
                val.toString(), PigWarning.UDF_WARNING_1);
            return t;
        }

        // Read each field in the record
        for (int i = 0; i < fields.length; i++) {
            t.set(i, readField(p, fields[i], i));
        }

        if (p.nextToken() != JsonToken.END_OBJECT) {
            warn("Bad record, could not find end of record " +
                val.toString(), PigWarning.UDF_WARNING_1);
            return t;
        }
        p.close();
        return t;
    }
View Full Code Here

    ));
    final Set<String> multiples = new HashSet<String>(Arrays.asList(
            "ZADD"
    ));
    JsonFactory jf = new MappingJsonFactory();
    JsonParser jsonParser = jf.createJsonParser(new URL("https://raw.github.com/antirez/redis-doc/master/commands.json"));
    final JsonNode commandNodes = jsonParser.readValueAsTree();
    Iterator<String> fieldNames = commandNodes.getFieldNames();
    ImmutableListMultimap<String,String> group = Multimaps.index(fieldNames,
            new Function<String, String>() {
              @Override
              public String apply(String s) {
View Full Code Here

  @Nonnull
  @Override
  public T deserialize( @Nonnull InputStream in ) throws IOException, VersionException {
    try {
      JsonFactory jsonFactory = JacksonSupport.getJsonFactory();
      JsonParser parser = jsonFactory.createJsonParser( in );

      T deserialized = deserialize( parser );

      ensureParserClosed( parser );
      return deserialized;
View Full Code Here

      throws JsonParseException, IOException {
 
      this.depth = depth;
      byte[] in = input.getBytes("UTF-8");
      JsonFactory f = new JsonFactory();
      JsonParser p = f.createJsonParser(
          new ByteArrayInputStream(input.getBytes("UTF-8")));
     
      ByteArrayOutputStream os = new ByteArrayOutputStream();
      Encoder cos = new BlockingBinaryEncoder(os, bufferSize);
      serialize(cos, p, os);
View Full Code Here

  private static final DoubleSchema  DOUBLE_SCHEMA =  new DoubleSchema();
  private static final BooleanSchema BOOLEAN_SCHEMA = new BooleanSchema();
  private static final NullSchema    NULL_SCHEMA =    new NullSchema();

  public static Schema parse(File file) throws IOException {
    JsonParser parser = FACTORY.createJsonParser(file);
    try {
      return Schema.parse(MAPPER.readTree(parser), new Names());
    } catch (JsonParseException e) {
      throw new SchemaParseException(e);
    }
View Full Code Here

    private Map<String, String> parseJSON(String jsonmessage){
        Map<String, String> parsed = new HashMap<String, String>();
        JsonFactory jf = new JsonFactory();
        try {
            JsonParser parser = jf.createJsonParser(jsonmessage);
            parser.nextToken(); //shift past the START_OBJECT that begins the JSON
            while (parser.nextToken() != JsonToken.END_OBJECT) {
                String fieldname = parser.getCurrentName();
                parser.nextToken(); // move to value, or START_OBJECT/START_ARRAY
                String value = parser.getText();
                parsed.put(fieldname, value);
            }
        } catch (JsonParseException e) {
            // JSON could not be parsed
            e.printStackTrace();
View Full Code Here

    Map<String, Map<String, String>> mapFields = Maps.newHashMap();
    byte[] rawPayload = null;

    try {
      JsonFactory f = new JsonFactory();
      JsonParser jp = f.createJsonParser(bais);

      jp.nextToken(); // will return JsonToken.START_OBJECT (verify?)
      while (jp.nextToken() != JsonToken.END_OBJECT) {
        String fieldname = jp.getCurrentName();
        jp.nextToken(); // move to value, or START_OBJECT/START_ARRAY
        if ("id".equals(fieldname)) {
          // contains an object
          id = jp.getText();
        } else if ("simpleFields".equals(fieldname)) {
          while (jp.nextToken() != JsonToken.END_OBJECT) {
            String key = jp.getCurrentName();
            jp.nextToken(); // move to value
            simpleFields.put(key, jp.getText());
          }
        } else if ("mapFields".equals(fieldname)) {
          // user.setVerified(jp.getCurrentToken() == JsonToken.VALUE_TRUE);
          while (jp.nextToken() != JsonToken.END_OBJECT) {
            String key = jp.getCurrentName();
            mapFields.put(key, new TreeMap<String, String>());
            jp.nextToken(); // move to value

            while (jp.nextToken() != JsonToken.END_OBJECT) {
              String mapKey = jp.getCurrentName();
              jp.nextToken(); // move to value
              mapFields.get(key).put(mapKey, jp.getText());
            }
          }

        } else if ("listFields".equals(fieldname)) {
          // user.setUserImage(jp.getBinaryValue());
          while (jp.nextToken() != JsonToken.END_OBJECT) {
            String key = jp.getCurrentName();
            listFields.put(key, new ArrayList<String>());
            jp.nextToken(); // move to value
            while (jp.nextToken() != JsonToken.END_ARRAY) {
              listFields.get(key).add(jp.getText());
            }

          }

        } else if ("rawPayload".equals(fieldname)) {
          rawPayload = Base64.decode(jp.getText());
        } else {
          throw new IllegalStateException("Unrecognized field '" + fieldname + "'!");
        }
      }
      jp.close(); // ensure resources get cleaned up timely and properly

      if (id == null) {
        throw new IllegalStateException("ZNRecord id field is required!");
      }
      record = new ZNRecord(id);
View Full Code Here

    private int importUnsorted(String jsonFile, ColumnFamily columnFamily, String ssTablePath, IPartitioner<?> partitioner) throws IOException
    {
        int importedKeys = 0;
        long start = System.nanoTime();

        JsonParser parser = getParser(jsonFile);

        Object[] data = parser.readValueAs(new TypeReference<Object[]>(){});

        keyCountToImport = (keyCountToImport == null) ? data.length : keyCountToImport;
        SSTableWriter writer = new SSTableWriter(ssTablePath, keyCountToImport, ActiveRepairService.UNREPAIRED_SSTABLE);

        System.out.printf("Importing %s keys...%n", keyCountToImport);
View Full Code Here

TOP

Related Classes of org.codehaus.jackson.JsonParser

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc. and is owned by Oracle, Inc. Contact coftware#gmail.com.