Package com.ikanow.infinit.e.data_model.api

Examples of com.ikanow.infinit.e.data_model.api.ResponsePojo
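
The snippets below share one pattern: construct a ResponsePojo, attach a ResponseObject describing success or failure, optionally attach a data payload, and hand the result back for serialization. A minimal sketch of that pattern (the handler itself is hypothetical; the ResponsePojo/ResponseObject calls mirror the snippets below):

  public ResponsePojo exampleHandler()
  {
    ResponsePojo rp = new ResponsePojo();
    try
    {
      // ... do the work, attach any payload via rp.setData(...) ...
      rp.setResponse(new ResponseObject("Example", true, "Operation succeeded"));
    }
    catch (Exception e)
    {
      rp.setResponse(new ResponseObject("Example", false, "Operation failed: " + e.getMessage()));
    }
    return rp; // callers typically serialize this via rp.toApi()
  }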


  /**
   * Adds a community to a share, or updates the community's read/write access to it.
   * @return a ResponsePojo indicating success or failure
   */
  public ResponsePojo addCommunity(String ownerIdStr, String shareIdStr, String communityIdStr, String comment, boolean readWrite)
  {
    // First get the share document from the database (only works for the share owner)
    ResponsePojo rp = new ResponsePojo();
   
    BasicDBObject query = new BasicDBObject();
    query.put("_id", new ObjectId(shareIdStr));
    mustBeOwnerOrAdmin(ownerIdStr, query);
   
    try
    {
      communityIdStr = allowCommunityRegex(ownerIdStr, communityIdStr);
     
      BasicDBObject dbo = (BasicDBObject)DbManager.getSocial().getShare().findOne(query);
      if (dbo != null)
      {
        SharePojo share = SharePojo.fromDb(dbo, SharePojo.class);
       
        // Read write:
        if (null == share.getReadWrite()) {
          share.setReadWrite(new HashSet<ObjectId>());
        }
        ObjectId communityId = new ObjectId(communityIdStr);
        boolean changedReadWriteAccess = false;
        if (readWrite) { // set read-write up
          changedReadWriteAccess = share.getReadWrite().add(communityId);
        }       
        else {
          changedReadWriteAccess = share.getReadWrite().remove(communityId);
        }       

        // Check to see if the community is already in share.communities
        List<ShareCommunityPojo> communities = share.getCommunities();
        if (null == communities) {
          communities = new ArrayList<ShareCommunityPojo>();
        }
        boolean addCommunity = true;
        for (ShareCommunityPojo scp : communities)
        {
          if (scp.get_id().toString().equalsIgnoreCase(communityIdStr)) addCommunity = false;
        }
       
        // Add new community to communities (or change its read/write permissions)
        if (addCommunity || changedReadWriteAccess)
        {         
          if (addCommunity) {
            ShareCommunityPojo cp = new ShareCommunityPojo();
            cp.set_id(new ObjectId(communityIdStr));
            cp.setName(getCommunity(new ObjectId(communityIdStr)).getName());
            cp.setComment(comment);
            communities.add(cp);
 
            // Endorse if applicable...
            if (null == share.getEndorsed()) { // legacy case
              share.setEndorsed(new HashSet<ObjectId>());
              share.getEndorsed().add(share.getOwner().get_id()); // user's personal community always endorsed
            }//TESTED
            boolean bAdmin = RESTTools.adminLookup(ownerIdStr, false); // (can be admin-on-request and not enabled, the bar for endorsing is pretty low)
            if (bAdmin || SocialUtils.isOwnerOrModeratorOrContentPublisher(communityIdStr, ownerIdStr))  {
              share.getEndorsed().add(cp.get_id());
            }
            //TESTED - adding as admin/community owner, not adding if not
          }         
          share.setModified(new Date());

          DbManager.getSocial().getShare().update(query, share.toDb());
          rp.setResponse(new ResponseObject("Share", true, "Community successfully added to the share"));
        }
        // Community already in share.communities
        else
        {
          rp.setResponse(new ResponseObject("Share", false, "Community has already been added to the share"));
        }
      }
      else
      {
        rp.setResponse(new ResponseObject("Share", false, "Unable to add community to share."));
      }
    }
    catch (Exception e)
    {
      logger.error("Exception Message: " + e.getMessage(), e);
      rp.setResponse(new ResponseObject("Share", false, "Unable to add community to share: " + e.getMessage()));
    }
    return rp;
  }
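
Note that addCommunity leans on the java.util.Set contract: add and remove return true only if the set actually changed, and that single boolean (changedReadWriteAccess) decides whether the share needs re-saving. In isolation (plain strings, purely illustrative):

  Set<String> readWrite = new HashSet<String>();
  boolean changed = readWrite.add("communityA");  // true: the set changed
  changed = readWrite.add("communityA");          // false: already present
  changed = readWrite.remove("communityB");       // false: nothing to remove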


  /**
   * Removes a community from a share, along with any associated endorsement and read/write access.
   * @return a ResponsePojo indicating success or failure
   */
  public ResponsePojo removeCommunity(String ownerIdStr, String shareIdStr, String communityIdStr)
  {
    // First get the share document from the database (only works for the share owner)
    ResponsePojo rp = new ResponsePojo();
   
    BasicDBObject query = new BasicDBObject();
    query.put("_id", new ObjectId(shareIdStr));
    mustBeOwnerOrAdmin(ownerIdStr, query);
   
    try
    {
      communityIdStr = allowCommunityRegex(ownerIdStr, communityIdStr);
     
      BasicDBObject dbo = (BasicDBObject)DbManager.getSocial().getShare().findOne(query);
      if (dbo != null)
      {
        SharePojo share = SharePojo.fromDb(dbo, SharePojo.class);

        List<ShareCommunityPojo> communities = share.getCommunities();

        // Check to see if the community is already in share.communities
        boolean removeCommunity = false;
        for (ShareCommunityPojo scp : communities)
        {
          if (scp.get_id().toString().equalsIgnoreCase(communityIdStr))
          {
            //Also remove endorsements...
            if (null != share.getEndorsed()) {
              share.getEndorsed().remove(scp.get_id());
            }//TESTED           
           
            // Also remove readWrite...
            if (null != share.getReadWrite()) {
              share.getReadWrite().remove(scp.get_id());
            }
           
            removeCommunity = true;
            communities.remove(scp);
            share.setModified(new Date());
            DbManager.getSocial().getShare().update(query, share.toDb());
            rp.setResponse(new ResponseObject("Share", true, "Community successfully removed from the share"));
            break;
          }
        }

        if (!removeCommunity)
        {
          rp.setResponse(new ResponseObject("Share", false, "Unable to remove community (does not exist in share)"));
        }
      }
      else
      {
        rp.setResponse(new ResponseObject("Share", false, "Unable to remove community from share."));
      }
    }
    catch (Exception e)
    {
      logger.error("Exception Message: " + e.getMessage(), e);
      rp.setResponse(new ResponseObject("Share", false, "Unable to remove community from share: " + e.getMessage()));
    }
    return rp;
  }
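
removeCommunity calls communities.remove(scp) inside a for-each loop, which is only safe because of the immediate break. If the loop ever had to keep going after a removal, an explicit Iterator avoids the ConcurrentModificationException (a sketch using the same pojo types):

  Iterator<ShareCommunityPojo> it = communities.iterator();
  while (it.hasNext())
  {
    ShareCommunityPojo scp = it.next();
    if (scp.get_id().toString().equalsIgnoreCase(communityIdStr))
    {
      it.remove(); // safe removal mid-iteration
    }
  }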



  public ResponsePojo createOrUpdateShare(String cookieLookup, SharePojo share, boolean readWrite, boolean returnContent)
  {
    ResponsePojo rp = null;     
    String share_id = null;   
    //need to get previous share so we can add in any previous fields
    if ( share.get_id() != null )
    {
      share_id = share.get_id().toString();
     
      ResponsePojo rp1 = getShare(cookieLookup, share_id, true);
      if ( rp1.getResponse().isSuccess() )
      {
        SharePojo previous_share = (SharePojo)rp1.getData();
        if ( share.getType() == null )
          share.setType(previous_share.getType());
        if ( share.getTitle() == null )
          share.setTitle(previous_share.getTitle());
        if ( share.getDescription() == null )
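
The snippet above is truncated, but the visible pattern is a field-by-field fallback merge: any field left null on the incoming share is copied from the previously stored one. With java.util.function.Supplier/Consumer the repetition could be factored into a small helper (mergeIfNull is hypothetical, not part of the API):

  private static <T> void mergeIfNull(Supplier<T> getter, Consumer<T> setter, Supplier<T> fallback)
  {
    if (getter.get() == null) {
      setter.accept(fallback.get());
    }
  }
  // usage, mirroring the snippet:
  //   mergeIfNull(share::getType, share::setType, previous_share::getType);
  //   mergeIfNull(share::getTitle, share::setTitle, previous_share::getTitle);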

      query.logic = null;
      ObjectId queryId = new ObjectId();
      queryEngine.preQueryActivities(queryId, query, communityIdStrs);
      //TODO: this deletes stuff (make it so it won't - should fallback that function to the clear code)
      //queryEngine.test_CheckIfQueryLaunched("3.1", true);
      ResponsePojo rp = new ResponsePojo();
      ArrayList<BasicDBObject> docs = new ArrayList<BasicDBObject>(1);
      BasicDBObject doc = new BasicDBObject();
      doc.put(DocumentPojo.aggregateSignif_, 115);
      doc.put(DocumentPojo.queryRelevance_, 105);
      doc.put(DocumentPojo.score_, 110);
      docs.add(doc);
      rp.setData(docs, (BasePojoApiMap<BasicDBObject>)null);
      queryEngine.postQueryActivities(queryId, docs, rp);
      queryEngine.test_CheckIfDocAdded("3.1", docs);
      // (don't clear from cache, next doc should return without making a request)
     
      // 3.2] Like 3.1 but with JsonPath
      // (clear cache here to ensure we don't just used the cached doc)
      queryEngine.test_cacheClear(true, true, fakeEndpoint.parentSource.getKey());
     
      query.qt = new ArrayList<AdvancedQueryPojo.QueryTermPojo>(1);
      qtEntVal = new AdvancedQueryPojo.QueryTermPojo();
      qtEntVal.entityValue = "test3_1";
      qtEntVal.entityType = "TestEntityIn";
      docConversionMap.remove("test:field");
      docConversionMap.remove("test:field2");
      docConversionMap.put("::field2","displayUrl");
      docConversionMap.put("::field", "TestEntityOut");
      query.qt.add(qtEntVal);
      query.logic = null;
      queryId = new ObjectId();
      queryEngine.preQueryActivities(queryId, query, communityIdStrs);
      //TODO: this deletes stuff (make it so it won't - should fallback that function to the clear code)
      //queryEngine.test_CheckIfQueryLaunched("3.1", true);
      rp = new ResponsePojo();
      docs = new ArrayList<BasicDBObject>(1);
      doc = new BasicDBObject();
      doc.put(DocumentPojo.aggregateSignif_, 115);
      doc.put(DocumentPojo.queryRelevance_, 105);
      doc.put(DocumentPojo.score_, 110);
      docs.add(doc);
      rp.setData(docs, (BasePojoApiMap<BasicDBObject>)null);
      queryEngine.postQueryActivities(queryId, docs, rp);
      queryEngine.test_CheckIfDocAdded("3.1", docs);
      // (don't clear from cache, next doc should return without making a request)
     
      // 4.1] Test cached query
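
This test runs the same launch-and-check block twice (sections 3.1 and 3.2), with only the query setup differing. The repeated block could be factored into a helper along these lines (a sketch reusing the snippet's own names; runScoredDocCheck itself is hypothetical):

  private void runScoredDocCheck(String testLabel, AdvancedQueryPojo query, String[] communityIdStrs)
  {
    ObjectId queryId = new ObjectId();
    queryEngine.preQueryActivities(queryId, query, communityIdStrs);
    ResponsePojo rp = new ResponsePojo();
    ArrayList<BasicDBObject> docs = new ArrayList<BasicDBObject>(1);
    BasicDBObject doc = new BasicDBObject();
    doc.put(DocumentPojo.aggregateSignif_, 115);
    doc.put(DocumentPojo.queryRelevance_, 105);
    doc.put(DocumentPojo.score_, 110);
    docs.add(doc);
    rp.setData(docs, (BasePojoApiMap<BasicDBObject>)null); // null mapper: serialize the DBObjects as-is
    queryEngine.postQueryActivities(queryId, docs, rp);
    queryEngine.test_CheckIfDocAdded(testLabel, docs);
  }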

  public ResponsePojo getSuggestions(String userIdStr, String term, String communityIdStrList, boolean bIncludeGeo, boolean bIncludeLinkdata, boolean bWantNoAlias)
  {
    long nSysTime = System.currentTimeMillis();   

    ResponsePojo rp = new ResponsePojo();

    ElasticSearchManager gazIndex = ElasticSearchManager.getIndex(entityIndex_);

    // Need to do a quick decomposition of the term to fit in with analyzed strings
    String escapedterm = null;
    StandardTokenizer st = new StandardTokenizer(Version.LUCENE_30, new StringReader(ContentUtils.stripDiacritics(term)));
    CharTermAttribute termAtt = st.addAttribute(CharTermAttribute.class);
    StringBuffer sb = new StringBuffer();
    try {
      try {
        st.reset();
        while (st.incrementToken()) {
          if (sb.length() > 0) {
            sb.append(" +");
          }
          else {
            sb.append('+');           
          }
          sb.append(luceneEncodeTerm(termAtt.toString()));
        }
      }
      finally {
        st.close();     
      }
    } catch (IOException e) {
      e.printStackTrace();
    }   

    if (!term.endsWith(" ") || (0 == sb.length())) { // Could be in the middle of typing, stick a * on the end
      sb.append('*');
    }//TESTED     
    escapedterm = sb.toString();     

    // Create the search query

    SearchRequestBuilder searchOptions = gazIndex.getSearchOptions();
    BaseQueryBuilder queryObj1 = QueryBuilders.queryString(escapedterm).defaultField(EntityFeaturePojoIndexMap.Mapping.RootObject.RootProperties.alias_pri_);

    String[] communityIdStrs = SocialUtils.getCommunityIds(userIdStr, communityIdStrList);
    BaseQueryBuilder queryObj2 = QueryBuilders.boolQuery().should(QueryBuilders.termsQuery(EntityFeaturePojo.communityId_, communityIdStrs));

    BaseQueryBuilder queryObj = QueryBuilders.boolQuery().must(queryObj1).must(queryObj2);

    searchOptions.addSort(EntityFeaturePojo.doccount_, SortOrder.DESC);
    searchOptions.addFields(EntityFeaturePojo.disambiguated_name_, EntityFeaturePojo.doccount_,
        EntityFeaturePojo.type_, EntityFeaturePojo.dimension_);
    if (bIncludeGeo) {
      searchOptions.addFields(EntityFeaturePojo.geotag_);
      searchOptions.addFields(EntityFeaturePojo.ontology_type_);
    }
    if (bIncludeLinkdata) {
      searchOptions.addFields(EntityFeaturePojo.linkdata_);     
    }

    // Initial alias handling:

    AliasLookupTable aliasTable = null;
    HashMap<String, SearchSuggestPojo> aliasResults = null;
    if (!bWantNoAlias) {
      AliasManager aliasManager = AliasManager.getAliasManager();
      if (null != aliasManager) {
        aliasTable = aliasManager.getAliasLookupTable(communityIdStrList, communityIdStrs, null, userIdStr);
      }
    }
    //TESTED

    // Also create an internal Lucene index for aliases, in case any of them do not have actual entities representing them
    List<EntityFeaturePojo> extraEntries = null;
    if (null != aliasTable) {
      extraEntries = checkAliasMasters(aliasTable, escapedterm);
    }
    // (end initial alias handling)

    int nDesiredSize = 20;
    if (null == aliasTable) {   
      searchOptions.setSize(nDesiredSize); // will forward all 20
    }
    else {
      searchOptions.addFields(EntityFeaturePojo.index_);
      searchOptions.setSize(3*nDesiredSize); // will forward top 20 after de-aliasing

      aliasResults = new HashMap<String, SearchSuggestPojo>();
      // (We use this to ensure we only include each entity once after aliasing)
    }
    //TESTED

    // Perform the search

    SearchResponse rsp = gazIndex.doQuery(queryObj, searchOptions);

    // Format the return values

    SearchHit[] docs = rsp.getHits().getHits();     
    DimensionListPojo dimlist = new DimensionListPojo();
    int nDocsAdded = 0;
   
    if (null != extraEntries) { // Put the alias masters at the top:
      //DEBUG
      //System.out.println(Arrays.toString(extraEntries.toArray()));
      for (EntityFeaturePojo alias: extraEntries) {
        SearchSuggestPojo sp = new SearchSuggestPojo();
        if (null != alias.getDimension()) {
          sp.setDimension(alias.getDimension().toString());
        }
        else {
          sp.setDimension("What");
        }
        sp.setValue(alias.getDisambiguatedName());
        sp.setType(alias.getType());
        if (bIncludeGeo) {
          sp.setGeotag(alias.getGeotag());
        }
        sp.setOntology_type(alias.getOntology_type());
        dimlist.addSearchSuggestPojo(sp);
      }
    }//TESTED (inc geo)
   
    if (null != docs)
    {
      for (SearchHit hit: docs)
      {
        SearchHitField shf = hit.field(EntityFeaturePojo.disambiguated_name_);
        if (null == shf) { // robustness check, sometimes if the harvester goes wrong this field might be missing
          continue;
        }
        String disname = (String) shf.value();
        String type = (String) hit.field(EntityFeaturePojo.type_).value();
        String dimension = (String) hit.field(EntityFeaturePojo.dimension_).value();
        SearchSuggestPojo sp = new SearchSuggestPojo();       

        sp.setValue(disname);
        sp.setDimension(dimension);
        sp.setType(type);
        if (bIncludeGeo)
        {
          SearchHitField loc = hit.field(EntityFeaturePojo.geotag_);
          if ( loc != null )
            sp.setLocFromES((String) loc.value());
          SearchHitField ont = hit.field(EntityFeaturePojo.ontology_type_);
          if ( ont != null )
            sp.setOntology_type((String)ont.value());
        }
        if (bIncludeLinkdata) {
          SearchHitField linkdata = hit.field(EntityFeaturePojo.linkdata_);
          if ( linkdata != null )
            sp.setLinkdata(linkdata.values());
        }               

        // More alias handling
        String index = null;
        if (null != aliasTable) {
          index = (String) hit.field(EntityFeaturePojo.index_).value();
          EntityFeaturePojo alias = aliasTable.getAliasMaster(index);
          if (null != alias) { // Found!
            if (alias.getIndex().equalsIgnoreCase("discard")) { // Discard this entity
              continue;
            }
            else if ((null != alias.getDisambiguatedName()) && (null != alias.getType())) {
              // (these need to be present)

              //DEBUG (perf critical)
              //logger.debug("Alias! Replace " + index + " with " + alias.getIndex());

              index = alias.getIndex();
              disname = alias.getDisambiguatedName();
              type = alias.getType();
              if (null != alias.getDimension()) {
                dimension = alias.getDimension().toString();
              }
              else { // Guess from type
                dimension = DimensionUtility.getDimensionByType(type).toString();
              }
              // Reset values:
              sp.setValue(disname);
              sp.setDimension(dimension);
              sp.setType(type);
            }
          }
          SearchSuggestPojo existing = aliasResults.get(index);
          if (null != existing) {

            //DEBUG (perf critical)
            //logger.debug("Alias! Remove duplicate " + index);

            if ((null == existing.getGeotag()) && (null != sp.getGeotag())) {
              // (if they're both set then sigh just ignore on a first-come-first-served basis)
              existing.setGeotag(sp.getGeotag());
              existing.setOntology_type(sp.getOntology_type());
            }//TESTED
            if (null != sp.getLinkdata()) { // (here we can just combine the linkdata)
              if (null == existing.getLinkdata()) {
                existing.setLinkdata(sp.getLinkdata());
              }
              else {
                existing.getLinkdata().addAll(sp.getLinkdata());
              }
            }//TESTED
            continue; // (ie don't add this guy)
          }
          else { // add it
            aliasResults.put(index, sp);
          }
        }
        //TESTED
        // end more alias handing               

        dimlist.addSearchSuggestPojo(sp);
        // (only adds unique entries, ie handles multiple communities "ok"; only ok
        //  because it doesn't sum the doccounts across multiple communities - you'd probably
        //  want to use facets for that, but it doesn't seem worth it, especially since we're
        //  pretty short on field cache space)

        if (++nDocsAdded >= nDesiredSize) { // (can happen in the de-aliasing case)
          break;
        }//TESTED
      }     
    }
    rp.setData(dimlist);
    rp.setResponse(new ResponseObject("Suggestions",true,term));

    if (nSysTime > (lastSuggestLog + 5000)) {
      lastSuggestLog = nSysTime;
      logMsg.setLength(0);
      logMsg.append("knowledge/searchSuggest query=").append(escapedterm);
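
The tokenize-and-escape dance at the top of getSuggestions (repeated almost verbatim in getAssociationSuggestions below) turns raw user input into a "+tok1 +tok2*" prefix query. Factored out, it looks roughly like this (QueryParser.escape stands in for the snippet's luceneEncodeTerm helper, whose implementation is not shown):

  private static String buildPrefixQuery(String term) throws IOException
  {
    StandardTokenizer st = new StandardTokenizer(Version.LUCENE_30,
        new StringReader(ContentUtils.stripDiacritics(term)));
    CharTermAttribute termAtt = st.addAttribute(CharTermAttribute.class);
    StringBuilder sb = new StringBuilder();
    try {
      st.reset();
      while (st.incrementToken()) {
        sb.append(sb.length() > 0 ? " +" : "+"); // AND all tokens together
        sb.append(QueryParser.escape(termAtt.toString()));
      }
    }
    finally {
      st.close();
    }
    if (!term.endsWith(" ") || (0 == sb.length())) {
      sb.append('*'); // user may still be typing: treat the last token as a prefix
    }
    return sb.toString();
  }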

  // Geo suggestions code
  // (Haven't yet converted geo feature to string literals)

  public ResponsePojo getSuggestionsGeo(String userIdStr, String term, String communityIdStrList)
  {     
    ResponsePojo rp = new ResponsePojo();
   
    //validate term object to be a lat,lng or location
    if ( term == null )
    {
      rp.setResponse(new ResponseObject("Suggestions Geo", false, "search term is required, was not provided"));
      return rp; // (without this, term.split below would throw a NullPointerException)
    }
    boolean isLatLng = false;
    Double[] latlng = new Double[2];
    String[] terms = term.split(",");
    if ( terms.length == 2 )
    {
      try
      {
        latlng[0] = Double.parseDouble(terms[0]);
        latlng[1] = Double.parseDouble(terms[1]);
        isLatLng = true;
      }
      catch (Exception e)
      {
        //could not parse as double, treat as location
        //just fall through
      }
    }
    List<SearchSuggestPojo> locations = null;
    if ( isLatLng )
    {
      //lookup location name via lat/lng
       locations = reverseGeoLookup(latlng[0], latlng[1]);
     
    }
    else
    {
      //lookup lat/lngs via location name
      rp.setResponse(new ResponseObject("Suggestions Geo", false, "Search term provided could not be parsed as lat, lng... geotag lookup by name not yet supported."));
      return rp;
    }
   
    rp.setData(locations, new SearchSuggestPojoApiMap());
    rp.setResponse(new ResponseObject("Suggestions Geo", true, term));
    return rp;
  }
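
The parse-or-fall-through block in getSuggestionsGeo is another candidate for a small helper (tryParseLatLng is hypothetical):

  private static Double[] tryParseLatLng(String term)
  {
    String[] parts = term.split(",");
    if (parts.length != 2) return null;
    try {
      return new Double[] { Double.parseDouble(parts[0]), Double.parseDouble(parts[1]) };
    }
    catch (NumberFormatException e) {
      return null; // not a "lat,lng" pair; the caller treats the term as a location name
    }
  }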

  private static final String assocIndex_ = AssociationFeaturePojoIndexMap.indexCollectionName_ + "/" + AssociationFeaturePojoIndexMap.indexName_;

  public ResponsePojo getAssociationSuggestions(String userIdStr, String ent1, String verb, String ent2, String field, String communityIdStrList, boolean bWantNoAlias)
  {
    ResponsePojo rp = new ResponsePojo();
    try
    {
      // Community ids, needed in a couple of places
      String[] communityIdStrs = SocialUtils.getCommunityIds(userIdStr, communityIdStrList);

      // Initial alias handling:
      AliasLookupTable aliasTable = null;
      if (!bWantNoAlias) {
        AliasManager aliasManager = AliasManager.getAliasManager();
        if (null != aliasManager) {
          aliasTable = aliasManager.getAliasLookupTable(communityIdStrList, communityIdStrs, null, userIdStr);
        }
      }//TESTED                   

      ElasticSearchManager esm = ElasticSearchManager.getIndex(assocIndex_);
      SearchRequestBuilder searchOptions = esm.getSearchOptions();
      BoolQueryBuilder boolQuery = QueryBuilders.boolQuery();
      boolean bExtraQueryTerms = false;
      String term = "";
      if ( !ent1.equals("null") )
      {
        if ( field.equals(AssociationFeaturePojo.entity1_) )
          term = ent1;
        else {
          bExtraQueryTerms = true;
          EntityFeaturePojo alias = null;
          if (null != aliasTable) {
            alias = aliasTable.getAliasMaster(ent1);
          }
          if (null != alias) { // Found!
            boolQuery.must(QueryBuilders.termsQuery(AssociationFeaturePojo.entity1_index_, alias.getAlias().toArray()));
          }
          else {
            boolQuery.must(QueryBuilders.termQuery(AssociationFeaturePojo.entity1_index_, ent1));
          }//TESTED
        }
      }
      if ( !verb.equals("null") )
      {
        if ( field.equals(AssociationFeaturePojo.verb_) )
          term = verb;
        else
        {
          bExtraQueryTerms = true;
          boolQuery.must(QueryBuilders.queryString(new StringBuffer("+").append(verb.replaceAll("\\s+", " +")).toString()).
              defaultField(AssociationFeaturePojo.verb_));
        }
      }
      if ( !ent2.equals("null") )
      {
        if ( field.equals(AssociationFeaturePojo.entity2_) )
          term = ent2;
        else {
          bExtraQueryTerms = true;
          EntityFeaturePojo alias = null;
          if (null != aliasTable) {
            alias = aliasTable.getAliasMaster(ent2);
          }
          if (null != alias) { // Found!
            boolQuery.must(QueryBuilders.termsQuery(AssociationFeaturePojo.entity2_index_, alias.getAlias().toArray()));
          }
          else {
            boolQuery.must(QueryBuilders.termQuery(AssociationFeaturePojo.entity2_index_, ent2));
          }
        }//TESTED (cut and paste from entity1)
      }

      String escapedterm = null;
      StandardTokenizer st = new StandardTokenizer(Version.LUCENE_30, new StringReader(ContentUtils.stripDiacritics(term)));
      CharTermAttribute termAtt = st.addAttribute(CharTermAttribute.class);
      StringBuffer sb = new StringBuffer();
      try {
        try {
          st.reset();
          while (st.incrementToken()) {
            if (sb.length() > 0) {
              sb.append(" +");
            }
            else {
              sb.append('+');           
            }
            sb.append(luceneEncodeTerm(termAtt.toString()));
          }
        }
        finally {
          st.close();
        }
      } catch (IOException e) {
        e.printStackTrace();
      }     
      if (!term.endsWith(" ") || (0 == sb.length())) { // Could be in the middle of typing, stick a * on the end
        sb.append('*');
      }//TESTED     

      escapedterm = sb.toString();
     
      // Also create an internal Lucene index for aliases, in case any of them do not have actual entities representing them
      List<EntityFeaturePojo> extraEntries = null;
      BoolQueryBuilder extraQueryTerms = null;
      if (field.startsWith("entity")) {
        String indexField = field.startsWith("entity1") ? "entity1_index" : "entity2_index";
        if (null != aliasTable) {
          extraEntries = checkAliasMasters(aliasTable, escapedterm);
        }
        if (null != extraEntries) {
          extraQueryTerms = QueryBuilders.boolQuery();
          int nExtraTerms = 0;
          Iterator<EntityFeaturePojo> aliasIt = extraEntries.iterator();
          while (aliasIt.hasNext()) {
            EntityFeaturePojo alias = aliasIt.next();           
            nExtraTerms += alias.getAlias().size();
           
            if (!bExtraQueryTerms && (nExtraTerms > 20)) { // If not filtering on event type we'll be more aggressive
              break;
            }//TESTED
            if (bExtraQueryTerms && (nExtraTerms > 60)) { // If the number of terms gets too large bail anyway
              break;
            }//TESTED
           
            extraQueryTerms.should(QueryBuilders.termsQuery(indexField, alias.getAlias().toArray()));
            aliasIt.remove();
           
          }//end loop over entities
        }//if found new aliases
       
      }//(if this is an entity lookup) TESTED - including breaking out because of # of terms
     
      // (end initial alias handling)
     
      if (null == extraQueryTerms) {
        boolQuery.must(QueryBuilders.queryString(escapedterm).defaultField(field));
      }
      else {//(in this case combine the escaped term with the aliases)
        extraQueryTerms.should(QueryBuilders.queryString(escapedterm).defaultField(field));
        boolQuery.must(extraQueryTerms);
      }//TESTED
      boolQuery.must(QueryBuilders.termsQuery(AssociationFeaturePojo.communityId_, communityIdStrs));

      searchOptions.addSort(AssociationFeaturePojo.doccount_, SortOrder.DESC);

      // Work out which fields to return:
      //TODO (INF-1234) need to work out what to do with quotations and similar here (ie entityX without entityX_index)
      String returnfield;
      boolean bReturningEntities = true;
      if ( field.equals(AssociationFeaturePojo.entity1_) ) {
        returnfield = AssociationFeaturePojo.entity1_index_;
        searchOptions.addFields( AssociationFeaturePojo.entity1_index_, AssociationFeaturePojo.doccount_);
      }
      else if ( field.equals(AssociationFeaturePojo.entity2_)) {
        returnfield = AssociationFeaturePojo.entity2_index_;
        searchOptions.addFields( AssociationFeaturePojo.entity2_index_, AssociationFeaturePojo.doccount_);
      }
      else {
        bReturningEntities = false;
        returnfield = AssociationFeaturePojo.verb_;
        searchOptions.addFields( AssociationFeaturePojo.verb_, AssociationFeaturePojo.verb_category_,  AssociationFeaturePojo.doccount_);
      }

      int nNumSuggestionsToReturn = 20;
      if (bReturningEntities && (null != aliasTable)) {
        searchOptions.setSize(3*nNumSuggestionsToReturn); // we're going to remove some duplicates so get more than we need
      }
      else { // normal case
        searchOptions.setSize(nNumSuggestionsToReturn);
      }

      SearchResponse rsp = esm.doQuery(boolQuery, searchOptions);
      SearchHit[] docs = rsp.getHits().getHits();

      //Currently this code takes the results and puts
      //them into a set so there are no duplicates.
      //Duplicates occur, for example, when you search for
      //"obama": you get obama/quotation/quote1 and obama/travel/spain.
      //May want to handle this differently, or at least sum up
      //the frequency.
      Set<String> suggestions = new HashSet<String>();

      for (SearchHit hit: docs)
      {
        SearchHitField retField = hit.field(returnfield); // (this can be null in theory/by mistake)
        if (null != retField) {
          String suggestion = (String) retField.value();
          if (bReturningEntities && (null != aliasTable))
          {
            // More alias handling
            EntityFeaturePojo alias = aliasTable.getAliasMaster(suggestion);
            if (null != alias) { // Found!
              if (alias.getIndex().equalsIgnoreCase("discard")) { // Discard this entity
                continue;
              }
              else {
                // (these need to be present)
                suggestion = alias.getIndex();
              }
            }//TESTED
          }
          else { // (old code, still valid for verbs or no aliases)
            if ( returnfield.equals(AssociationFeaturePojo.verb_) && hit.field(AssociationFeaturePojo.verb_category_) != null )
              //(for some reason verb_cat can be null - this may be broken; e.g. ent1 "facebook inc/company", verb "*")
            {
              String verbcat = (String)hit.field(AssociationFeaturePojo.verb_category_).value();
              suggestion += " (" + verbcat + ")";
              suggestions.add(verbcat);
            }
          }
          suggestions.add(suggestion);

          if (suggestions.size() >= nNumSuggestionsToReturn) {
            break;
          }

        } // (end return string valid)
      }//end loop over suggestions

      // Add any aliases that I couldn't explicitly convert to query terms
      if ((null != extraEntries) && (suggestions.size() < nNumSuggestionsToReturn)) {
        for (EntityFeaturePojo alias: extraEntries) {
          suggestions.add(alias.getIndex());
          if (suggestions.size() >= nNumSuggestionsToReturn) {
            break;
          }         
        }
      }//(end add any remaining entries)
      //TESTED     
     
      String[] suggestionArray = new String[suggestions.size()];
      rp.setData(Arrays.asList(suggestions.toArray(suggestionArray)), (BasePojoApiMap<String>)null);

      String searchTerm = "";
      if ( field.equals(AssociationFeaturePojo.entity1_))
        searchTerm = ent1;
      else if ( field.equals(AssociationFeaturePojo.verb_))
        searchTerm = verb;
      else
        searchTerm = ent2;

      rp.setResponse(new ResponseObject("Association Suggestions", true, searchTerm));
    }
    catch (Exception ex)
    {
      ex.printStackTrace();
      rp.setResponse(new ResponseObject("Association Suggestions",false,"Response returned unsuccessfully: " + ex.getMessage()));
    }
    return rp;
  }
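
Stripped of the alias handling, the core of getAssociationSuggestions is a bool query that ANDs one clause per constrained slot (entity1, verb, entity2) plus a community filter, then sorts by doccount. The skeleton, in the same (older) Elasticsearch builder API the snippet uses (the verb string is illustrative):

  BoolQueryBuilder boolQuery = QueryBuilders.boolQuery();
  boolQuery.must(QueryBuilders.termQuery(AssociationFeaturePojo.entity1_index_, ent1));
  boolQuery.must(QueryBuilders.queryString("+was +elected").defaultField(AssociationFeaturePojo.verb_));
  boolQuery.must(QueryBuilders.termsQuery(AssociationFeaturePojo.communityId_, communityIdStrs));
  // the field being suggested on gets the escaped "+tok1 +tok2*" prefix query instead of a fixed term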

  // (The GUI code crashes or something, and anyway I'm not convinced we want to expose this to the user)

  public ResponsePojo getAliasSuggestions(String userIdStr, String term, String field, String communityIdStrList)
  {
    long nSysTime = System.currentTimeMillis();   
    ResponsePojo rp = new ResponsePojo();

    // (keep user facing data model consistent, ie index(ex gazateer_index), actual_name/alias, disambiguated_name (ex disambiguous_name))
    if (field.equalsIgnoreCase(EntityPojo.actual_name_) || field.equalsIgnoreCase(EntityFeaturePojo.alias_)) {
      field = EntityFeaturePojo.alias_;
    }
    else if (field.equalsIgnoreCase("disambiguous_name") || field.equals(EntityPojo.disambiguated_name_)
        || field.equals(EntityFeaturePojo.disambiguated_name_)) {
      //^^ (for bw compatibility from GUI)
      field = EntityFeaturePojo.disambiguated_name_;
    }
    else if (field.equalsIgnoreCase("gazateer_index") || field.equalsIgnoreCase(EntityPojo.index_)) { // (for bw compatibility from GUI)
      field = EntityFeaturePojo.index_;
    }
    else if (!field.equalsIgnoreCase(EntityFeaturePojo.index_)) {
      rp.setResponse(new ResponseObject("aliasSuggest",false, "Field " + field + " not recognized"));
      return rp;
    }

    try
    {        
      Collection<Set<String>> aliasSet = findAliases(null, field, Arrays.asList(term), userIdStr, communityIdStrList).values();
      Set<String> superSet = new HashSet<String>();
      for (Set<String> set : aliasSet )
      {
        superSet.addAll(set);
      }      
      rp.setData(superSet, (BasePojoApiMap<String>)null);
      rp.setResponse(new ResponseObject("aliasSuggest",true,"Successfully returned aliases"));

      if (nSysTime > (lastAliasLog + 5000)) {
        lastAliasLog = nSysTime;
        logMsg.setLength(0);
        logMsg.append("knowledge/aliasSuggest query=").append(term);
        logMsg.append(" found=").append(superSet.size());
        logMsg.append(" time=").append(System.currentTimeMillis() - nSysTime).append(" ms");
        logger.info(logMsg.toString());
      }         
    }
    catch (Exception e)
    {
      // If an exception occurs log the error
      logger.error("Exception Message: " + e.getMessage(), e);
      rp.setResponse(new ResponseObject("aliasSuggest",false,"Error returning aliases"));
    }
    return rp;
  }   
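
Both getSuggestions and getAliasSuggestions throttle their INFO logging to at most one line every five seconds by remembering the last log time. The pattern in isolation (logger and the lastAliasLog field are assumed from the snippet):

  private long lastAliasLog = 0L;

  private void maybeLogSuggest(String message)
  {
    long now = System.currentTimeMillis();
    if (now > (lastAliasLog + 5000)) { // at most one log line per 5 seconds
      lastAliasLog = now;
      logger.info(message);
    }
  }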

  /**
   * Handles HTTP GET requests for the source management actions (save, test, add, info, good/bad/pending, user, delete, suspend).
   * @throws ResourceException
   */
  @Get
  public Representation get() throws ResourceException
  {
     ResponsePojo rp = new ResponsePojo();
     Date startTime = new Date();
    
     // If json is not null, check that it is valid JSON
     boolean isValidJson = true;
     if (json != null)
     {
       try
       {
         JSON.parse(json);
       }
       catch (Exception e)
       {
         rp.setResponse(new ResponseObject("Parsing JSON",false,"The value passed via the json parameter could not be" +
         " parsed as valid JSON."));
         isValidJson = false;
       }
     }
    
     if (isValidJson)
     {
       if ( needCookie )
       {
         cookieLookup = RESTTools.cookieLookup(cookie);
        
         if ( cookieLookup == null )
         {
           rp = new ResponsePojo();
           rp.setResponse(new ResponseObject("Cookie Lookup",false,"Cookie session expired or never existed, please login first"));
         }
         else {
           // Every call needs communityid so check now
          
           boolean validCommunities = ((communityid == null) || communityid.startsWith("*")) ?
               true : // (in this case, we apply the regex to user communities, so don't need to validate)
                 SocialUtils.validateCommunityIds(cookieLookup, communityid);

           if ( validCommunities == false )
           {
             rp = new ResponsePojo();
             rp.setResponse(new ResponseObject("Verifying Communities",false,"Community IDs are not valid for this user"));
           }
           else
           {
             if ( action.equals("saveSource") )
             {
               rp = this.source.saveSource(json, cookieLookup, communityid);
             }
             else if ( action.equals("testSource") )
             {
               rp = this.source.testSource(json, nNumDocsToReturn, bReturnFullText, bRealDedup, cookieLookup);
             }
             else if ( action.equals("add") )
             {
               rp = this.source.addSource(sourcetitle, sourcedesc, sourceurl, extracttype,
                   sourcetags, mediatype, communityid, cookieLookup);
             }
             else if ( action.equals("info") )
             {
               rp = this.source.getInfo(sourceid, cookieLookup);
             }
             else if ( action.equals("good") )
             {
               rp = this.source.getGoodSources(cookieLookup, communityid, bStripped);
             }
             else if ( action.equals("bad"))
             {
               rp = this.source.getBadSources(cookieLookup, communityid, bStripped);
             }
             else if ( action.equals("pending"))
             {
               rp = this.source.getPendingSources(cookieLookup, communityid, bStripped);
             }
             else if ( action.equals("user"))
             {
               rp = this.source.getUserSources(cookieLookup, bStripped);
             }
             else if ( action.equals("delete") || action.equals("deletedocs"))
             {
               rp = this.source.deleteSource(sourceid, communityid, cookieLookup, action.equals("deletedocs"));
             }
             else if ( action.equals("suspend"))
             {
               rp = this.source.suspendSource(sourceid, communityid, cookieLookup, shouldSuspend);
             }
           }
         } // (end communities valid)
       } // (End login succeeded)
     }
    
     Date endTime = new Date();
     rp.getResponse().setTime(endTime.getTime() - startTime.getTime());
     if (!rp.getResponse().isSuccess()) {
       if (rp.getResponse().getMessage().contains("ermission")) { // likely to be a permissions error
         RESTTools.logRequest(this);
       }
     }//TOTEST (TODO-2194)
     return new StringRepresentation(rp.toApi(), MediaType.APPLICATION_JSON);
  }   
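
The action dispatch in get() is a long if/else chain; with Java 8 it could be table-driven instead. A sketch (the map wiring is hypothetical; the handler calls are taken from the snippet):

  Map<String, Supplier<ResponsePojo>> actions = new HashMap<String, Supplier<ResponsePojo>>();
  actions.put("saveSource", () -> source.saveSource(json, cookieLookup, communityid));
  actions.put("info",       () -> source.getInfo(sourceid, cookieLookup));
  actions.put("user",       () -> source.getUserSources(cookieLookup, bStripped));
  // ... one entry per remaining action ...
  Supplier<ResponsePojo> handler = actions.get(action);
  if (handler != null) {
    rp = handler.get();
  }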

  /**
   * Returns a document's metadata, and optionally its full text or raw content.
   * @return a ResponsePojo containing the document information on success
   */
  public ResponsePojo getInfo(String userIdStr, String sourceKey, String idStrOrUrl, boolean bReturnFullText, boolean returnRawData, boolean isAdmin)
  {
    ResponsePojo rp = new ResponsePojo();
   
    try
    {
      // Set up the query
      BasicDBObject query = new BasicDBObject();
      ObjectId id = null;
      if (null == sourceKey) {
        id = new ObjectId(idStrOrUrl);
        query.put(DocumentPojo._id_, id);       
      }
      else {
        query.put(DocumentPojo.sourceKey_, sourceKey);
        query.put(DocumentPojo.url_, idStrOrUrl);
      }
     
      if ( !isAdmin )
        query.put(DocumentPojo.communityId_, new BasicDBObject(MongoDbManager.in_, SocialUtils.getUserCommunities(userIdStr)));
        // (use DBObject here because DocumentPojo is pretty big and this call could conceivably have perf implications)
     
      BasicDBObject fieldsQ = new BasicDBObject();
      if (!bReturnFullText) {
        fieldsQ.put(DocumentPojo.fullText_, 0); // (XML/JSON have fullText as part of pojo)
      }
     
      BasicDBObject dbo = (BasicDBObject) DbManager.getDocument().getMetadata().findOne(query, fieldsQ);

      if ((null == dbo) ||
          ((null != dbo.get(DocumentPojo.url_)) &&  dbo.getString(DocumentPojo.url_).startsWith("?DEL?")))
      {
        if (null != id) { // this might be the update id...         
          query = new BasicDBObject(DocumentPojo.updateId_, id);
          dbo = (BasicDBObject) DbManager.getDocument().getMetadata().findOne(query, fieldsQ);
        }
      }
      //TESTED (update case, normal case, and intermediate case where both update and original still exist)
     
      if (null == dbo) {
        rp.setResponse(new ResponseObject("Doc Info",true,"Document not found"));
        return rp;
      }
      DocumentPojo dp = DocumentPojo.fromDb(dbo, DocumentPojo.class);
      if (bReturnFullText)
      {
        if (null == dp.getFullText()) { // (Some things like database records might have this stored already)
          byte[] storageArray = new byte[200000];
          DBCollection contentDB = DbManager.getDocument().getContent();
          BasicDBObject contentQ = new BasicDBObject(CompressedFullTextPojo.url_, dp.getUrl());
          contentQ.put(CompressedFullTextPojo.sourceKey_, new BasicDBObject(MongoDbManager.in_, Arrays.asList(null, dp.getSourceKey())));
          BasicDBObject fields = new BasicDBObject(CompressedFullTextPojo.gzip_content_, 1);
          BasicDBObject dboContent = (BasicDBObject) contentDB.findOne(contentQ, fields);
          if (null != dboContent) {
            byte[] compressedData = ((byte[])dboContent.get(CompressedFullTextPojo.gzip_content_));       
            ByteArrayInputStream in = new ByteArrayInputStream(compressedData);
            GZIPInputStream gzip = new GZIPInputStream(in);       
            int nRead = 0;
            StringBuffer output = new StringBuffer();
            while (nRead >= 0) {
              nRead = gzip.read(storageArray, 0, 200000);
              if (nRead > 0) {
                String s = new String(storageArray, 0, nRead, "UTF-8");
                output.append(s);
              }
            }
            dp.setFullText(output.toString());
            dp.makeFullTextNonTransient();
          }
        }       
      }
      else if (!returnRawData) {
        dp.setFullText(null); // (obviously will normally contain full text anyway)
      }
      else // if ( returnRawData )
      {
        //check if the harvest type is file, return the file instead
        //if file is db return the json
        //get source
        SourcePojo source = getSourceFromKey(dp.getSourceKey());
        if ( source.getExtractType().equals( "File" ))
        {
          //get file from harvester
          String fileURL = dp.getUrl();
          if ( dp.getSourceUrl() != null )
            fileURL = dp.getSourceUrl();
          byte[] bytes = FileHarvester.getFile(fileURL, source);
          if ( bytes == null )
          {
            // Try returning JSON instead
            String json = ApiManager.mapToApi(dp, new DocumentPojoApiMap());
            DocumentFileInterface dfp = new DocumentFileInterface();
           
            dfp.bytes = json.getBytes();
            dfp.mediaType = "application/json";
           
            rp.setResponse(new ResponseObject("Doc Info",true,"Document bytes returned successfully"));
            rp.setData(dfp, null);
            return rp;
          }
          else
          {           
            DocumentFileInterface dfp = new DocumentFileInterface();
            dfp.bytes = bytes;
            dfp.mediaType = getMediaType(fileURL);
            rp.setResponse(new ResponseObject("Doc Info",true,"Document bytes returned successfully"));
            rp.setData(dfp, null);
            return rp;
          }
        }
        else
        {       
          String json = ApiManager.mapToApi(dp, new DocumentPojoApiMap());
          DocumentFileInterface dfp = new DocumentFileInterface();
         
          dfp.bytes = json.getBytes();
          dfp.mediaType = "application/json";
         
          rp.setResponse(new ResponseObject("Doc Info",true,"Document bytes returned successfully"));
          rp.setData(dfp, null);
          return rp;
        }       
      }
      rp.setData(dp, new DocumentPojoApiMap());
      rp.setResponse(new ResponseObject("Doc Info",true,"Feed info returned successfully"));
    }//(end full text vs raw data)
    catch (Exception e)
    {
      // If an exception occurs log the error
      logger.error("Exception Message: " + e.getMessage(), e);
      rp.setResponse(new ResponseObject("Doc Info",false,"error returning feed: " + e.getMessage()));
    }
    // Return Json String representing the user
    return rp;
  }
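
The manual GZIP read loop in getInfo never closes its GZIPInputStream; wrapped in a helper with try-with-resources (Java 7+) it becomes both shorter and safer. A sketch:

  private static String gunzipToString(byte[] compressedData) throws IOException
  {
    StringBuilder output = new StringBuilder();
    byte[] buffer = new byte[200000];
    try (GZIPInputStream gzip = new GZIPInputStream(new ByteArrayInputStream(compressedData))) {
      int nRead;
      while ((nRead = gzip.read(buffer, 0, buffer.length)) > 0) {
        output.append(new String(buffer, 0, nRead, "UTF-8"));
      }
    }
    return output.toString();
  }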


