Package com.google.enterprise.connector.notes.client

Examples of com.google.enterprise.connector.notes.client.NotesDateTime
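The snippets below revolve around a small set of NotesDateTime operations: a NotesSession creates one from a placeholder date string, setNow() or setAnyTime() adjusts it, timeDifference() compares two values in seconds, and recycle() releases the underlying Notes handle. A minimal sketch of the create-and-recycle pattern, assuming only an open NotesSession named ns (the enclosing method would declare RepositoryException):

  // Minimal sketch of the pattern used throughout the excerpts below. The
  // "1/1/1900" seed is just a placeholder that setNow() overwrites.
  NotesDateTime pollTime = ns.createDateTime("1/1/1900");
  try {
    pollTime.setNow();
    // ... use pollTime, e.g. pollTime.timeDifference(lastUpdated) ...
  } finally {
    pollTime.recycle();  // back-end handles must be released explicitly
  }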



  public void testPollSourceDatabaseByLastModified() throws Exception {
    NotesDateTime lastUpdated =
        connectorSession.createNotesSession().createDateTime("1/1/1970");
    List<NotesDocumentMock> docs = MockFixture.generateDocuments(1);
    Vector<?> vecLastModified =
        docs.get(docs.size() - 1).getItemValue(NCCONST.ITM_LASTMODIFIED);
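The excerpt above is cut off right after fetching the item value. The way the production code below turns such a Vector back into a NotesDateTime is a cast of its first element; a short illustrative continuation along those lines (vecLastModified and lastUpdated are the locals from the test; everything else is an assumption of this sketch):

    // Illustrative only: getItemValue returns a raw Vector; for a date/time
    // item, its first element can be cast to NotesDateTime, the same pattern
    // pollSourceDatabase uses below for DITM_LASTUPDATE.
    if (vecLastModified.size() > 0) {
      NotesDateTime lastModified =
          (NotesDateTime) vecLastModified.firstElement();
      // timeDifference is measured in seconds; a positive value means
      // lastModified is later than the 1/1/1970 baseline created above.
      double secondsSinceBaseline = lastModified.timeDifference(lastUpdated);
    }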


    LOGGER.entering(CLASS_NAME, METHOD);
    try {
      // TODO: use Date or Calendar to avoid the Notes library
      // dependency on the operating system's settings for date
      // formats.
      NotesDateTime pollTime = ns.createDateTime("1/1/1900");
      pollTime.setNow();

      NotesView templateView = cdb.getView(NCCONST.VIEWTEMPLATES);
      NotesView srcdbView = cdb.getView(NCCONST.VIEWDATABASES);
      srcdbView.refresh();
      NotesView vwSubmitQ = cdb.getView(NCCONST.VIEWSUBMITQ);
      NotesView vwCrawlQ = cdb.getView(NCCONST.VIEWCRAWLQ);

      // TODO: Make this loop shutdown aware

      NotesDocument srcdbDoc = srcdbView.getFirstDocument();
      while (null != srcdbDoc) {
        vwSubmitQ.refresh();
        vwCrawlQ.refresh();
        int qDepth = vwSubmitQ.getEntryCount() + vwCrawlQ.getEntryCount();
        LOGGER.logp(Level.FINER, CLASS_NAME, METHOD,
            "Total number of documents in crawl and submit queues: " + qDepth);
        if (qDepth > maxDepth) {
          LOGGER.logp(Level.FINE, CLASS_NAME, METHOD,
              "Queue threshold reached. Suspending polling. size/max="
              + qDepth + "/" + maxDepth);
          srcdbDoc.recycle();
          break;
        }
        LOGGER.logp(Level.FINER, CLASS_NAME, METHOD,
            "Source Database Config Document " +
            srcdbDoc.getItemValue(NCCONST.DITM_DBNAME));
        pollSourceDatabase(ns, cdb, srcdbDoc, templateView, pollTime);
        NotesDocument prevDoc = srcdbDoc;
        srcdbDoc = srcdbView.getNextDocument(prevDoc);
        prevDoc.recycle();
      }
      vwSubmitQ.recycle();
      vwCrawlQ.recycle();
      pollTime.recycle();
      templateView.recycle();
      srcdbView.recycle();
    } catch (Exception e) {
      LOGGER.log(Level.SEVERE, CLASS_NAME, e);
    } finally {
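The loop above recycles its views and pollTime at the end of the try block. The later snippets on this page use a different arrangement: declare the handles up front, then recycle them in a finally block via Util.recycle. A sketch of that arrangement for the same handles, purely illustrative (whether Util.recycle accepts every type used here is an assumption):

    // Illustrative rearrangement only: declare the handles before the try so
    // a finally block can recycle them unconditionally. Util.recycle is the
    // null-safe helper used by setLastCacheUpdate below; accepting a
    // NotesDateTime here is an assumption of this sketch.
    NotesDateTime pollTime = null;
    NotesView templateView = null;
    NotesView srcdbView = null;
    try {
      pollTime = ns.createDateTime("1/1/1900");
      pollTime.setNow();
      templateView = cdb.getView(NCCONST.VIEWTEMPLATES);
      srcdbView = cdb.getView(NCCONST.VIEWDATABASES);
      // ... walk the source database documents as in the loop above ...
    } catch (Exception e) {
      LOGGER.log(Level.SEVERE, CLASS_NAME, e);
    } finally {
      Util.recycle(pollTime);
      Util.recycle(templateView);
      Util.recycle(srcdbView);
    }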

  /**
   * Polls a single source database for modified documents and queues a
   * crawl request for each one.
   */
  private void pollSourceDatabase(NotesSession ns, NotesDatabase cdb,
      NotesDocument srcdbDoc, NotesView templateView, NotesDateTime pollTime) {
    final String METHOD = "pollSourceDatabase";
    NotesDateTime lastUpdated = null;
    Vector<?> lastUpdatedV = null;
    LOGGER.entering(CLASS_NAME, METHOD);

    try {
      // There are configuration options to stop and disable databases
      // In either of these states, we skip processing the database
      if (1 != srcdbDoc.getItemValueInteger(NCCONST.DITM_CRAWLENABLED)) {
        LOGGER.logp(Level.FINE, CLASS_NAME, METHOD,
            "Skipping database - Database is DISABLED.");
        return;
      }
      if (1 == srcdbDoc.getItemValueInteger(NCCONST.DITM_STOPPED)) {
        LOGGER.logp(Level.FINE, CLASS_NAME, METHOD,
            "Skipping database - Database is STOPPED.");
        return;
      }

      // When was this database last updated?
      lastUpdatedV = srcdbDoc.getItemValue(NCCONST.DITM_LASTUPDATE);
      if (0 < lastUpdatedV.size()) {
        lastUpdated = (NotesDateTime) lastUpdatedV.firstElement();
        LOGGER.logp(Level.FINE, CLASS_NAME, METHOD,
            "Last processed time was " + lastUpdated);
      } else {
        lastUpdated = ns.createDateTime("1/1/1980");
        LOGGER.logp(Level.FINE, CLASS_NAME, METHOD,
            "Database has never been processed.");
      }

      // What's our poll interval?
      double pollInterval = srcdbDoc.getItemValueInteger(
          NCCONST.DITM_UPDATEFREQUENCY);
      double elapsedMinutes = pollTime.timeDifference(lastUpdated) / 60;
      LOGGER.logp(Level.FINE, CLASS_NAME, METHOD,
          "Time difference is: " + elapsedMinutes);

      // Check poll interval
      if (pollInterval > elapsedMinutes) {
        LOGGER.logp(Level.FINE, CLASS_NAME, METHOD,
            "Skipping database - Poll interval has not yet elapsed.");
        lastUpdated.recycle();
        ns.recycle(lastUpdatedV);
        return;
      }

      // Get modified documents
      NotesDatabase srcdb = ns.getDatabase(null, null);
      srcdb.openByReplicaID(
          srcdbDoc.getItemValueString(NCCONST.DITM_SERVER),
          srcdbDoc.getItemValueString(NCCONST.DITM_REPLICAID));

      // Did the database open succeed? If not, exit.
      if (!srcdb.isOpen()) {
        LOGGER.logp(Level.FINE, CLASS_NAME, METHOD,
            "Skipping database - Database could not be opened.");
        lastUpdated.recycle();
        ns.recycle(lastUpdatedV);
        srcdb.recycle();
        return;
      }

      String dbName = srcdbDoc.getItemValueString(NCCONST.DITM_DBNAME);
      String authType = srcdbDoc.getItemValueString(NCCONST.DITM_AUTHTYPE);
      LOGGER.log(Level.FINE,
          "{0} database is configured using {1} authentication type",
          new Object[] {dbName, authType});
      if (processACL(ns, cdb, srcdb, srcdbDoc)) {
        // Scan database ACLs and update H2 cache
        LOGGER.log(Level.FINE, "Scan ACLs and update H2 for {0} replica",
            srcdb.getReplicaID());
        notesConnectorSession.getUserGroupManager().updateRoles(srcdb);

        // If the ACL has changed and we are using per-document
        // ACLs, we need to resend all documents.
        if (authType.contentEquals(NCCONST.AUTH_ACL)) {
          LOGGER.logp(Level.FINE, CLASS_NAME, METHOD,
              "Database ACL has changed - Resetting last update "
              + "to reindex all document ACLs.");
          lastUpdated = ns.createDateTime("1/1/1980");
        }
      }

      // From the template, we get the search string to determine
      // which documents should be processed
      NotesDocument templateDoc = templateView.getDocumentByKey(
          srcdbDoc.getItemValueString(NCCONST.DITM_TEMPLATE), true);
      String searchString = templateDoc.getItemValueString(
          NCCONST.TITM_SEARCHSTRING);
      LOGGER.logp(Level.FINE, CLASS_NAME, METHOD,
          "Search string is: " + searchString);

      NotesDocumentCollection dc = srcdb.search(searchString, lastUpdated, 0);
      LOGGER.logp(Level.FINE, CLASS_NAME, METHOD,
          srcdb.getFilePath() + " Number of documents to be processed: "
          + dc.getCount());
      NotesDocument curDoc = dc.getFirstDocument();
      while (null != curDoc) {
        String NotesURL = curDoc.getNotesURL();
        LOGGER.logp(Level.FINER, CLASS_NAME, METHOD,
            "Processing document " + NotesURL);
        if (curDoc.hasItem(NCCONST.NCITM_CONFLICT)) {
          LOGGER.logp(Level.FINER, CLASS_NAME, METHOD,
              "Skipping conflict document " + NotesURL);
          NotesDocument prevDoc = curDoc;
          curDoc = dc.getNextDocument(prevDoc);
          prevDoc.recycle();
          continue;
        }

        // Create a new crawl request
        NotesDocument crawlRequestDoc = cdb.createDocument();
        crawlRequestDoc.appendItemValue(NCCONST.NCITM_STATE, NCCONST.STATENEW);
        crawlRequestDoc.appendItemValue(NCCONST.ITM_MIMETYPE,
            NCCONST.DEFAULT_DOCMIMETYPE);

        // Create the fields necessary to crawl the document
        crawlRequestDoc.appendItemValue(NCCONST.ITMFORM,
            NCCONST.FORMCRAWLREQUEST);
        crawlRequestDoc.appendItemValue(NCCONST.NCITM_UNID,
            curDoc.getUniversalID());
        crawlRequestDoc.appendItemValue(NCCONST.NCITM_REPLICAID,
            srcdbDoc.getItemValueString(NCCONST.DITM_REPLICAID));
        crawlRequestDoc.appendItemValue(NCCONST.NCITM_SERVER,
            srcdbDoc.getItemValueString(NCCONST.DITM_SERVER));
        crawlRequestDoc.appendItemValue(NCCONST.NCITM_TEMPLATE,
            srcdbDoc.getItemValueString(NCCONST.DITM_TEMPLATE));
        crawlRequestDoc.appendItemValue(NCCONST.NCITM_DOMAIN,
            srcdbDoc.getItemValueString(NCCONST.DITM_DOMAIN));
        crawlRequestDoc.appendItemValue(NCCONST.NCITM_AUTHTYPE,
            srcdbDoc.getItemValueString(NCCONST.DITM_AUTHTYPE));

        // Map the lock field directly across
        crawlRequestDoc.appendItemValue(NCCONST.ITM_LOCK,
            srcdbDoc.getItemValueString(NCCONST.DITM_LOCKATTRIBUTE)
            .toLowerCase());

        // Add any database level meta data to the document
        crawlRequestDoc.appendItemValue(NCCONST.ITM_GMETAREPLICASERVERS,
            srcdbDoc.getItemValue(NCCONST.DITM_REPLICASERVERS));
        crawlRequestDoc.appendItemValue(NCCONST.ITM_GMETACATEGORIES,
            srcdbDoc.getItemValue(NCCONST.DITM_DBCATEGORIES));
        crawlRequestDoc.appendItemValue(NCCONST.ITM_GMETADATABASE,
            srcdbDoc.getItemValueString(NCCONST.DITM_DBNAME));
        crawlRequestDoc.appendItemValue(NCCONST.ITM_GMETANOTESLINK, NotesURL);

        crawlRequestDoc.save();
        crawlRequestDoc.recycle();
        crawlRequestDoc = null;
        NotesDateTime lastModified = curDoc.getLastModified();
        if (lastModified.timeDifference(lastUpdated) > 0) {
          lastUpdated = lastModified;
        }
        NotesDocument prevDoc = curDoc;
        curDoc = dc.getNextDocument(prevDoc);
        prevDoc.recycle();
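The poll-interval logic above (DITM_UPDATEFREQUENCY versus the elapsed time) is the core NotesDateTime arithmetic on this page: timeDifference reports the gap in seconds, and the code divides by 60 to get minutes. Distilled into a standalone, hypothetical helper (the name and signature are mine, not the connector's):

  // Hypothetical helper, not part of the connector: returns true once the
  // configured poll interval (in minutes) has elapsed since lastUpdated.
  // timeDifference is in seconds; dividing by 60.0 keeps fractional minutes.
  private static boolean pollIntervalElapsed(NotesDateTime now,
      NotesDateTime lastUpdated, double pollIntervalMinutes)
      throws RepositoryException {
    double elapsedMinutes = now.timeDifference(lastUpdated) / 60.0;
    return elapsedMinutes >= pollIntervalMinutes;
  }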

    boolean isReset = false;
    NotesSession nSession = null;
    NotesDatabase dbConfig = null;
    NotesView vwConfig = null;
    NotesDocument docConfig = null;
    NotesDateTime dtTarget = null;
    try {
      nSession = connectorSession.createNotesSession();
      dbConfig = nSession.getDatabase(
              connectorSession.getServer(), connectorSession.getDatabase());
      if (!dbConfig.isOpen()) {
        throw new RepositoryException(
            "GSA configuration database is not open");
      }
      dtTarget = nSession.createDateTime("1/1/1970");
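      // setAnyTime() clears the time component, leaving a wildcard
      // (date-only) value, mirroring lotus.domino.DateTime#setAnyTime.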
      dtTarget.setAnyTime();
      vwConfig = dbConfig.getView(NCCONST.VIEWSYSTEMSETUP);
      docConfig = vwConfig.getFirstDocument();
      if (docConfig == null) {
        LOGGER.logp(Level.SEVERE, CLASS_NAME, METHOD,
            "System configuration document not found.");

 
  private void setLastCacheUpdate() {
    final String METHOD = "setLastCacheUpdate";
    LOGGER.entering(CLASS_NAME, METHOD);

    NotesDateTime now = null;
    NotesView systemView = null;
    NotesDocument systemDoc = null;
    try {
      now = notesSession.createDateTime("1/1/1900");
      now.setNow();
      systemView = connectorDatabase.getView(NCCONST.VIEWSYSTEMSETUP);
      systemDoc = systemView.getFirstDocument();
      if (systemDoc == null) {
        LOGGER.logp(Level.SEVERE, CLASS_NAME, METHOD,
            "System configuration document not found.");
        return;
      }
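      // A NotesDateTime can be stored directly as an item value; the
      // shouldUpdate snippet below reads it back via getItemValue and a cast.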
      systemDoc.replaceItemValue(NCCONST.SITM_LASTCACHEUPDATE, now);
      systemDoc.save(true);
      LOGGER.logp(Level.INFO, CLASS_NAME, METHOD,
          "Directory Cache last update time set to " + now.toString());
    } catch (RepositoryException e) {
      LOGGER.log(Level.SEVERE, CLASS_NAME, e);
    } finally {
      Util.recycle(systemDoc);
      Util.recycle(systemView);

    final String METHOD = "shouldUpdate";
    LOGGER.entering(CLASS_NAME, METHOD);

    boolean needToUpdate = true;

    NotesDateTime lastCacheUpdate = null;
    NotesDateTime now = null;
    NotesView systemView = null;
    NotesDocument systemDoc = null;
    Vector<?> vecLastCacheUpdate = null;
    try {
      lastCacheUpdate = notesSession.createDateTime("1/1/2010");
      now = notesSession.createDateTime("1/1/1900");
      now.setNow();

      systemView = connectorDatabase.getView(NCCONST.VIEWSYSTEMSETUP);
      systemDoc = systemView.getFirstDocument();
      if (systemDoc == null) {
        LOGGER.logp(Level.SEVERE, CLASS_NAME, METHOD,
            "System configuration document not found.");
        return false;
      }

      // Get the update interval from the system configuration
      int cacheUpdateInterval = connectorSession.getCacheUpdateInterval();
      vecLastCacheUpdate = systemDoc
          .getItemValue(NCCONST.SITM_LASTCACHEUPDATE);
      if (vecLastCacheUpdate.size() > 0) {
        LOGGER.logp(Level.FINE, CLASS_NAME, METHOD,
            "vecLastCacheUpdate is " + vecLastCacheUpdate);
        lastCacheUpdate = (NotesDateTime) vecLastCacheUpdate.firstElement();
        LOGGER.logp(Level.FINE, CLASS_NAME, METHOD,
            "Last directory cache update time is: " + lastCacheUpdate);
      }

      double elapsedMinutes = now.timeDifference(lastCacheUpdate) / 60;
      LOGGER.logp(Level.FINE, CLASS_NAME, METHOD,
          "Time difference since last directory cache update is: "
          + elapsedMinutes);

      // Check poll interval

  }

  protected void setDateProperties() throws RepositoryException {
    final String METHOD = "setDateProperties";

    NotesDateTime dt = (NotesDateTime) crawlDoc
        .getItemValueDateTimeArray(NCCONST.ITM_GMETALASTUPDATE).elementAt(0);
    Calendar tmpCal = Calendar.getInstance();
    tmpCal.setTime(dt.toJavaDate());
    docProps.put(SpiConstants.PROPNAME_LASTMODIFIED,
        asList(Value.getDateValue(tmpCal)));
    LOGGER.logp(Level.FINEST, CLASS_NAME, METHOD,
        "Last update is " + tmpCal.toString());

    SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd' 'HH:mm:ss' 'Z");
    String nclastupdate = sdf.format(dt.toJavaDate());
    docProps.put(NCCONST.PROPNAME_NCLASTUPDATE,
        asList(Value.getStringValue(nclastupdate)));
    dt.recycle();

    NotesDateTime createdate = (NotesDateTime) crawlDoc
        .getItemValueDateTimeArray(NCCONST.ITM_GMETACREATEDATE).elementAt(0);
    String nccreatedate = sdf.format(createdate.toJavaDate());
    docProps.put(NCCONST.PROPNAME_CREATEDATE,
        asList(Value.getStringValue(nccreatedate)));
    createdate.recycle();
  }
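toJavaDate() is the bridge from NotesDateTime into standard Java time handling, as the SimpleDateFormat call above shows. Purely as an illustrative alternative (not what the connector ships), the same formatting can be done with java.time on Java 8+, given a NotesDateTime dt:

  // Illustrative alternative only (assumes Java 8+ and a NotesDateTime "dt").
  // Requires imports: java.time.ZoneId, java.time.format.DateTimeFormatter.
  DateTimeFormatter formatter = DateTimeFormatter
      .ofPattern("yyyy-MM-dd HH:mm:ss Z")
      .withZone(ZoneId.systemDefault());
  String nclastupdate = formatter.format(dt.toJavaDate().toInstant());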
