Examples of org.wikipediacleaner.api.MediaWiki

   * @throws APIException
   */
  @Override
  protected boolean retrievePageInformation(
      List<Page> pages) throws APIException {
    MediaWiki mw = MediaWiki.getMediaWikiAccess(worker);

    // Retrieving links in each page
    if (!linksAvailable) {
      for (Page page : pages) {
        mw.retrieveAllLinks(wiki, page, Namespace.MAIN, null, false, false);
      }
      mw.block(true);
      if (shouldStop()) {
        return false;
      }
    }

    // Retrieving disambiguation information in each page
    boolean hasDisambiguationLink = false;
    if (!dabInformationAvailable) {
      if (!wiki.isDisambiguationPagesLoaded()) {
        List<Page> tmpPages = new ArrayList<Page>();
        for (Page page : pages) {
          for (int numLink = 0; numLink < page.getLinks().size(); numLink++) {
            Page link = page.getLinks().get(numLink);
            if (dabPages.containsKey(link.getTitle())) {
              page.getLinks().set(numLink, dabPages.get(link.getTitle()));
              hasDisambiguationLink = true;
            } else if (nonDabPages.containsKey(link.getTitle())) {
              page.getLinks().set(numLink, nonDabPages.get(link.getTitle()));
            } else {
              tmpPages.add(link);
            }
          }
        }
        if (!tmpPages.isEmpty()) {
          mw.retrieveDisambiguationInformation(wiki, tmpPages, null, false, false, true);
        }
        for (Page page : tmpPages) {
          if (Boolean.TRUE.equals(page.isDisambiguationPage())) {
            dabPages.put(page.getTitle(), page);
            hasDisambiguationLink = true;
          } else {
            nonDabPages.put(page.getTitle(), page);
          }
        }
      } else {
        for (Page page : pages) {
          List<Page> links = page.getLinksWithRedirect();
          for (int numLink = 0; numLink < links.size(); numLink++) {
            Page link = links.get(numLink);
            if (Boolean.TRUE.equals(wiki.isDisambiguationPage(link))) {
              link.setDisambiguationPage(Boolean.TRUE);
              hasDisambiguationLink = true;
            } else {
              link.setDisambiguationPage(Boolean.FALSE);
            }
          }
        }
      }
      if (shouldStop()) {
        return false;
      }
    }

    // Retrieving page contents
    if (hasDisambiguationLink && !getContentsAvailable()) {
      List<Page> tmpPages = new ArrayList<Page>();
      for (Page page : pages) {
        boolean toAdd = false;
        for (Page link : page.getLinks()) {
          if (Boolean.TRUE.equals(link.isDisambiguationPage())) {
            toAdd = true;
          }
        }
        if (toAdd) {
          tmpPages.add(page);
        }
      }
      if (!tmpPages.isEmpty()) {
        mw.retrieveContents(wiki, tmpPages, true, false, false, false);
      }
    }

    return true;
  }

Examples of org.wikipediacleaner.api.MediaWiki

   * @see org.wikipediacleaner.gui.swing.basic.BasicWorker#construct()
   */
  @Override
  public Object construct() {
    try {
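      // Apply the text replacements to the selected pages; the returned count decides below whether the results window is shown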
      MediaWiki mw = MediaWiki.getMediaWikiAccess(this);
      Integer count = Integer.valueOf(mw.replaceText(
          pages, replacements, getWikipedia(),
          comment, description, automaticCW, save, true));
      if (showDescription && (count > 0)) {
        InformationWindow.createInformationWindow(
            GT.__(

Examples of org.wikipediacleaner.api.MediaWiki

   * @see org.wikipediacleaner.gui.swing.utils.SwingWorker#construct()
   */
  @Override
  public Object construct() {
    try {
      MediaWiki mw = MediaWiki.getMediaWikiAccess(this);
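      // Load the page contents and every page linking back to it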
      mw.retrieveContents(getWikipedia(), page, false, false, false, true, false);
      mw.retrieveAllBacklinks(getWikipedia(), page, true);
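      // Collect the page plus the redirects pointing to it, loading the contents of each redirect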
      ArrayList<Page> pageAndRedirects = new ArrayList<Page>();
      pageAndRedirects.add(page);
      for (Page backlink : page.getBackLinksWithRedirects()) {
        if ((backlink != null) && (backlink.isRedirect())) {
          pageAndRedirects.add(backlink);
          mw.retrieveContents(getWikipedia(), backlink, false, false, false, false, false);
        }
      }
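      // Load disambiguation information for the page and its redirects, then every link contained in the page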
      mw.retrieveDisambiguationInformation(getWikipedia(), pageAndRedirects, null, false, false, false);
      mw.retrieveAllLinks(getWikipedia(), page, null, null, true, false);
    } catch (APIException e) {
      return e;
    }
    return null;
  }

Examples of org.wikipediacleaner.api.MediaWiki

   * @see org.wikipediacleaner.gui.swing.utils.SwingWorker#construct()
   */
  @Override
  public Object construct() {
    try {
      MediaWiki mw = MediaWiki.getMediaWikiAccess(this);
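      // Retrieve every page linking to this page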
      mw.retrieveAllBacklinks(getWikipedia(), page, true);
    } catch (APIException e) {
      return e;
    }
    return null;
  }

Examples of org.wikipediacleaner.api.MediaWiki

   * @see org.wikipediacleaner.gui.swing.utils.SwingWorker#construct()
   */
  @Override
  public Object construct() {
    try {
      MediaWiki mw = MediaWiki.getMediaWikiAccess(this);
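      // Retrieve contents for a single page or for the whole list, depending on which was supplied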
      if (page != null) {
        mw.retrieveContents(getWikipedia(), page, true, false, true, false, true);
      } else {
        mw.retrieveContents(getWikipedia(), pages, true, true, false, true);
      }
      setText("Analyzing data");
    } catch (APIException e) {
      return e;
    }

Examples of org.wikipediacleaner.api.MediaWiki

   */
  @Override
  public Object construct() {
    try {
      CheckWiki checkWiki = APIFactory.getCheckWiki();
      MediaWiki mw = MediaWiki.getMediaWikiAccess(this);
      EnumWikipedia wiki = getWikipedia();
      StringBuilder result = new StringBuilder();
      StringBuilder details = new StringBuilder();
      List<CheckErrorAlgorithm> algorithms = CheckErrorAlgorithms.getAlgorithms(wiki);
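      // For each active Check Wiki algorithm, load its whitelisted pages and report the ones that don't exist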
      for (CheckErrorAlgorithm algorithm : algorithms) {
        int errorNumber = algorithm.getErrorNumber();
        if (algorithm.isAvailable() &&
            CheckErrorAlgorithms.isAlgorithmActive(wiki, errorNumber)) {
          setText(GT._("Checking whitelist for error {0}", String.valueOf(errorNumber)));
          CWConfigurationError cwConfig = wiki.getCWConfiguration().getErrorConfiguration(errorNumber);
          Set<String> whiteList = cwConfig.getWhiteList();
          if (whiteList != null) {
            details.setLength(0);
            if (whiteList.size() > 0) {
              List<Page> pages = new ArrayList<Page>(whiteList.size());
              for (String pageName : whiteList) {
                Page page = DataManager.getPage(wiki, pageName, null, null, null);
                pages.add(page);
              }
              Collections.sort(pages);
              mw.retrieveContents(wiki, pages, true, false, false, false);
              for (Page page : pages) {
                if (Boolean.FALSE.equals(page.isExisting())) {
                  details.append("<li>");
                  details.append(GT._("The page {0} doesn''t exist on Wikipedia", page.getTitle()));
                  details.append("</li>");

Examples of org.wikipediacleaner.api.MediaWiki

      if (todoSubpageAttr != null) {
        Page todoSubpage = talkPage.getSubPage(todoSubpageAttr);
        mapTodoSubpages.put(page, todoSubpage);
      }
    }
    MediaWiki mw = MediaWiki.getMediaWikiAccess(worker);
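    // Retrieve the talk pages (only their section 0 if requested) and the to-do subpages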
    if (section0) {
      mw.retrieveSectionContents(wiki, mapTalkPages.values(), 0, false);
    } else {
      mw.retrieveContents(wiki, mapTalkPages.values(), false, false, false, false);
    }
    mw.retrieveContents(wiki, mapTodoSubpages.values(), true, false, false, false);
    if (mw.shouldStop()) {
      return;
    }

    // Update warning
    for (Page page : pages) {

Examples of org.wikipediacleaner.api.MediaWiki

   * @see org.wikipediacleaner.gui.swing.utils.SwingWorker#construct()
   */
  @Override
  public Object construct() {
    try {
      MediaWiki mw = MediaWiki.getMediaWikiAccess(this);
      final API api = APIFactory.getAPI();
      EnumWikipedia wiki = getWikipedia();
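      // Load the page contents and its links in the main namespace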
      mw.retrieveContents(wiki, page, false, false, false, true, false);
      api.retrieveLinks(wiki, page, Namespace.MAIN, knownPages, true, true);

      // Retrieve disambiguation information if not already retrieved
      List<Page> links = new ArrayList<Page>();
      for (Page link : page.getLinks()) {
        if (link.isDisambiguationPage() == null) {
          links.add(link);
        }
      }
      if (!links.isEmpty()) {
        mw.retrieveDisambiguationInformation(wiki, links, knownPages, true, false, true);
      }

      // Retrieve more information on disambiguation pages
      for (Page link : page.getLinks()) {
        if (Boolean.TRUE.equals(link.isDisambiguationPage())) {
          Iterator<Page> itLink = link.getRedirectIteratorWithPage();
          while (itLink.hasNext()) {
            Page link2 = itLink.next();
            if (!link2.isRedirect()) {
              mw.retrieveAllLinks(wiki, link2, null, knownPages, false, false);
            }
            if (link.hasWiktionaryTemplate() &&
                (link.getContents() == null)) {
              mw.retrieveContents(wiki, link2, false, false, false, true, false);
            }
          }
        }
      }

      if (CheckErrorAlgorithms.isAlgorithmActive(wiki, 508)) {
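        // Templates are only needed when Check Wiki algorithm 508 is active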
        mw.retrieveAllTemplates(wiki, page, false);
      }
      mw.block(true);
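      // If the page does not exist, look for pages with similar titles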
      if (Boolean.FALSE.equals(page.isExisting())) {
        mw.retrieveSimilarPages(wiki, page);
      }
      setText("Analyzing data");
      PageAnalysis analysis = page.getAnalysis(page.getContents(), true);
      CheckError.analyzeErrors(algorithms, analysis, false);
    } catch (APIException e) {

Examples of org.wikipediacleaner.api.MediaWiki

  protected boolean retrievePageInformation(
      List<Page> pages) throws APIException {

    // Retrieving page contents
    if (!getContentsAvailable()) {
      MediaWiki mw = MediaWiki.getMediaWikiAccess(worker);
      mw.retrieveContents(wiki, pages, true, false, false, true);
    }

    return true;
  }

Examples of org.wikipediacleaner.api.MediaWiki

        pages.addAll(constructInternalPageList());
        break;
      }

      if (retrieveDisambiguationInformation) {
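        // Load disambiguation status only for pages where it is not yet known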
        MediaWiki mw = MediaWiki.getMediaWikiAccess(this);
        List<Page> tmpPages = new ArrayList<Page>();
        for (Page tmpPage : pages) {
          if (tmpPage.isDisambiguationPage() == null) {
            tmpPages.add(tmpPage);
          }
        }
        if (!tmpPages.isEmpty()) {
          mw.retrieveDisambiguationInformation(getWikipedia(), tmpPages, null, false, true, true);
        }
      }
      if (!shouldContinue()) {
        return null;
      }