Package org.tmatesoft.hg.core

Examples of org.tmatesoft.hg.core.Nodeid


      File f = getRepo().getFile(this);
      if (f.exists()) {
        // single revision can't be greater than 2^32, shall be safe to cast to int
        return Internals.ltoi(f.length());
      }
      Nodeid fileRev = getWorkingCopyRevision();
      if (fileRev == null) {
        throw new HgInvalidRevisionException(String.format("File %s is not part of working copy", getPath()), null, fileRevisionIndex);
      }
      fileRevisionIndex = getRevisionIndex(fileRev);
    }
View Full Code Here


        if (fis != null) {
          new FileUtils(getRepo().getSessionContext().getLog(), this).closeQuietly(fis);
        }
      }
    } else {
      Nodeid fileRev = getWorkingCopyRevision();
      if (fileRev == null) {
        // no content for this data file in the working copy - it is not part of the actual working state.
        // XXX perhaps, shall report this to caller somehow, not silently pass no data?
        return;
      }
View Full Code Here

   * @throws HgInvalidControlFileException if failed to access revlog index/data entry. <em>Runtime exception</em>
   * @throws HgRuntimeException subclass thereof to indicate other issues with the library. <em>Runtime exception</em>
   */
  private Nodeid getWorkingCopyRevision() throws HgRuntimeException {
    final Pair<Nodeid, Nodeid> wcParents = getRepo().getWorkingCopyParents();
    Nodeid p = wcParents.first().isNull() ? wcParents.second() : wcParents.first();
    final HgChangelog clog = getRepo().getChangelog();
    final int csetRevIndex;
    if (p.isNull()) {
      // no dirstate parents
      getRepo().getSessionContext().getLog().dump(getClass(), Info, "No dirstate parents, resort to TIP", getPath());
      // if it's a repository with no dirstate, use TIP then
      csetRevIndex = clog.getLastRevision();
      if (csetRevIndex == -1) {
        // shall not happen provided there's .i for this data file (hence at least one cset)
        // and perhaps exception is better here. However, null as "can't find" indication seems reasonable.
        return null;
      }
    } else {
      // common case to avoid searching complete changelog for nodeid match
      final Nodeid tipRev = clog.getRevision(TIP);
      if (tipRev.equals(p)) {
        csetRevIndex = clog.getLastRevision();
      } else {
        // bad luck, need to search honestly
        csetRevIndex = clog.getRevisionIndex(p);
      }
    }
    Nodeid fileRev = getRepo().getManifest().getFileRevision(csetRevIndex, getPath());
    // it's possible for a file to be in working dir and have store/.i but to belong e.g. to a different
    // branch than the one from dirstate. Thus it's possible to get null fileRev
    // which would serve as an indication this data file is not part of working copy
    return fileRev;
  }
View Full Code Here

    if (!isCopy(fileRevisionIndex)) {
      throw new UnsupportedOperationException();
    }
    Path.Source ps = getRepo().getSessionContext().getPathFactory();
    Path origin = ps.path(metadata.find(fileRevisionIndex, "copy"));
    Nodeid originRev = Nodeid.fromAscii(metadata.find(fileRevisionIndex, "copyrev")); // XXX reuse/cache Nodeid
    return new HgFileRevision(getRepo(), originRev, null, origin);
  }
View Full Code Here

    return csetTo;
  }

  public void build() throws HgRuntimeException {
    fileCompleteHistory.clear(); // just in case, #build() is not expected to be called more than once
    Nodeid fileRev = df.getRepo().getManifest().getFileRevision(csetTo, df.getPath());
    int fileRevIndex = df.getRevisionIndex(fileRev);
    FileRenameHistory frh = new FileRenameHistory(csetFrom, csetTo);
    if (frh.isOutOfRange(df, fileRevIndex)) {
      return;
    }
View Full Code Here

    }
   

    public void treeElement(TreeElement entry) throws HgCallbackTargetException, HgRuntimeException {
      // check consistency
      Nodeid cset = entry.changeset().getNodeid();
      errorCollector.assertEquals(entry.changesetRevision(), cset);
      Pair<HgChangeset, HgChangeset> p = entry.parents();
      Pair<HgChangeset, HgChangeset> parents_a = p;
      Pair<Nodeid, Nodeid> parents_b = entry.parentRevisions();
      if (parents_b.first().isNull()) {
View Full Code Here

  @Rule
  public ErrorCollectorExt errorCollector = new ErrorCollectorExt();
 
  @Test
  public void testRegularSetOperations() {
    Nodeid n1 = Nodeid.fromAscii("c75297c1786734589175c673db40e8ecaa032b09");
    Nodeid n2 = Nodeid.fromAscii("3b7d51ed4c65082f9235e3459e282d7ff723aa97");
    Nodeid n3 = Nodeid.fromAscii("14dac192aa262feb8ff6645a102648498483a188");
    Nodeid n4 = Nodeid.fromAscii("1deea2f332183c947937f6df988c2c6417efc217");
    Nodeid[] nodes = { n1, n2, n3 };
    RevisionSet a = new RevisionSet(nodes);
    Nodeid[] nodes1 = { n3, n4 };
    RevisionSet b = new RevisionSet(nodes1);
    Nodeid[] nodes2 = { n1, n2, n3, n4 };
View Full Code Here

      final int latestRevision = fileNode.getLastRevision();
      //
      final long start_0 = System.nanoTime();
      final Map<Nodeid, Nodeid> changesetToNodeid_0 = new HashMap<Nodeid, Nodeid>();
      for (int fileRevisionIndex = 0; fileRevisionIndex <= latestRevision; fileRevisionIndex++) {
        Nodeid fileRevision = fileNode.getRevision(fileRevisionIndex);
        Nodeid changesetRevision = fileNode.getChangesetRevision(fileRevision);
        changesetToNodeid_0.put(changesetRevision, fileRevision);
      }
      final long end_0 = System.nanoTime();
      //
      final long start_1 = System.nanoTime();
      fileMap = new HgRevisionMap<HgDataFile>(fileNode).init();
      final long start_1a = System.nanoTime();
      final Map<Nodeid, Nodeid> changesetToNodeid_1 = new HashMap<Nodeid, Nodeid>();
      for (int revision = 0; revision <= latestRevision; revision++) {
        final Nodeid nodeId = fileMap.revision(revision);
        int localCset = fileNode.getChangesetRevisionIndex(revision);
        final Nodeid changesetId = clog.getRevision(localCset);
//        final Nodeid changesetId = fileNode.getChangesetRevision(nodeId);
        changesetToNodeid_1.put(changesetId, nodeId);
      }
      final long end_1 = System.nanoTime();
      //
      final long start_2 = System.nanoTime();
      clogMap = new HgRevisionMap<HgChangelog>(clog).init();
      fileMap = new HgRevisionMap<HgDataFile>(fileNode).init();
      final Map<Nodeid, Nodeid> changesetToNodeid_2 = new HashMap<Nodeid, Nodeid>();
      final long start_2a = System.nanoTime();
      for (int revision = 0; revision <= latestRevision; revision++) {
        Nodeid nidFile = fileMap.revision(revision);
        int localCset = fileNode.getChangesetRevisionIndex(revision);
        Nodeid nidCset = clogMap.revision(localCset);
        changesetToNodeid_2.put(nidCset, nidFile);
      }
      final long end_2 = System.nanoTime();
      Assert.assertEquals(changesetToNodeid_1, changesetToNodeid_2);
      //
View Full Code Here

 
  private int[] collectLocalTagRevisions(HgRevisionMap<HgChangelog> clogrmap, TagInfo[] allTags, IntMap<List<TagInfo>> tagLocalRev2TagInfo) {
    int[] tagLocalRevs = new int[allTags.length];
    int x = 0;
    for (int i = 0; i < allTags.length; i++) {
      final Nodeid tagRevision = allTags[i].revision();
      final int tagRevisionIndex = clogrmap.revisionIndex(tagRevision);
      if (tagRevisionIndex != HgRepository.BAD_REVISION) {
        tagLocalRevs[x++] = tagRevisionIndex;
        List<TagInfo> tagsAssociatedWithRevision = tagLocalRev2TagInfo.get(tagRevisionIndex);
        if (tagsAssociatedWithRevision == null) {
View Full Code Here

    repository.getManifest().walk(new HgManifest.Inspector() {
      private int[] tagIndexAtRev = new int[4]; // it's unlikely there would be a lot of tags associated with a given cset

      public boolean begin(int mainfestRevision, Nodeid nid, int changelogRevision) {
        // may do better here using tagLocalRev2TagInfo, but need to change a lot, too lazy now
        Nodeid cset = clogrmap.revision(changelogRevision);
        Arrays.fill(tagIndexAtRev, -1);
        for (int i = 0, x = 0; i < allTags.length; i++) {
          if (cset.equals(allTags[i].revision())) {
            tagIndexAtRev[x++] = i;
            if (x == tagIndexAtRev.length) {
              // expand twice as much
              int[] expanded = new int[x << 1];
              System.arraycopy(tagIndexAtRev, 0, expanded, 0, x);
              expanded[x] = -1; // just in case there'd be no more tags associated with this cset
              tagIndexAtRev = expanded;
            }
          }
        }
        if (tagIndexAtRev[0] == -1) {
          System.out.println("Can't happen, provided we iterate over revisions with tags only");
        }
        return true;
      }

      public boolean next(Nodeid nid, Path fname, HgManifest.Flags flags) {
        Nodeid[] m = file2rev2tag.get(fname);
        if (m == null) {
          file2rev2tag.put(fname, m = new Nodeid[allTags.length]);
        }
        for (int tagIndex : tagIndexAtRev) {
          if (tagIndex == -1) {
            break;
          }
          if (m[tagIndex] != null) {
            System.out.printf("There's another revision (%s) associated with tag %s already while we try to associate %s\n", m[tagIndex].shortNotation(), allTags[tagIndex].name(), nid.shortNotation());
          }
          m[tagIndex] = nid;
        }
        return true;
      }
     
      public boolean end(int manifestRevision) {
        // nothing to finalize per manifest revision; keep the walk going
        return true;
      }
     
    }, tagLocalRevs);
    System.out.printf("Cache built: %d ms\n", System.currentTimeMillis() - start);
    //
    // look up specific file. This part is fast.
    HgDataFile fileNode = repository.getFileNode(targetPath);
    final Nodeid[] allTagsOfTheFile = file2rev2tag.get(targetPath);
    // TODO if fileNode.isCopy, repeat for each getCopySourceName()
    for (int fileRevIndex = 0; fileRevIndex < fileNode.getRevisionCount(); fileRevIndex++) {
      Nodeid fileRev = fileNode.getRevision(fileRevIndex);
      int changesetRevIndex = fileNode.getChangesetRevisionIndex(fileRevIndex);
      List<String> associatedTags = new LinkedList<String>();
      for (int i = 0; i < allTagsOfTheFile.length; i++) {
        if (fileRev.equals(allTagsOfTheFile[i])) {
          associatedTags.add(allTags[i].name());
        }
      }
      System.out.printf("%3d%7d%s\n", fileRevIndex, changesetRevIndex, associatedTags);
    }
View Full Code Here

TOP

Related Classes of org.tmatesoft.hg.core.Nodeid

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc. and owned by Oracle Inc. Contact coftware#gmail.com.