Package: org.tmatesoft.hg.core

Examples of org.tmatesoft.hg.core.Nodeid


            if (!df.exists()) {
              Internals implRepo = repo.getImplHelper();
              String msg = String.format("File %s known as normal in dirstate (%d, %d), doesn't exist at %s", fname, r.modificationTime(), r.size(), implRepo.getStoragePath(df));
              throw new HgInvalidFileException(msg, null).setFileName(fname);
            }
            Nodeid rev = getDirstateParentManifest().nodeid(fname);
            // rev might be null here if fname comes to dirstate as a result of a merge operation
            // where one of the parents (first parent) had no fname file, but second parent had.
            // E.g. fork revision 3, revision 4 gets .hgtags, few modifications and merge(3,12)
            // see Issue 14 for details
            if (rev == null || !areTheSame(f, df, rev)) {
View Full Code Here


  }
 
  // XXX refactor checkLocalStatus methods in more OO way
  private void checkLocalStatusAgainstBaseRevision(Set<Path> baseRevNames, ManifestRevision collect, int baseRevision, Path fname, FileInfo f, HgStatusInspector inspector) throws HgRuntimeException {
    // fname is in the dirstate, either Normal, Added, Removed or Merged
    Nodeid nid1 = collect.nodeid(fname);
    HgManifest.Flags flags = collect.flags(fname);
    HgDirstate.Record r;
    final HgDirstate ds = getDirstateImpl();
    if (nid1 == null) {
      // not known at the time of baseRevision:
      // normal, added, merged: either added or copied since base revision.
      // removed: nothing to report,
      if (ds.checkNormal(fname) != null || ds.checkMerged(fname) != null) {
        try {
          // XXX perhaps, shall take second parent into account if not null, too?
          Nodeid nidFromDirstate = getDirstateParentManifest().nodeid(fname);
          if (nidFromDirstate != null) {
            // see if file revision known in this parent got copied from one of baseRevNames
            Path origin = HgStatusCollector.getOriginIfCopy(repo, fname, nidFromDirstate, collect.files(), baseRevision);
            if (origin != null) {
              inspector.copied(getPathPool().mangle(origin), fname);
              return;
            }
          }
          // fall-through, report as added
        } catch (HgInvalidFileException ex) {
          // report failure and continue status collection
          inspector.invalid(fname, ex);
        }
      } else if ((r = ds.checkAdded(fname)) != null) {
        if (r.copySource() != null && baseRevNames.contains(r.copySource())) {
          // shall not remove rename source from baseRevNames, as the source
          // likely needs to be reported as Removed as well
          inspector.copied(r.copySource(), fname);
          return;
        }
        // fall-through, report as added
      } else if (ds.checkRemoved(fname) != null) {
        // removed: removed file was not known at the time of baseRevision, and we should not report it as removed
        return;
      }
      inspector.added(fname);
    } else {
      // was known; check whether clean or modified
      Nodeid nidFromDirstate = getDirstateParentManifest().nodeid(fname);
      if ((r = ds.checkNormal(fname)) != null && nid1.equals(nidFromDirstate)) {
        // regular file, was the same up to WC initialization. Check if was modified since, and, if not, report right away
        // same code as in #checkLocalStatusAgainstFile
        final boolean timestampEqual = f.lastModified() == r.modificationTime(), sizeEqual = r.size() == f.length();
        boolean handled = false;
View Full Code Here

        if (r.length < 7) {
          repo.getLog().dump(getClass(), Severity.Error, "Expect at least 7 zero-separated fields in the merge state file, not %d. Entry skipped", r.length);
          continue;
        }
        Path p1fname = pathPool.path(r[3]);
        Nodeid nidP1 = m1.nodeid(p1fname);
        Nodeid nidCA = nodeidPool.unify(Nodeid.fromAscii(r[5]));
        HgFileRevision p1 = new HgFileRevision(hgRepo, nidP1, m1.flags(p1fname), p1fname);
        HgFileRevision ca;
        if (nidCA == nidP1 && r[3].equals(r[4])) {
          ca = p1;
        } else {
          ca = new HgFileRevision(hgRepo, nidCA, null, pathPool.path(r[4]));
        }
        HgFileRevision p2;
        if (!wcp2.isNull() || !r[6].equals(r[4])) {
          final Path p2fname = pathPool.path(r[6]);
          Nodeid nidP2 = m2.nodeid(p2fname);
          if (nidP2 == null) {
            assert false : "There's not enough information (or I don't know where to look) in merge/state to find out what's the second parent";
            nidP2 = NULL;
          }
          p2 = new HgFileRevision(hgRepo, nidP2, m2.flags(p2fname), p2fname);
View Full Code Here

    final byte lineBreak = (byte) '\n';
    int breakIndex1 = indexOf(data, lineBreak, offset, bufferEndIndex);
    if (breakIndex1 == -1) {
      throw new HgInvalidDataFormatException("Bad Changeset data");
    }
    Nodeid _nodeid = Nodeid.fromAscii(data, 0, breakIndex1);
    int breakIndex2 = indexOf(data, lineBreak, breakIndex1 + 1, bufferEndIndex);
    if (breakIndex2 == -1) {
      throw new HgInvalidDataFormatException("Bad Changeset data");
    }
    String _user;
View Full Code Here

        lr1.read(new LineReader.SimpleLineCollector(), c);
        for (String s : c) {
          int x = s.indexOf(' ');
          try {
            if (x > 0) {
              Nodeid nid = Nodeid.fromAscii(s.substring(0, x));
              String name = new String(s.substring(x+1));
              if (repo.getRepo().getChangelog().isKnown(nid)) {
                // copy name part not to drag complete line
                bm.put(name, nid);
              } else {
View Full Code Here

   */
  public void updateActive(Nodeid p1, Nodeid p2, Nodeid child) throws HgIOException, HgRepositoryLockException {
    if (activeBookmark == null) {
      return;
    }
    Nodeid activeRev = getRevision(activeBookmark);
    if (!activeRev.equals(p1) && !activeRev.equals(p2)) {
      return; // TestCommit#testNoBookmarkUpdate
    }
    if (child.equals(activeRev)) {
      return;
    }
View Full Code Here

    final HgRepository srcRepo = hgLookup.detect(srcRepoLoc);
    assertEquals("[sanity]", "default", srcRepo.getWorkingCopyBranchName());
    RepoUtils.modifyFileAppend(f1, "change1");
    HgCommitCommand commitCmd = new HgCommitCommand(srcRepo).message("Commit 1");
    assertTrue(commitCmd.execute().isOk());
    final Nodeid cmt1 = commitCmd.getCommittedRevision();
    // commit 2
    new HgCheckoutCommand(srcRepo).changeset(7).clean(true).execute();
    assertEquals("[sanity]", "no-merge", srcRepo.getWorkingCopyBranchName());
    RepoUtils.createFile(new File(srcRepoLoc, "file-new"), "whatever");
    new HgAddRemoveCommand(srcRepo).add(Path.create("file-new")).execute();
    commitCmd = new HgCommitCommand(srcRepo).message("Commit 2");
    assertTrue(commitCmd.execute().isOk());
    final Nodeid cmt2 = commitCmd.getCommittedRevision();
    //
    // pull
    HgServer server = new HgServer().start(srcRepoLoc);
    final HgRepository dstRepo = hgLookup.detect(dstRepoLoc);
    try {
View Full Code Here

    assertEquals(HgPhase.Public, phaseHelper.getPhase(4, null));
    new HgCheckoutCommand(dstRepo).changeset(4).clean(true).execute();
    RepoUtils.modifyFileAppend(f1, "// aaa");
    HgCommitCommand commitCmd = new HgCommitCommand(dstRepo).message("Commit 1");
    assertTrue(commitCmd.execute().isOk());
    final Nodeid cmt1 = commitCmd.getCommittedRevision();
    //
    // new child rev for parent locally draft, remotely public
    assertEquals(HgPhase.Draft, phaseHelper.getPhase(5, null));
    assertEquals(HgPhase.Draft, phaseHelper.getPhase(7, null));
    assertEquals(HgPhase.Draft, phaseHelper.getPhase(8, null));
    new HgCheckoutCommand(dstRepo).changeset(8).clean(true).execute();
    RepoUtils.modifyFileAppend(f1, "// bbb");
    commitCmd = new HgCommitCommand(dstRepo).message("Commit 2");
    assertTrue(commitCmd.execute().isOk());
    final Nodeid cmt2 = commitCmd.getCommittedRevision();
    // both new revisions shall be draft
    phaseHelper = new PhasesHelper(HgInternals.getImplementationRepo(dstRepo)); // refresh PhasesHelper
    assertEquals(HgPhase.Draft, phaseHelper.getPhase(dstRepo.getChangelog().getRevisionIndex(cmt1), cmt1));
    assertEquals(HgPhase.Draft, phaseHelper.getPhase(dstRepo.getChangelog().getRevisionIndex(cmt2), cmt2));
    //
View Full Code Here

    Map<String,?> props = Collections.singletonMap(Internals.CFG_PROPERTY_CREATE_PHASEROOTS, true);
    final HgLookup hgLookup = new HgLookup(new BasicSessionContext(props, null));
    HgRepository srcRepo = hgLookup.detect(srcRepoLoc);   
    // revisions 6 and 9 are secret, so
    // index of revisions 4 and 5 won't change, but that of 7 and 8 would
    Nodeid r7 = srcRepo.getChangelog().getRevision(7);
    Nodeid r8 = srcRepo.getChangelog().getRevision(8);

    HgRepository dstRepo = hgLookup.detect(dstRepoLoc);
    HgServer server = new HgServer().publishing(false).start(srcRepoLoc);
    try {
      final HgRemoteRepository srcRemote = hgLookup.detect(server.getURL());
View Full Code Here

    FileWriter fileWriter = null;
    workingDirLock.acquire();
    try {
      fileWriter = new FileWriter(bookmarksFile);
      for (String bm : bookmarks.keySet()) {
        Nodeid nid = bookmarks.get(bm);
        fileWriter.write(String.format("%s %s\n", nid.toString(), bm));
      }
      fileWriter.flush();
    } catch (IOException ex) {
      throw new HgIOException("Failed to serialize bookmarks", ex, bookmarksFile);
    } finally {
View Full Code Here

TOP

Related Classes of org.tmatesoft.hg.core.Nodeid

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc., which is owned by Oracle Inc. Contact: coftware#gmail.com.