Package persistence.antlr.collections.impl

Examples of persistence.antlr.collections.impl.Vector
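
The excerpts below all use this Vector the same way: obtain it from a token manager or grammar, then index it with elementAt(int) up to size(), skipping null slots. Here is a minimal standalone sketch of that pattern; appendElement(Object) is assumed from the ANTLR 2.x collections API, since the excerpts themselves only show the constructors, size(), and elementAt().

import persistence.antlr.collections.impl.Vector;

public class VectorUsageSketch {
    public static void main(String[] args) {
        // Build a small vocabulary-like Vector; the capacity argument
        // mirrors "new Vector(100)" in the Grammar constructor below.
        Vector v = new Vector(10);
        v.appendElement("\"begin\"");  // assumed API: appendElement(Object)
        v.appendElement("IDENT");
        v.appendElement(null);         // unused slots are stored as null

        // Enumerate it the way the code-generator excerpts do:
        // walk indexes up to size() and skip null entries.
        for (int i = 0; i < v.size(); i++) {
            String s = (String)v.elementAt(i);
            if (s != null) {
                System.out.println(s + " = " + i);
            }
        }
    }
}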


        println("token identifiers.  Some tokens are literals, and because of that");
        println("they have no identifiers.  Literals are double-quoted.");
        tabs++;

        // Enumerate all the valid token types
        Vector v = tm.getVocabulary();
        for (int i = Token.MIN_USER_TYPE; i < v.size(); i++) {
            String s = (String)v.elementAt(i);
            if (s != null) {
                println(s + " = " + i);
            }
        }


    public Grammar(String className_, Tool tool_, String superClass) {
        className = className_;
        antlrTool = tool_;
        symbols = new Hashtable();
        options = new Hashtable();
        rules = new Vector(100);
        this.superClass = superClass;
    }

  }
  /** Generate the lexer CSharp file */
  public  void gen(LexerGrammar g) throws IOException {
    // If debugging, create a new sempred vector for this grammar
    if (g.debuggingOutput)
      semPreds = new Vector();

    setGrammar(g);
    if (!(grammar instanceof LexerGrammar)) {
      antlrTool.panic("Internal error generating lexer");
    }

  public void gen(ParserGrammar g) throws IOException {

    // if debugging, set up a new vector to keep track of sempred
    //   strings for this grammar
    if (g.debuggingOutput)
      semPreds = new Vector();

    setGrammar(g);
    if (!(grammar instanceof ParserGrammar)) {
      antlrTool.panic("Internal error generating parser");
    }

      println("factory.setMaxNodeType("+g.tokenManager.maxTokenType()+");");

          // Walk the token vocabulary and generate code to register every TokenID->ASTNodeType
          // mapping specified in the  tokens {...} section with the ASTFactory.
      Vector v = g.tokenManager.getVocabulary();
      for (int i = 0; i < v.size(); i++) {
        String s = (String)v.elementAt(i);
        if (s != null) {
          TokenSymbol ts = g.tokenManager.getTokenSymbol(s);
          if (ts != null && ts.getASTNodeType() != null) {
            println("factory.setTokenTypeASTNodeType(" + s + ", \"" + ts.getASTNodeType() + "\");");
          }

    println("public static readonly string[] tokenNames_ = new string[] {");
    tabs++;

    // Walk the token vocabulary and generate a Vector of strings
    // from the tokens.
    Vector v = grammar.tokenManager.getVocabulary();
    for (int i = 0; i < v.size(); i++)
    {
      String s = (String)v.elementAt(i);
      if (s == null)
      {
        s = "<"+String.valueOf(i)+">";
      }
      if ( !s.startsWith("\"") && !s.startsWith("<") ) {
        TokenSymbol ts = (TokenSymbol)grammar.tokenManager.getTokenSymbol(s);
        if ( ts!=null && ts.getParaphrase()!=null ) {
          s = StringUtils.stripFrontBack(ts.getParaphrase(), "\"", "\"");
        }
      }
      else if (s.startsWith("\"")) {
        s = StringUtils.stripFrontBack(s, "\"", "\"");
      }
      print(charFormatter.literalString(s));
      if (i != v.size()-1) {
        _print(",");
      }
      _println("");
    }

    currentOutput = null;
    exitIfError();
  }
  protected void genTokenDefinitions(TokenManager tm) throws IOException {
    // Generate a definition for each token type
    Vector v = tm.getVocabulary();

    // Do special tokens manually
    println("public const int EOF = " + Token.EOF_TYPE + ";");
    println("public const int NULL_TREE_LOOKAHEAD = " + Token.NULL_TREE_LOOKAHEAD + ";");

    for (int i = Token.MIN_USER_TYPE; i < v.size(); i++) {
      String s = (String)v.elementAt(i);
      if (s != null) {
        if ( s.startsWith("\"") ) {
          // a string literal
          StringLiteralSymbol sl = (StringLiteralSymbol)tm.getTokenSymbol(s);
          if ( sl==null ) {

     */
    public boolean deterministicImpliedPath(BlockWithImpliedExitPath blk) {
        /** The lookahead depth for this decision considering implied exit path */
        int k;
        boolean det = true;
        Vector alts = blk.getAlternatives();
        int nalts = alts.size();
        currentBlock.altj = -1; // comparing against implicit optional/exit alt

        if (DEBUG_ANALYZER) System.out.println("deterministicImpliedPath");
        for (int i = 0; i < nalts; i++) {    // check follow against all alts
            Alternative alt = blk.getAlternativeAt(i);

  }
  /** Generate the lexer C++ files */
  public  void gen(LexerGrammar g) throws IOException {
    // If debugging, create a new sempred vector for this grammar
    if (g.debuggingOutput)
      semPreds = new Vector();

    if( g.charVocabulary.size() > 256 )
      antlrTool.warning(g.getFilename()+": C++ mode does not support more than 8 bit characters (vocabulary size now: "+g.charVocabulary.size()+")");

    setGrammar(g);
