Package joshua.decoder.hypergraph

Examples of joshua.decoder.hypergraph.DiskHyperGraph
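
Every snippet below follows the same read pattern: construct a DiskHyperGraph, call initRead(...) with the serialized items file and rule table, then call readHyperGraph() once per sentence and finally closeReaders(). A minimal sketch of that loop is shown first; the file paths, the sentence count, and the import locations (which vary between Joshua releases) are assumptions for illustration only.

    // Minimal sketch of the common DiskHyperGraph read loop (assumed paths and counts).
    import joshua.corpus.vocab.BuildinSymbol;   // package paths assumed from the Joshua 1.x layout
    import joshua.corpus.vocab.SymbolTable;
    import joshua.decoder.hypergraph.DiskHyperGraph;
    import joshua.decoder.hypergraph.HyperGraph;

    public class ReadDiskHyperGraphs {
      public static void main(String[] args) {
        SymbolTable symbolTable = new BuildinSymbol(null);
        int lmFeatID = 0;                      // feature ID of the language model feature
        String itemsFile = "dev.hg.items";     // hypothetical path to the serialized hypergraph items
        String rulesFile = "dev.hg.rules";     // hypothetical path to the shared rule table
        int numSents = 5;                      // hypothetical number of sentences in the file

        // true => model costs were stored with the hypergraph; no feature functions needed for reading
        DiskHyperGraph diskHG = new DiskHyperGraph(symbolTable, lmFeatID, true, null);
        diskHG.initRead(itemsFile, rulesFile, null);

        for (int sentID = 0; sentID < numSents; sentID++) {
          // one hypergraph per sentence, read in the order they were written
          HyperGraph hg = diskHG.readHyperGraph();
          System.out.println("#Process sentence " + sentID + "; length " + hg.sentLen);
        }
        diskHG.closeReaders();
      }
    }

The writing side of the same class appears in the second snippet below, where the decoder constructs a DiskHyperGraph as its hypergraphSerializer with the feature functions attached.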


    SymbolTable p_symbol = new BuildinSymbol(null);
    int baseline_lm_feat_id = 0;
    int num_sents = 5;
    double scale = 1.0;
    DefaultSemiringParser ds = new HypLenExpectation(1, 0, scale);
    DiskHyperGraph dhg_test = new DiskHyperGraph(p_symbol, baseline_lm_feat_id, true, null); // have model costs stored
    dhg_test.initRead(f_dev_items, f_dev_rules, null);
    for (int sent_id = 0; sent_id < num_sents; sent_id++) {
      System.out.println("#Process sentence " + sent_id);
      HyperGraph hg_test = dhg_test.readHyperGraph();
      ds.insideEstimationOverHG(hg_test);
      CompositeSemiring goalSemiring = ds.getGoalSemiringMember(hg_test);
      goalSemiring.normalizeFactors();
      goalSemiring.printInfor();
    }
View Full Code Here


        logger.warning("No language model feature function found, but saving disk hypergraph");
      } else {
        lmFeatID = languageModel.getFeatureID();
      }

      this.hypergraphSerializer = new DiskHyperGraph(
          this.symbolTable,
          lmFeatID,
          true, // always store model cost
          this.featureFunctions);
       
View Full Code Here

      /*
      else
        f_confusion_grammar = "C:\\Users\\zli\\Documents\\cellspecific.confusion.grammar";
      */
     
      ConfusionExtractor g_con = new ConfusionExtractor(p_symbol);
      DiskHyperGraph dhg = new DiskHyperGraph(p_symbol, baseline_lm_feat_id, saveModelCosts, null);
      dhg.initRead(f_hypergraphs, f_rule_tbl, null);
      //int total_num_sent = 5;
      for (int sent_id = 0; sent_id < total_num_sent; sent_id++) {
        System.out.println("############Process sentence " + sent_id);
        HyperGraph hg = dhg.readHyperGraph();

        if (itemSpecific)
          g_con.itemSpecificConfusionExtraction(hg);
        else
          g_con.cellSpecificConfusionExtraction(hg, hg.sentLen);
View Full Code Here

    int ngramStateID = 0;

    double sumEntropy = 0;

    EntropyOnHGUsingIO ds = new EntropyOnHGUsingIO(1.0);
    DiskHyperGraph diskHG = new DiskHyperGraph(symbolTbl, ngramStateID, true, null); // have model costs stored
    diskHG.initRead(f_dev_items, f_dev_rules, null);
    for (int sentID = 0; sentID < numSents; sentID++) {
      System.out.println("#Process sentence " + sentID);
      HyperGraph testHG = diskHG.readHyperGraph();

      ds.setHyperGraph(testHG);

      ds.runInsideOutside();
      ds.printGoalX();
View Full Code Here

    int ngramStateID = 0;

    double sumEntropy = 0;

    DefaultInsideSemiringParser ds = new EntropyOnHG(1.0);
    DiskHyperGraph dhg_test = new DiskHyperGraph(p_symbol, ngramStateID, true, null); // have model costs stored
    dhg_test.initRead(f_dev_items, f_dev_rules, null);
    for (int sent_id = 0; sent_id < num_sents; sent_id++) {
      System.out.println("#Process sentence " + sent_id);
      HyperGraph hg_test = dhg_test.readHyperGraph();
      ds.setHyperGraph(hg_test);
      ds.insideEstimationOverHG();
      ExpectationSemiring<LogSemiring, ScalarPM> goalSemiring = (ExpectationSemiring<LogSemiring, ScalarPM>) ds.getGoalK();
      //goalSemiring.printInfor();
      goalSemiring.getR().getValue().multiLogNumber(-goalSemiring.getP().getLogValue()); // normalize the expectation by the inside probability
View Full Code Here

    int ngramStateID = 0;

    double sumEntropy = 0;

    DefaultSemiringParser ds = new EntropyOnHG(1, 0, scale);
    DiskHyperGraph diskHG = new DiskHyperGraph(symbolTbl, ngramStateID, true, null); // have model costs stored
    diskHG.initRead(f_dev_items, f_dev_rules, null);
    for (int sentID = 0; sentID < num_sents; sentID++) {
      System.out.println("#Process sentence " + sentID);
      HyperGraph testHG = diskHG.readHyperGraph();
      ds.insideEstimationOverHG(testHG);
      ExpectationSemiring goalSemiring = (ExpectationSemiring) ds.getGoalSemiringMember(testHG);
      //goalSemiring.printInfor();
      goalSemiring.normalizeFactors();
      goalSemiring.printInfor();
View Full Code Here

    int baseline_lm_feat_id = 0;

    ArrayList<HyperGraph> hyperGraphs = new ArrayList<HyperGraph>();

    HypLenSquareExpectation ds = new HypLenSquareExpectation(scale);
    DiskHyperGraph diskHG = new DiskHyperGraph(p_symbol, baseline_lm_feat_id, true, null); // have model costs stored
    diskHG.initRead(f_dev_items, f_dev_rules, null);
    for (int k = 0; k < 136; k++) {
      for (int sent_id = 0; sent_id < num_sents; sent_id++) {
        System.out.println("#Process sentence " + sent_id);
        HyperGraph hg_test;
        if (k == 0) { // read each hypergraph from disk only on the first pass
          hg_test = diskHG.readHyperGraph();
          hyperGraphs.add(hg_test);
        } else {
          hg_test = hyperGraphs.get(sent_id);
        }
        ds.setHyperGraph(hg_test);
        ds.runInsideOutside();
        //ds.printTotalX();
        ds.normalizeGoal();
        double lenSecondOrderExpectation = ds.getSecondOrderExpectation();

        System.out.println("hypLenSquareExpectation is " + lenSecondOrderExpectation);
        ds.clearState();
      }
      HypLenSquareExpectation.logger.info("numTimesCalled=" + k);
    }
    diskHG.closeReaders();
  }
View Full Code Here

    int baseline_lm_feat_id = 0;
    int num_sents = 5;

    double scale = 1.0;
    DefaultSemiringParser ds = new HypLenSquareExpectation(1, 0, scale);
    DiskHyperGraph dhg_test = new DiskHyperGraph(p_symbol, baseline_lm_feat_id, true, null); // have model costs stored
    dhg_test.initRead(f_dev_items, f_dev_rules, null);
    for (int sent_id = 0; sent_id < num_sents; sent_id++) {
      System.out.println("#Process sentence " + sent_id);
      HyperGraph hg_test = dhg_test.readHyperGraph();
      ds.insideEstimationOverHG(hg_test);
      CompositeSemiring goal_semiring = ds.getGoalSemiringMember(hg_test);
      goal_semiring.normalizeFactors();
      goal_semiring.printInfor();
    }
View Full Code Here

    long start_time0 = System.currentTimeMillis();
    long time_on_reading = 0;
    long time_on_orc_extract = 0;
    BufferedReader t_reader_ref = FileUtility.getReadFileStream(f_ref_files);

    DiskHyperGraph dhg_read = new DiskHyperGraph(p_symbolTable, baseline_lm_feat_id, saveModelScores, null);

    dhg_read.initRead(f_hypergraphs, f_rule_tbl, null);

    KBestExtractor oracleKbestExtractor = new KBestExtractor(p_symbolTable, extract_unique_nbest, false, false, true, false, true); // extract k-best oracles
    KBestExtractor rerankOracleKbestExtractor = new KBestExtractor(p_symbolTable, extract_unique_nbest, false, false, false, false, true); // extract k-best oracles
    int topKOracles = 500; // TODO
    //OracleExtractionOnHGV2 orc_extractor = new OracleExtractionOnHGV2(p_symbolTable, baseline_lm_feat_id);
    OracleExtractionOnHGV3 orc_extractor = new OracleExtractionOnHGV3(p_symbolTable);
    String ref_sent = null;
    int sent_id = 0;
    long start_time = System.currentTimeMillis();
    while ((ref_sent = FileUtility.read_line_lzf(t_reader_ref)) != null) {
      System.out.println("############Process sentence " + sent_id);
      start_time = System.currentTimeMillis();
      sent_id++;
      //if (sent_id > 10) break;

      HyperGraph hg = dhg_read.readHyperGraph();
      if (hg == null) continue;

      double orc_bleu = 0;

      //System.out.println("read disk hyp: " + (System.currentTimeMillis() - start_time));
View Full Code Here

    NbestMinRiskReranker mbrReranker = new NbestMinRiskReranker(false, 1.0);

    BufferedWriter onebestWriter = FileUtilityOld.getWriteFileStream(onebestFile);

    System.out.println("############Process file  " + testItemsFile);
    DiskHyperGraph diskHG = new DiskHyperGraph(symbolTbl, ngramStateID, true, null); // have model costs stored
    diskHG.initRead(testItemsFile, testRulesFile, null);
    for (int sentID = 0; sentID < numSents; sentID++) {
      System.out.println("#Process sentence " + sentID);
      HyperGraph testHG = diskHG.readHyperGraph();

      List<String> nbest = cruncher.processOneSent(testHG, sentID, true); // produce the (unordered) n-best list

      String bestHyp = mbrReranker.processOneSent(nbest, sentID); // nbest: list of unique strings

View Full Code Here


