Examples of opennlp.model.MaxentModel


Examples of opennlp.model.MaxentModel

      modelFileName = args[ai++];
      dataFileName = args[ai++];

      ModelApplier predictor = null;
      try {
        MaxentModel m = new GenericModelReader(new File(modelFileName)).getModel();
        predictor = new ModelApplier(m);
      } catch (Exception e) {
        e.printStackTrace();
        System.exit(0);
      }
View Full Code Here

Examples of opennlp.model.MaxentModel

    StringReader smallReader = new StringReader(smallValues);
    EventStream smallEventStream = new RealBasicEventStream(
        new PlainTextByLineDataStream(smallReader));

    MaxentModel smallModel = GIS.trainModel(100,
        new OnePassRealValueDataIndexer(smallEventStream, 0), false);
    String[] contexts = smallTest.split(" ");
    float[] values = RealValueFileEventStream.parseContexts(contexts);
    double[] smallResults = smallModel.eval(contexts, values);

    String smallResultString = smallModel.getAllOutcomes(smallResults);
    System.out.println("smallResults: " + smallResultString);

    StringReader largeReader = new StringReader(largeValues);
    EventStream largeEventStream = new RealBasicEventStream(
        new PlainTextByLineDataStream(largeReader));

    MaxentModel largeModel = GIS.trainModel(100,
        new OnePassRealValueDataIndexer(largeEventStream, 0), false);
    contexts = largeTest.split(" ");
    values = RealValueFileEventStream.parseContexts(contexts);
    double[] largeResults = largeModel.eval(contexts, values);

    String largeResultString = smallModel.getAllOutcomes(largeResults);
    System.out.println("largeResults: " + largeResultString);

    assertEquals(smallResults.length, largeResults.length);
    for (int i = 0; i < smallResults.length; i++) {
      System.out.println(String.format(
          "classifiy with smallModel: %1$s = %2$f", smallModel.getOutcome(i),
          smallResults[i]));
      System.out.println(String.format(
          "classifiy with largeModel: %1$s = %2$f", largeModel.getOutcome(i),
          largeResults[i]));
      assertEquals(smallResults[i], largeResults[i], 0.01f);
    }
  }
View Full Code Here

Examples of opennlp.model.MaxentModel

      dataFileName = "";
      modelFileName = "weatherModel.txt";
  }
  Predict predictor = null;
  try {
      MaxentModel m = new GenericModelReader(new File(modelFileName)).getModel();
    predictor = new Predict(m);
  } catch (Exception e) {
      e.printStackTrace();
      System.exit(0);
  }
View Full Code Here

Examples of opennlp.model.MaxentModel

    try {
      //parser = TreebankParser.getParser(dataDir, useTagDictionary, useCaseSensitiveTagDictionary, AbstractBottomUpParser.defaultBeamSize, AbstractBottomUpParser.defaultAdvancePercentage);
     
      File d = new File(dataDir);
     
      MaxentModel buildModel = null;
      MaxentModel checkModel = null;
      POSTagger posTagger = null;
      Chunker chunker = null;
      HeadRules headRules = null;

      if (!d.isDirectory()) {
        FileInputStream fis = new FileInputStream(d);
        ParserModel model = new ParserModel(fis);
        parser = new Parser(model, AbstractBottomUpParser.defaultBeamSize, AbstractBottomUpParser.defaultAdvancePercentage);
      } else {
        // This branch is for handling models built with OpenNLP 1.4
        // Once the models are rebuilt using OpenNLP 1.5 this code should be removed
        // @see TreebankParser.java in OpenNLP 1.4
        {
          File f = new File(d, "build.bin.gz"); // TODO consider moving these literals to an XML file or properties file
          buildModel = new opennlp.maxent.io.SuffixSensitiveGISModelReader(f).getModel();
        }
       
        {
          File f = new File(d, "check.bin.gz");
          checkModel = new opennlp.maxent.io.SuffixSensitiveGISModelReader(f).getModel();
        }
       
        {
          File f = new File(d, "pos.model.bin");
          //File f = new File(d, "tag.bin.gz");
          MaxentModel posModel = new opennlp.maxent.io.SuffixSensitiveGISModelReader(f).getModel();
          if (useTagDictionary) {
            File td = new File(d, "tagdict");
            TagDictionary tagDictionary = new POSDictionary(td.getAbsolutePath()); //null;
            posTagger = new POSTaggerME((AbstractModel) posModel, tagDictionary);
          } else {
            // f = new File(d, "dict.bin.gz");
            Dictionary dictionary = null; // new Dictionary();
            posTagger = new POSTaggerME((AbstractModel) posModel, dictionary);

          }
        }
       
       
        {
          File f = new File(d, "chunk.bin.gz");
          MaxentModel chunkModel = new opennlp.maxent.io.SuffixSensitiveGISModelReader(f).getModel();
          chunker = new ChunkerME(chunkModel);
        }
     
        {
          FileReader fr = new FileReader(new File(d, "head_rules"));
View Full Code Here

Examples of opennlp.model.MaxentModel

      modelFileName = args[ai++];
      dataFileName = args[ai++];

      ModelApplier predictor = null;
      try {
        MaxentModel m = new GenericModelReader(new File(modelFileName))
            .getModel();
        predictor = new ModelApplier(m);
      } catch (Exception e) {
        e.printStackTrace();
        System.exit(0);
View Full Code Here

Examples of opennlp.tools.ml.model.MaxentModel

    String sequence[] = new String[0];
    BeamSearchContextGenerator<String> cg = new IdentityFeatureGenerator(sequence);

    String outcomes[] = new String[] {"1", "2", "3"};
    MaxentModel model = new IdentityModel(outcomes);

    BeamSearch<String> bs = new BeamSearch<String>(3, cg, model);

    Sequence seq = bs.bestSequence(sequence, null);
    assertNotNull(seq);
View Full Code Here

Examples of opennlp.tools.ml.model.MaxentModel

  public void testBestSequenceOneElementInput() {
    String sequence[] = {"1"};
    BeamSearchContextGenerator<String> cg = new IdentityFeatureGenerator(sequence);

    String outcomes[] = new String[] {"1", "2", "3"};
    MaxentModel model = new IdentityModel(outcomes);

    BeamSearch<String> bs = new BeamSearch<String>(3, cg, model);

    Sequence seq = bs.bestSequence(sequence, null);
    assertNotNull(seq);
View Full Code Here

Examples of opennlp.tools.ml.model.MaxentModel

  public void testBestSequence() {
    String sequence[] = {"1", "2", "3", "2", "1"};
    BeamSearchContextGenerator<String> cg = new IdentityFeatureGenerator(sequence);

    String outcomes[] = new String[] {"1", "2", "3"};
    MaxentModel model = new IdentityModel(outcomes);

    BeamSearch<String> bs = new BeamSearch<String>(2, cg, model);

    Sequence seq = bs.bestSequence(sequence, null);
    assertNotNull(seq);
View Full Code Here

Examples of opennlp.tools.ml.model.MaxentModel

  public void testBestSequenceWithValidator() {
    String sequence[] = {"1", "2", "3", "2", "1"};
    BeamSearchContextGenerator<String> cg = new IdentityFeatureGenerator(sequence);

    String outcomes[] = new String[] {"1", "2", "3"};
    MaxentModel model = new IdentityModel(outcomes);

    BeamSearch<String> bs = new BeamSearch<String>(2, cg, model, new SequenceValidator<String>(){

      public boolean validSequence(int i, String[] inputSequence,
          String[] outcomesSequence, String outcome) {
View Full Code Here

Examples of opennlp.tools.ml.model.MaxentModel

    // build
    System.err.println("Training builder");
    ObjectStream<Event> bes = new ParserEventStream(parseSamples, rules,
        ParserEventTypeEnum.BUILD, mdict);
    Map<String, String> buildReportMap = new HashMap<String, String>();
    MaxentModel buildModel = TrainUtil.train(bes, mlParams.getSettings("build"), buildReportMap);
    opennlp.tools.parser.chunking.Parser.mergeReportIntoManifest(manifestInfoEntries, buildReportMap, "build");

    parseSamples.reset();

    // check
    System.err.println("Training checker");
    ObjectStream<Event>  kes = new ParserEventStream(parseSamples, rules,
        ParserEventTypeEnum.CHECK);
    Map<String, String> checkReportMap = new HashMap<String, String>();
    MaxentModel checkModel = TrainUtil.train(kes, mlParams.getSettings("check"), checkReportMap);
    opennlp.tools.parser.chunking.Parser.mergeReportIntoManifest(manifestInfoEntries, checkReportMap, "check");

    parseSamples.reset();

    // attach
    System.err.println("Training attacher");
    ObjectStream<Event>  attachEvents = new ParserEventStream(parseSamples, rules,
        ParserEventTypeEnum.ATTACH);
    Map<String, String> attachReportMap = new HashMap<String, String>();
    MaxentModel attachModel = TrainUtil.train(attachEvents, mlParams.getSettings("attach"), attachReportMap);
    opennlp.tools.parser.chunking.Parser.mergeReportIntoManifest(manifestInfoEntries, attachReportMap, "attach");

    // TODO: Remove cast for HeadRules
    return new ParserModel(languageCode, buildModel, checkModel,
        attachModel, posModel, chunkModel,
View Full Code Here
TOP
Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc. and is owned by Oracle Inc. Contact coftware#gmail.com.