Package weka.classifiers

Examples of weka.classifiers.Classifier


          //repTree.setNoPruning(true);// Since the tree is only used as a classifier (a conservative extension of what is currently done) and is never inspected directly, disabling pruning would not cause problems.
          // As part of learning, we also prune nodes where the ratio of correctly classified pairs to incorrectly classified ones is comparable.
          // The significant advantage of not pruning is that the result is no longer sensitive to the order of elements in the tree, and hence does not depend on the order in which elements were obtained by concurrent threads.
          //final weka.classifiers.lazy.IB1 ib1 = new weka.classifiers.lazy.IB1();
          //final weka.classifiers.trees.J48 classifier = new weka.classifiers.trees.J48();
          final Classifier classifier = repTree;
          classifier.buildClassifier(dataCollector.trainingData);
          System.out.println("Entries in the classifier: " + dataCollector.trainingData.numInstances());
          System.out.println(classifier);
          dataCollector = null;// throw all the training data away
         
          {// serialise the classifier; this is the only way to store it
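
The excerpt stops at the start of the serialisation block. A minimal sketch of how it might continue, using weka.core.SerializationHelper; the file name is a placeholder, not taken from the original source:

          // Hedged sketch: persist the trained classifier and load it back later.
          weka.core.SerializationHelper.write("classifier.ser", classifier);// "classifier.ser" is hypothetical
          Classifier restored = (Classifier) weka.core.SerializationHelper.read("classifier.ser");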


        
      if (target.getSenses().size() > 1)
      {
        Instances ins = this.loadFilteredInstances(target);
        System.out.println("Trying " + target.getIndex());
        Classifier cModel = null;
        String evaluator = this.getValue("evaluator");
        String search = this.getValue("search");
        if (evaluator == null)
          evaluator = "";
        if (search == null)
          search = "";
        File f = new File("data/weka/" + this.classifier + "@" + Dictionary.normalizeLemmaforFile(target.getLemma()) + "@" + evaluator + "@" + search + ".csf");
        if (f.exists())
        {// reuse a previously trained and serialised model
          cModel = (Classifier) Util.loadObject(f);
        }
        else
        {// instantiate the classifier by class name, train it and cache it on disk
          cModel = (Classifier) Class.forName(this.classifier).newInstance();
          cModel.buildClassifier(ins);
          Util.writeObject(f, cModel);
        }
        // Build a sample instance: each attribute counts how often its lemma occurs in the context window.
        Instance base = ins.firstInstance();
        Instance sample = new Instance(base.numAttributes());
        sample.setDataset(ins);
        int[] c = new int[base.numAttributes()];// Java zero-initialises the counts
        for (int i = 0; i < base.numAttributes(); i++)
        {
          Attribute att = base.attribute(i);
          for (AmbiguousWord word : window)
          {
            if (word.getLemma().equals(att.name()))
            {
              c[i]++;
            }
          }
        }
        for (int i = 0; i < base.numAttributes(); i++)
        {
          sample.setValue(i, (double) c[i]);
        }

        double[] w = cModel.distributionForInstance(sample);
        for (int j = 0; j < target.getSenses().size(); j++)
        {
          ArrayList<String> dwords = new ArrayList<String>(window.size());
          for (AmbiguousWord word : window)
          {
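
The distribution w returned by distributionForInstance holds one probability per class value, here one per sense. A brief sketch of how the most probable sense index could be read off it (the original excerpt continues differently, iterating over all senses):

        // Hedged sketch: index of the most probable class value in w.
        int best = 0;
        for (int k = 1; k < w.length; k++)
          if (w[k] > w[best])
            best = k;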

      uas_F = new RBoxPlot<String>("Time", "F-measure", new File("time_" + name + "_f.pdf")),
      uas_Diff = new RBoxPlot<String>("Time", "Diff-measure", new File("time_" + name + "_Diff.pdf"));
    SquareBagPlot gr_diff_to_f = new SquareBagPlot("f-measure", "diff-based measure", new File("diff-to-f.pdf"), 0, 1, true);

    Set<Integer> allFrames = collectionOfTraces.get(UAVAllSeeds).tracesForUAVandFrame.get(UAVAllSeeds).keySet();
    Classifier[] classifiers = loadClassifierFromArff(arffName);
    ProgressIndicator progress = new ProgressIndicator("UAS", allFrames.size() * classifiers.length);
    LearnerEvaluationConfiguration initConfiguration = new LearnerEvaluationConfiguration(learnerInitConfiguration.config);
    initConfiguration.setLabelConverter(learnerInitConfiguration.getLabelConverter());// if-then automata are not copied here: the properties are pairwise, so augmentation from if-then can be carried out once at the start rather than on every iteration
    initConfiguration.config.setUseConstraints(false);// do not use if-then during learning (see the explanation above)
   

        super.tearDown();
    }
   
    public void testBuildAndClassifyJ48() throws Exception
    {
        Classifier classifier = new J48();
        FastVector attributes = new FastVector();
        FastVector values = new FastVector();
        values.addElement("sunny");
        values.addElement("overcast");
        values.addElement("rain");
        attributes.addElement(new Attribute("outlook", values));
        values = new FastVector();
        values.addElement("false");
        values.addElement("true");
        attributes.addElement(new Attribute("windy", values));
        attributes.addElement(new Attribute("humidity"));
        attributes.addElement(new Attribute("temperature"));
        values = new FastVector();
        values.addElement("yes");
        values.addElement("no");
        attributes.addElement(new Attribute("play", values));
        Instances dataset = new Instances("", attributes, 15);
        dataset.add(createInstance(dataset, "sunny", "false", 85, 85, "no"));
        dataset.add(createInstance(dataset, "sunny", "true", 80, 90, "no"));
        dataset.add(createInstance(dataset, "overcast", "false", 83, 78, "yes"));
        dataset.add(createInstance(dataset, "rain", "false", 70, 96, "yes"));
        dataset.add(createInstance(dataset, "rain", "false", 68, 80, "yes"));
        dataset.add(createInstance(dataset, "rain", "true", 65, 70, "no"));
        dataset.add(createInstance(dataset, "overcast", "true", 64, 65, "yes"));
        dataset.add(createInstance(dataset, "sunny", "false", 72, 95, "no"));
        dataset.add(createInstance(dataset, "sunny", "false", 69, 70, "yes"));
        dataset.add(createInstance(dataset, "rain", "false", 75, 80, "yes"));
        dataset.add(createInstance(dataset, "sunny", "true", 75, 70, "yes"));
        dataset.add(createInstance(dataset, "overcast", "true", 72, 90, "yes"));
        dataset.add(createInstance(dataset, "overcast", "false", 81, 75, "yes"));
        dataset.add(createInstance(dataset, "rain", "true", 71, 91, "no"));
        dataset.setClassIndex(4);// "play" is the class attribute: value 0 = "yes", value 1 = "no"
        classifier.buildClassifier(dataset);
        System.out.println(classifier);
       
        // A predicted value of 1.0 therefore means "no" and 0.0 means "yes"; the class value is withheld from the first two instances.
        double result = classifier.classifyInstance(createInstance(dataset, "sunny", "false", 85, 85, Null.getValue()));
        assertEquals(1.0, result, .1);
        result = classifier.classifyInstance(createInstance(dataset, "sunny", "true", 80, 90, Null.getValue()));
        assertEquals(1.0, result, .1);
        result = classifier.classifyInstance(createInstance(dataset, "overcast", "false", 83, 78, "yes"));
        assertEquals(0.0, result, .1);
        result = classifier.classifyInstance(createInstance(dataset, "rain", "false", 70, 96, "yes"));
        assertEquals(0.0, result, .1);
        result = classifier.classifyInstance(createInstance(dataset, "rain", "false", 68, 80, "yes"));
        assertEquals(0.0, result, .1);
    }
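
The createInstance helper is not part of this excerpt. A plausible sketch of it, assuming the pre-3.6 Weka Instance API used above and that a null class value should be marked missing (both assumptions, since the original helper is not shown):

    // Hypothetical helper, not from the original source: builds one row in the
    // declared attribute order (outlook, windy, humidity, temperature, play).
    private Instance createInstance(Instances dataset, String outlook, String windy,
                                    double humidity, double temperature, String play)
    {
        Instance instance = new Instance(5);
        instance.setDataset(dataset);// needed before setting nominal values by name
        instance.setValue(0, outlook);
        instance.setValue(1, windy);
        instance.setValue(2, humidity);
        instance.setValue(3, temperature);
        if (play == null)
            instance.setMissing(4);// class unknown at prediction time
        else
            instance.setValue(4, play);
        return instance;
    }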

        mAlgorithmClass = mClassifierName;
       
        LOG.debug("WEKA options = " + options);
        LOG.debug("Classifier class name = " + mAlgorithmClass);

        Classifier classifier;
        try
        {
            classifier = loadClassifier(options);
            classifier.buildClassifier(dataset);
           
            if (mSummaryOutput != null)
            {
                StringBuilder columns = new StringBuilder();
                columns.append("Trained on: [");
                for (int i = 0; i < metadata.getColumnCount(); i++)
                {
                    columns.append(metadata.getColumnMetadata(i).getName());
                    columns.append(" ");
                }
                columns.append("]\n");
                mSummaryOutput.write(ControlBlock.LIST_BEGIN);
                String description = "";
                if (mDescriptionInput != null)
                    description = TupleUtilities.getString(readBlock(mDescriptionInput), INPUT_DESCRIPTION);

                mSummaryOutput.write(("DESCRIPTION: \n" + description + classifier.toString() + "\n" + columns).getBytes());
                mSummaryOutput.write(ControlBlock.LIST_END);
            }
            if (mFoldsInput != null)
            {
                Integer folds = TupleUtilities.getNumber(readBlock(mFoldsInput), INPUT_FOLDS);
                Evaluation ev = new Evaluation(dataset);
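
The excerpt ends right after the Evaluation object is created. A hedged sketch of how the cross-validation might continue; the random seed and the summary write are assumptions, not taken from the original:

                // Hedged sketch: k-fold cross-validation followed by a textual summary.
                ev.crossValidateModel(classifier, dataset, folds, new java.util.Random(1));
                if (mSummaryOutput != null)
                    mSummaryOutput.write(ev.toSummaryString().getBytes());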

            options = DEFAULT_OPTIONS;
       
        Class<? extends Classifier> classifierClass =
            Class.forName(mAlgorithmClass).asSubclass(Classifier.class);
        Classifier classifier = classifierClass.newInstance();
        String[] opt = weka.core.Utils.splitOptions(options);
        ((OptionHandler) classifier).setOptions(opt);
       
        return classifier;
    }
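
A brief usage sketch of the factory above, assuming mAlgorithmClass names weka.classifiers.trees.J48; the option string (confidence factor 0.25, minimum of 2 instances per leaf) is illustrative, not from the original:

        // Hypothetical call site for the loader shown above.
        Classifier c = loadClassifier("-C 0.25 -M 2");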