Commit 5d6c4129 authored by Shyam Upadhyay's avatar Shyam Upadhyay
Browse files

wip

parent 08552c8d
......@@ -12,6 +12,7 @@ MAIN="$PACKAGE_PREFIX.nytlabs.corpus.ACEEvents"
MAIN="$PACKAGE_PREFIX.nytlabs.corpus.core.NYTAnnotations"
MAIN="$PACKAGE_PREFIX.salience.learning.Main"
MAIN="$PACKAGE_PREFIX.readers.LazyNYTReader"
MAIN="$PACKAGE_PREFIX.features.MyTrainer"
#MAIN="$PACKAGE_PREFIX.nytlabs.corpus.core.Evaluator"
#MAIN="$PACKAGE_PREFIX.features.AllFeatures"
......
......@@ -45,20 +45,20 @@ public class MyTrainer {
NoisyLabeler trainReader = new NoisyLabeler(d1);
NoisyLabeler testReader = new NoisyLabeler(d2);
// NoisySupervisionReader trainReader = new NoisySupervisionReader(new NoisyLabeler(d1), true);
// NoisySupervisionReader testReader = new NoisySupervisionReader(new NoisyLabeler(d2), false);
FlatReader f1 = new FlatReader(new NoisyLabeler(d1), true);
FlatReader f2 = new FlatReader(new NoisyLabeler(d2), false);
getDist(trainReader);
getDist(testReader);
trainReader.reset();
testReader.reset();
getDist(trainReader);
getDist(testReader);
// getDist(trainReader);
// getDist(testReader);
// trainReader.reset();
// testReader.reset();
// getDist(trainReader);
// getDist(testReader);
// MyClassifier learner = new MyClassifier();
// BatchTrainer trainer = new BatchTrainer(learner, trainReader);
// trainer.train(1);
MyClassifier learner = new MyClassifier();
BatchTrainer trainer = new BatchTrainer(learner, trainReader);
trainer.train(1);
// getFeatureWeights(learner);
......@@ -74,13 +74,14 @@ public class MyTrainer {
// LazyNYTReader goldReader = new LazyNYTReader(cc, alreadyCached, docs);
// NoisySupervisionReader gold= new NoisySupervisionReader(goldReader);
// trainReader.reset();
// testReader.reset();
trainReader.reset();
testReader.reset();
f1.reset();
f2.reset();
// on train data
// test(learner, new AllFeatures(), trainReader);
test(learner, new AllFeatures(), f1);
// on test data
// test(learner, new AllFeatures(), testReader);
test(learner, new AllFeatures(), f2);
// LazyNYTReader.main();
}
......@@ -144,23 +145,23 @@ public class MyTrainer {
System.out.println("Dist: pos-"+pos+" neg-"+neg);
}
// public static void test(MyClassifier learner, AllFeatures feat, NoisySupervisionReader gold){
// MyClassifier c = new MyClassifier();
//
//// SparseNetworkLearner.Parameters params = new SparseNetworkLearner.Parameters();
//// c.setParameters(params);
//
// TestDiscrete.testDiscrete(new TestDiscrete(), c, new MyLabel(), gold, true, 1000);
//
//// EventInstance inst= (EventInstance) gold.next();
//// while(inst!=null) {
//// FeatureVector f = feat.classify(inst);
//// System.out.println(f.featuresSize());
//// String ans = learner.discreteValue(f);
//// ScoreSet ss = learner.scores(inst);
//// System.out.println(ss.values()+" "+ans);
//// inst= (EventInstance) gold.next();
//// }
// }
// Evaluates a classifier on the gold-standard examples supplied by the
// FlatReader, using LBJava's TestDiscrete harness (prints a per-label
// precision/recall/F1 table; progress is reported every 1000 examples).
//
// NOTE(review): the 'learner' and 'feat' parameters are currently unused —
// a freshly constructed MyClassifier 'c' is evaluated instead of the trained
// 'learner'. Confirm whether 'learner' was meant to be passed to
// testDiscrete; as written this measures an untrained classifier.
public static void test(MyClassifier learner, AllFeatures feat, FlatReader gold){
// Classifier under evaluation; see NOTE above — replaces the passed-in learner.
MyClassifier c = new MyClassifier();
// SparseNetworkLearner.Parameters params = new SparseNetworkLearner.Parameters();
// c.setParameters(params);
// Args: (results accumulator, classifier, oracle/label classifier,
//        example source, output flag, progress interval).
TestDiscrete.testDiscrete(new TestDiscrete(), c, new MyLabel(), gold, true, 1000);
// Earlier manual evaluation loop, kept from the previous revision:
// EventInstance inst= (EventInstance) gold.next();
// while(inst!=null) {
// FeatureVector f = feat.classify(inst);
// System.out.println(f.featuresSize());
// String ans = learner.discreteValue(f);
// ScoreSet ss = learner.scores(inst);
// System.out.println(ss.values()+" "+ans);
// inst= (EventInstance) gold.next();
// }
}
}
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment