/// <summary>
/// Infer.NET "two coins" tutorial: two fair coin flips; queries the prior
/// probability that both are heads, then the posterior over the first coin
/// after observing that they are not both heads.
/// </summary>
public class FirstExample
{
    public static void Main()
    {
        // Two independent fair coin flips.
        Variable<bool> firstCoin = Variable.Bernoulli(0.5).Named("firstCoin");
        Variable<bool> secondCoin = Variable.Bernoulli(0.5).Named("secondCoin");

        // Derived variable: true only when both coins come up heads.
        Variable<bool> bothHeads = (firstCoin & secondCoin).Named("bothHeads");

        InferenceEngine ie = new InferenceEngine();
        if (!(ie.Algorithm is VariationalMessagePassing))
        {
            // Prior query: P(bothHeads) = 0.25 for two fair coins.
            Console.WriteLine("Probability both coins are heads: " + ie.Infer(bothHeads));

            // Condition on the observation that the coins are NOT both heads,
            // then query the posterior: P(firstCoin | !bothHeads) = 1/3.
            bothHeads.ObservedValue = false;
            Console.WriteLine("Probability distribution over firstCoin: " + ie.Infer(firstCoin));
        }
        else
        {
            Console.WriteLine("This example does not run with Variational Message Passing");
        }
    }
}
first example of Infer.NET from Microsoft Research
7. Program output:
   Probability both coins are heads: Bernoulli(0.25)
   Probability distribution over firstCoin: Bernoulli(0.3333)
   最初は2枚のコインが両方とも表となる確率。
   次はどちらか若しくは両方が裏となった場合に最初のコインが表となる条件付確率。
// Slide 17 excerpt — the inference step from FirstExample:
// set an ObservedValue, then call ie.Infer again to get the posterior.
if (!(ie.Algorithm is VariationalMessagePassing))
{
    Console.WriteLine("Probability both coins are heads: " + ie.Infer(bothHeads));
    bothHeads.ObservedValue = false;
    Console.WriteLine("Probability distribution over firstCoin: " + ie.Infer(firstCoin));
}
基本的にはObservedValueをセットして、ie.Inferを呼ぶだけ
(Essentially, just set ObservedValue and call ie.Infer.)
first example of Infer.NET from Microsoft Research
// Slide 18 excerpt (LDAModel.cs) — construction of the LDA model.
Range D = new Range(NumDocuments).Named("D"); // documents
Range W = new Range(SizeVocab).Named("W");    // vocabulary
Range T = new Range(NumTopics).Named("T");    // topics
NumWordsInDoc = Variable.Array<int>(D).Named("NumWordsInDoc");
Range WInD = new Range(NumWordsInDoc[D]).Named("WInD"); // words within each document (jagged)

// Surround model by a stochastic If block so that we can compute model evidence
Evidence = Variable.Bernoulli(0.5).Named("Evidence");
IfBlock evidenceBlock = null;
// We cannot calculate evidence in the power-plate version
if (!UsePowerPlate)
{
    evidenceBlock = Variable.If(Evidence);
}

// Per-document topic distributions: Theta[d] ~ Dirichlet(ThetaPrior[d]).
Theta = Variable.Array<Vector>(D);
Theta.SetSparsity(ThetaSparsity);
Theta.SetValueRange(T);
ThetaPrior = Variable.Array<Dirichlet>(D).Named("ThetaPrior");
Theta[D] = Variable<Vector>.Random(ThetaPrior[D]);

// Per-topic word distributions: Phi[t] ~ Dirichlet(PhiPrior[t]).
Phi = Variable.Array<Vector>(T);
Phi.SetSparsity(PhiSparsity);
Phi.SetValueRange(W);
PhiPrior = Variable.Array<Dirichlet>(T).Named("PhiPrior");
Phi[T] = Variable<Vector>.Random(PhiPrior[T]);

// Observed data: word indices and their counts, jagged by document.
Words = Variable.Array(Variable.Array<int>(WInD), D).Named("Words");
// NOTE(review): the closing `);` below was truncated in the extracted source; restored here.
WordCounts = Variable.Array(Variable.Array<double>(WInD), D).Named("WordCounts");
LDAModel.cs
// Slide 19 excerpt — size the observed-data arrays from the input corpus.
// NOTE(review): `wordsInDoc` is presumably the per-document word list passed
// to the enclosing method; its declaration is not visible in this excerpt.
int numDocs = wordsInDoc.Length;
NumDocuments.ObservedValue = numDocs;
int[] numWordsInDoc = new int[numDocs];
int[][] wordIndices = new int[numDocs][];
double[][] wordCounts = new double[numDocs][];
// Slide 20 excerpt — attach observations, set the priors, and run inference.
NumWordsInDoc.ObservedValue = numWordsInDoc;
Words.ObservedValue = wordIndices;
WordCounts.ObservedValue = wordCounts;
ThetaInit.ObservedValue = GetInitialisation(numDocs, NumTopics, ThetaSparsity);

// Symmetric Dirichlet priors: alpha per document over topics,
// beta per topic over the vocabulary.
ThetaPrior.ObservedValue = new Dirichlet[numDocs];
for (int i = 0; i < numDocs; i++)
{
    ThetaPrior.ObservedValue[i] = Dirichlet.Symmetric(NumTopics, alpha);
}
PhiPrior.ObservedValue = new Dirichlet[NumTopics];
for (int i = 0; i < NumTopics; i++)
{
    PhiPrior.ObservedValue[i] = Dirichlet.Symmetric(SizeVocab, beta);
}

// Evidence is only computed outside the power-plate version (see model setup).
if (UsePowerPlate)
{
    Engine.OptimiseForVariables = new IVariable[] { Theta, Phi };
}
else
{
    Engine.OptimiseForVariables = new IVariable[] { Theta, Phi, Evidence };
}

postTheta = Engine.Infer<Dirichlet[]>(Theta);
postPhi = Engine.Infer<Dirichlet[]>(Phi);