%This is a sample code to show how to simulate a discrete time Markov
%chain with a given transition probability matrix.
%The inputs:
% n = the number of steps to take.
%The output:
% chain = n+1 states of the chain (the +1 is for the initial state).
%
% More importantly, the code produces a plot. You can convert the plot to
% a .pdf to turn it in.
%
%How to call this function: save this file in your current MATLAB folder
%(or any folder on the MATLAB path). Then type the following into MATLAB:
% [X] = DTMC_SimulationOne(100);
%Note that my transition matrix is *not* the one you will need to use. In
%fact, my state space is {1,2,3}, whereas your state space is {1,2,3,4}.
function [X] = DTMC_SimulationOne(n)
%DTMC_SIMULATIONONE Simulate a discrete-time Markov chain for n steps.
%   X = DTMC_SimulationOne(n) returns a column vector X of length n+1,
%   where X(1) is the initial state (drawn from the distribution alpha
%   below) and X(j+1) is the state after j transitions, governed by the
%   transition matrix P below. A stairs plot of the realization vs. time
%   is also produced.
%
%   Input:
%     n - number of transition steps to simulate.
%   Output:
%     X - (n+1)-by-1 vector of visited states.
%
%   The sampling logic works for a chain with ANY number of states: just
%   replace P (each row must sum to 1) and alpha (must sum to 1) below.

%Preallocate the realization: X(1) is the initial state, X(j+1) the state
%after j transitions.
X = zeros(n+1, 1);

%Transition matrix of the example chain on state space {1,2,3}.
%Row i, column k holds P(X_{j+1} = k | X_j = i). Replace with your own.
P = [ 1/3 2/3 0;
      1/4 1/2 1/4;
      1   0   0];

%Initial distribution over the states.
alpha = [1/10, 2/10, 7/10];

%Precompute the cumulative distribution of each row of P once, outside the
%loop (it is loop-invariant).
C = cumsum(P, 2);

%Draw the initial state by inverting the CDF of alpha.
X(1) = samplestate(cumsum(alpha));

%Main loop: at each step, draw the next state from the row of P indexed by
%the current state.
for j = 1:n
    X(j+1) = samplestate(C(X(j), :));
end

%Time axis 0,1,...,n (vectorized; no accumulation loop needed).
Tn = (0:n)';
stairs(Tn, X)
end

function s = samplestate(cdf)
%SAMPLESTATE Draw one state index from a cumulative distribution.
%   cdf is a row vector of partial sums of a probability vector; the
%   returned s is the smallest index with rand < cdf(s). This reproduces
%   the strict-< comparisons of the textbook if/elseif sampling ladder,
%   but works for any number of states.
s = find(rand < cdf, 1, 'first');
if isempty(s)
    %Guard: floating-point round-off can make the final partial sum fall
    %just below 1, leaving rand above every entry. Fall back to the last
    %state, matching the original ladder's trailing 'else' branch.
    s = numel(cdf);
end
end