in MHP.py
def generate_seq(self, horizon):
    '''Generate a sequence based on mu, alpha, omega values.
    Uses Ogata's thinning method, with some speedups, noted below'''
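    # Overview of the loop below: propose candidate event times from a homogeneous
    # Poisson process with rate Istar >= sum_i lambda_i(t), then accept a candidate
    # at time s with probability sum_i lambda_i(s) / Istar, attributing it to
    # process i with probability lambda_i(s) / Istar.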
    self.data = []  # clear history

    Istar = np.sum(self.mu)
    s = np.random.exponential(scale=1. / Istar)

    # attribute (weighted random sample, since sum(mu)==Istar);
    # take the scalar draw so self.data stays a homogeneous float array
    n0 = np.random.choice(np.arange(self.dim),
                          1,
                          p=(self.mu / Istar))[0]
    self.data.append([s, n0])

    # value of \lambda(t_k) where k is most recent event
    # starts with just the base rate
    lastrates = self.mu.copy()
    decIstar = False
    while True:
        tj, uj = self.data[-1][0], int(self.data[-1][1])

        if decIstar:
            # if last event was rejected, decrease Istar
            Istar = np.sum(rates)
            decIstar = False
        else:
            # otherwise, we just had an event, so recalc Istar (inclusive of last event)
            Istar = np.sum(lastrates) + \
                self.omega * np.sum(self.alpha[:, uj])
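        # either way, Istar upper-bounds the total intensity from here until the
        # next accepted event, since the exponential kernels only decay between events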
        # generate new event
        s += np.random.exponential(scale=1. / Istar)

        # calc rates at time s (use trick to take advantage of rates at last event)
        rates = self.mu + np.exp(-self.omega * (s - tj)) * \
            (self.alpha[:, uj].flatten() * self.omega + lastrates - self.mu)
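        # (the "trick": lastrates holds lambda just before the jump at tj, so adding
        # omega * alpha[:, uj] for that jump and decaying the excess above mu by
        # exp(-omega * (s - tj)) reproduces the full sum over past events without
        # iterating over the history)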
        # attribution/rejection test
        # handle attribution and thinning in one step as weighted random sample
        diff = Istar - np.sum(rates)
        try:
            n0 = np.random.choice(np.arange(self.dim + 1), 1,
                                  p=(np.append(rates, diff) / Istar))[0]
        except ValueError:
            # by construction this should not happen
            print('Probabilities do not sum to one.')
            self.data = np.array(self.data)
            return self.data
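        # n0 in [0, dim) accepts the candidate and attributes it to process n0;
        # n0 == dim (drawn with probability diff / Istar) rejects (thins) it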
        if n0 < self.dim:
            self.data.append([s, n0])
            # update lastrates
            lastrates = rates.copy()
        else:
            decIstar = True

        # if past horizon, done
        if s >= horizon:
            self.data = np.array(self.data)
            self.data = self.data[self.data[:, 0] < horizon]
            return self.data
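A minimal usage sketch (not part of MHP.py): the method above relies on numpy being imported as np in that module, and the sketch assumes an MHP class whose constructor stores mu (length-d base rates), alpha (d x d excitation matrix), and omega (decay rate) as self.mu, self.alpha, self.omega and sets self.dim; the keyword constructor below is an assumption, not shown in the excerpt.

import numpy as np
from MHP import MHP  # assumes MHP.py defines the class containing generate_seq above

mu = np.array([0.2, 0.1])                  # base rates of a 2-dimensional process
alpha = np.array([[0.5, 0.0],              # alpha[i, j]: excitation of process i by events of j
                  [0.3, 0.4]])             # spectral radius of alpha should be < 1 for stability
omega = 1.0                                # exponential decay rate of the kernel

P = MHP(mu=mu, alpha=alpha, omega=omega)   # assumed constructor signature
events = P.generate_seq(horizon=100.)      # rows of [timestamp, process index], all times < horizon
print(events.shape, events[:5])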