
Introduction

Below is a Python program that simulates a single-server queue (SSQ).

#
# This program is written "verbosely", following the algorithms
# presented in the lecture notes. It can be simplified and "vectorized";
# a rough vectorized sketch is included after ssq() below.
#
from matplotlib import pyplot as plt
import math
import numpy as np
import random

def exponential(m):
    #
    # Generate an exponential variate with mean m by inverse-transform sampling.
    #
    # Using random.random() here is a poor choice: we should use
    # multi-stream random number generation to produce two independent
    # streams of pseudo-random numbers, one for the departure (service)
    # process and the other for the arrival process (see the sketch below).
    #
    return -m * math.log(1.0 - random.random())
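
#
# A minimal sketch (not used by the rest of the program) of the multi-stream
# idea mentioned above: give the arrival and departure processes their own
# random.Random instances. The stream names and seeds are illustrative
# assumptions, not part of the original program.
#
arrival_stream = random.Random(12345)   # pseudo-random stream for arrivals
service_stream = random.Random(54321)   # pseudo-random stream for services

def exponential_stream(m, stream):
    # same inverse-transform sampling, drawing from a dedicated stream
    return -m * math.log(1.0 - stream.random())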

def get_arrival(p, r):
    # next arrival time: previous arrival time p plus an exponential
    # interarrival time with rate r (mean 1/r)
    return p + exponential(1./r)

def get_service(m):
    # service time with mean m
    return exponential(m)

# arate: arrival rate, or lambda
# smean: the mean service time, or 1/mu
def ssq(arate, smean, numpackets):
    c = np.zeros(numpackets+1)   # completion (departure) times
    d = np.zeros(numpackets+1)   # delays (time spent waiting in the queue)
    a = np.zeros(numpackets+1)   # arrival times
    s = np.zeros(numpackets+1)   # service times
    for i in range(1, numpackets+1):
        a[i] = get_arrival(a[i-1], arate)
        if a[i] < c[i-1]:
            # packet i arrives before packet i-1 completes, so it must wait
            d[i] = c[i-1] - a[i]
        # else:
        #    d[i] = 0.  (the arrays are zero-initialized, so nothing to do)
        s[i] = get_service(smean)
        c[i] = a[i] + d[i] + s[i]
    return a,s,c,d
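
#
# A rough sketch of the "vectorized" variant mentioned in the header comment:
# all interarrival and service times are drawn in bulk with numpy, and only
# the completion-time recursion remains a Python loop. This is one possible
# reading of "vectorized", offered as an assumption; it is not called by the
# rest of the program.
#
def ssq_vectorized(arate, smean, numpackets, seed=None):
    rng = np.random.default_rng(seed)
    a = np.zeros(numpackets+1)
    s = np.zeros(numpackets+1)
    d = np.zeros(numpackets+1)
    c = np.zeros(numpackets+1)
    a[1:] = np.cumsum(rng.exponential(1./arate, numpackets))  # arrival times
    s[1:] = rng.exponential(smean, numpackets)                # service times
    for i in range(1, numpackets+1):
        d[i] = max(0., c[i-1] - a[i])   # wait only if the server is still busy
        c[i] = a[i] + d[i] + s[i]
    return a,s,c,d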


def traffic_load(arate, smean):
    # traffic load rho = lambda * E[S] = lambda/mu
    return arate*smean

def nom_throughput(s, c, n):
    # normalized throughput: total service time delivered divided by the
    # total elapsed time up to the last completion
    return n * np.mean(s[1:]) / c[n]

def wait_time(s, d):
    # total time each packet spends in the system: queueing delay plus service
    return s + d

def delay_time_averaged(arate, s, d):
    # time-averaged delay: arrival rate times the packet-averaged system time
    # (cf. Little's law)
    w = wait_time(s, d)
    return arate * np.mean(w[1:])

def delay_packet_averaged(s, d):
    # average time in the system per packet
    w = wait_time(s, d)
    return np.mean(w[1:])

def simulate_n_runs(smeans, arates, numpkts):
    nruns = len(smeans)   # one run per (smean, arate) pair
    atimes = np.zeros((nruns, numpkts+1))
    stimes = np.zeros((nruns, numpkts+1))
    ctimes = np.zeros((nruns, numpkts+1))
    dtimes = np.zeros((nruns, numpkts+1))
    tloads = np.zeros(nruns)
    throuputs = np.zeros(nruns)
    dtaveraged = np.zeros(nruns)
    dpaveraged = np.zeros(nruns)

    for i,(smean,arate) in enumerate(zip(smeans, arates)):
        print(i, smean, arate)
        atimes[i, :],stimes[i, :],ctimes[i, :],dtimes[i, :] = ssq(arate, smean, numpkts)
        tloads[i] = traffic_load(arate, smean)
        throuputs[i] = nom_throughput(stimes[i, :], ctimes[i, :], numpkts)
        dtaveraged[i] = delay_time_averaged(arate, stimes[i, :], dtimes[i, :])
        dpaveraged[i] = delay_packet_averaged(stimes[i, :], dtimes[i, :])
    return tloads,throuputs,dtaveraged,dpaveraged 

def plot_and_show_graphs(tloads,throuputs,dtaveraged,dpaveraged):
    plt.subplot(131)
    plt.plot(tloads, throuputs)
    plt.xlabel('Normalized load')
    plt.ylabel('Normalized throughput')

    plt.subplot(132)
    plt.plot(tloads, dtaveraged)
    plt.xlabel('Normalized load')
    plt.ylabel('Time-Averaged Delay')

    plt.subplot(133)
    plt.plot(tloads, dpaveraged)
    plt.xlabel('Normalized load')
    plt.ylabel('Packet-Averaged Delay')
    plt.show()

if __name__ == '__main__':
    numpkts = 100000
    nruns = 22
    smeans = np.ones(nruns)    # fix the mean service time at 1 (mu = 1)
    arates = np.zeros(nruns)
    arates[0:9] = np.arange(9)*0.1 + 0.1                       # 0.1 .. 0.9
    arates[9:(9+10)] = np.arange(10)*0.01 + arates[8] + 0.01   # 0.91 .. 1.00
    arates[19:(19+3)] = arates[18] + np.arange(3)*0.1 + 0.1    # 1.1, 1.2, 1.3 (overload)

    tloads,throuputs,dtaveraged,dpaveraged \
            = simulate_n_runs(smeans, arates, numpkts)

    plot_and_show_graphs(tloads,throuputs,dtaveraged,dpaveraged)

Experiments and Exploration

  • Run the program and observe the output.
  • Revise the program to compare the simulation results with the closed-form solution presented in the lecture notes; a starting-point sketch follows this list.
  • Use the results to explain the concept of congestion.
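
As a starting point for the comparison item above, here is a minimal sketch that overlays the simulated packet-averaged delay with the standard M/M/1 closed-form mean system time 1/(mu - lambda) for loads below 1. It assumes this is the closed-form solution meant in the lecture notes, assumes it is appended to the program above (so ssq() and delay_packet_averaged() are in scope), and the helper name plot_vs_mm1 is illustrative.

import numpy as np
from matplotlib import pyplot as plt

def plot_vs_mm1(arates, smean, numpkts):
    mu = 1. / smean
    sim = []
    for arate in arates:
        a, s, c, d = ssq(arate, smean, numpkts)
        sim.append(delay_packet_averaged(s, d))
    # closed-form M/M/1 mean system time, defined only for arate < mu
    theory = [1. / (mu - arate) if arate < mu else np.nan for arate in arates]
    plt.plot(arates * smean, sim, 'o', label='simulation')
    plt.plot(arates * smean, theory, '-', label='M/M/1: 1/(mu - lambda)')
    plt.xlabel('Normalized load')
    plt.ylabel('Packet-Averaged Delay')
    plt.legend()
    plt.show()

# example: loads 0.1 .. 0.9 with mean service time 1
# plot_vs_mm1(np.arange(1, 10) * 0.1, 1.0, 100000)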