Question
# -*- coding: utf-8 -*-
from random import choice
import collections
import numpy as np

def runday(nservers=1):
    """Simulate one day of 150 customers; return (waits, mean queue length, idle-server minutes)."""
    # Table 1: time between arrivals (minutes) with probabilities 0.10, 0.15, 0.10, 0.35, 0.25, 0.05
    arrivals = [0]*10 + [1]*15 + [2]*10 + [3]*35 + [4]*25 + [5]*5
    # Table 2: service times (minutes) with probabilities 0.25, 0.20, 0.40, 0.15
    services = [1]*25 + [2]*20 + [3]*40 + [4]*15
    # 150 customers: the first arrives at time 0, the remaining 149 follow after random gaps
    customerdata = [(0, choice(services))] + [(choice(arrivals), choice(services)) for _ in range(149)]
    arrivaltimes = np.cumsum([c[0] for c in customerdata])
    customers = collections.deque((c[0], c[1], arrivaltimes[i]) for i, c in enumerate(customerdata))
    waiting = collections.deque()
    servers = [(False, 0)] * nservers     # each server is (busy, minutes of service remaining)
    time = 0
    dataWaits = []        # waiting time of each customer before service starts
    dataQueueLen = []     # queue length at each minute
    dataServerIdle = []   # number of idle servers at each minute
    while True:
        # customers whose arrival time has been reached join the queue
        while customers and customers[0][2] <= time:
            waiting.append(customers.popleft())
        # one minute of service elapses; a server is freed when its job finishes
        servers = [(remaining - 1 > 0, max(remaining - 1, 0)) for busy, remaining in servers]
        # hand waiting customers to idle servers and count servers left idle
        idle = 0
        for i, (busy, remaining) in enumerate(servers):
            if busy:
                continue
            if waiting:
                gap, service, arrived = waiting.popleft()
                dataWaits.append(time - arrived)
                servers[i] = (True, service)
            else:
                idle += 1
        dataQueueLen.append(len(waiting))
        dataServerIdle.append(idle)
        # print([len(customers), len(waiting), servers])   # uncomment to trace one day
        if not customers and not waiting and all(not busy for busy, remaining in servers):
            break
        time += 1
    return dataWaits, np.mean(dataQueueLen), sum(dataServerIdle)

datas = []
for n in range(10000):
    if n % 1000 == 0:
        print(n)          # progress indicator every 1000 simulated days
    datas.append(runday())
# datas now contains all data from these trials.
Please help to complete the Python code above. We have two tables:

Table 1: Time between arrivals
  Interarrival time (minutes):  0     1     2     3     4     5
  Probability:                  0.10  0.15  0.10  0.35  0.25  0.05

Table 2: Service times
  Service time (minutes):  1     2     3     4
  Probability:             0.25  0.20  0.40  0.15

150 customers are served per day. The code must display the average waiting time before a customer is served, first for 1 server and then for 2 servers. The theoretical average waiting time is 4.927 minutes for 1 server and 0.11285 minutes for 2 servers. I am struggling to complete the code. Can you add a plot too? Thank you!
Step by Step Solution
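The two theoretical values make sense once the load on a single server is worked out: the mean time between arrivals is 0×0.10 + 1×0.15 + 2×0.10 + 3×0.35 + 4×0.25 + 5×0.05 = 2.65 minutes, while the mean service time is 1×0.25 + 2×0.20 + 3×0.40 + 4×0.15 = 2.45 minutes, so one server is busy roughly 92% of the time and queues build up, whereas two servers are lightly loaded. Below is a minimal sketch of one way to finish the script, assuming the corrected runday(nservers) above, matplotlib for the plot, and a helper name average_wait introduced here purely for illustration; the 10,000-day count is taken from the original loop and can be reduced for a quicker run.

import numpy as np
import matplotlib.pyplot as plt

def average_wait(nservers, ndays=10000):
    # Pool every customer's waiting time over many simulated days.
    waits = []
    for _ in range(ndays):
        day_waits, _, _ = runday(nservers)
        waits.extend(day_waits)
    return np.mean(waits), waits

mean1, waits1 = average_wait(nservers=1)
mean2, waits2 = average_wait(nservers=2)
print("Average wait, 1 server : %.3f minutes (theory: 4.927)" % mean1)
print("Average wait, 2 servers: %.3f minutes (theory: 0.11285)" % mean2)

# Overlaid histograms compare the waiting-time distributions of the two set-ups.
plt.hist(waits1, bins=range(0, 31), alpha=0.6, label="1 server")
plt.hist(waits2, bins=range(0, 31), alpha=0.6, label="2 servers")
plt.xlabel("Waiting time before service (minutes)")
plt.ylabel("Number of customers")
plt.title("Simulated waiting times, 150 customers per day")
plt.legend()
plt.show()

With enough simulated days the printed averages should land close to the quoted theoretical values (about 4.9 minutes for one server and about 0.11 minutes for two); a few hundred days is usually enough for a rough check if the full run is too slow.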