Usage:
    python stress_test.py --host HOST_URL [options]

+WARNING:
+This stress test script is intended for use in a mock environment where both
+REDCap and GBF integrations are mocked.
+
Options:
    --host        Base host URL (required)
    --rps         Requests per second (default: 20)
import json
import argparse

-
def parse_args():
    parser = argparse.ArgumentParser(description='Run stress test for edrop API')
    parser.add_argument('--host', required=True,
-                        help='Base host URL (required)')
+                        help='Base host URL (required)')
    parser.add_argument('--rps', type=int, default=20,
-                        help='Requests per second (default: 20)')
+                        help='Requests per second (default: 20)')
    parser.add_argument('--duration', type=int, default=60,
-                        help='Total duration in seconds (default: 60)')
+                        help='Total duration in seconds (default: 60)')
    return parser.parse_args()


-# Test payload for the API requests
-TEST_PAYLOAD = {
-    'instrument': 'consent',
-    'record': '999999',  # this is a test record id hence it will not exist in the database
+BASE_TEST_PAYLOAD = {
+    'instrument': 'contact',
    'project_id': 'test',
    'project_url': 'http://test.com',
    'contact_complete': '2'
}

async def make_request(session, request_id, base_url):
+    """
+    Sends a single POST request to the API, returning detailed results
+    including status code, response time, and response body.
+    """
+    # Vary the record field to ensure each request is unique
+    payload = BASE_TEST_PAYLOAD.copy()
+    payload['record'] = str(999999 + request_id)
+
    start_time = time.time()
    try:
-        async with session.post(base_url, data=TEST_PAYLOAD) as response:
+        async with session.post(base_url, data=payload) as response:
            duration = time.time() - start_time
            status = response.status
            try:
@@ -88,7 +97,7 @@ async def run_load_test(base_url, requests_per_second, duration):
    results = []
    request_counter = 0

-    # Calculate delay between requests
+    # Calculate delay between requests to maintain the RPS
    delay = 1.0 / requests_per_second

    async with aiohttp.ClientSession() as session:
@@ -98,12 +107,12 @@ async def run_load_test(base_url, requests_per_second, duration):
            tasks = []
            batch_start = time.time()

-            # Create a batch of requests
+            # Create a single request task each loop iteration
            task = asyncio.create_task(make_request(session, request_counter, base_url))
            tasks.append(task)
            request_counter += 1

-            # Wait for the batch to complete
+            # Wait for the request to complete
            batch_results = await asyncio.gather(*tasks)
            results.extend(batch_results)

@@ -112,14 +121,14 @@ async def run_load_test(base_url, requests_per_second, duration):
112121 if elapsed < delay :
113122 await asyncio .sleep (delay - elapsed )
114123
115- # Print progress
116124 print (f"\r Requests sent: { request_counter } , Elapsed time: { int (time .time () - start_time )} s" , end = '' )
117125
118126 return results
119127
async def main():
    args = parse_args()

+    # Target URL <host>/api/order/create
    base_url = f"{args.host}api/order/create"
    requests_per_second = args.rps
    duration = args.duration
@@ -145,4 +154,4 @@ async def main():
    print("\nDetailed results saved to load_test_results.json")

if __name__ == "__main__":
-    asyncio.run(main())
+    asyncio.run(main())
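
For reference, an invocation of the updated script against a local mock environment might look like the following (the host value and port are only illustrative; since main() builds the target URL as f"{args.host}api/order/create", the --host value appears to need a trailing slash):

    python stress_test.py --host http://localhost:8080/ --rps 10 --duration 30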