Changeset 7519 for main/waeup.sirp/branches/ulif-stress-multimech
- Timestamp: 27 Jan 2012, 12:51:05
- Files: 1 edited
main/waeup.sirp/branches/ulif-stress-multimech/multi-mechanize/multi-mechanize.py
--- multi-mechanize/multi-mechanize.py (r7478)
+++ multi-mechanize/multi-mechanize.py (r7519)

 #
 # Copyright (c) 2010-2011 Corey Goldberg (corey@goldb.org)
-# License: GNU LGPLv3 - distributed under the terms of the GNU Lesser General Public License version 3
-#
+# License: GNU LGPLv3 - distributed under the terms of the GNU Lesser
+# General Public License version 3
+#
 # This file is part of Multi-Mechanize:
-# Multi-Process, Multi-Threaded, Web Load Generator, with python-mechanize agents
+# Multi-Process, Multi-Threaded, Web Load Generator, with
+# python-mechanize agents
 #
 # requires Python 2.6+
-
-
-
 import ConfigParser
 import glob
…
 import time
 import lib.results as results
-import lib.progressbar as progressbar 
+import lib.progressbar as progressbar

 usage = 'Usage: %prog <project name> [options]'
 parser = optparse.OptionParser(usage=usage)
-parser.add_option('-p', '--port', dest='port', type='int', help='rpc listener port')
-parser.add_option('-r', '--results', dest='results_dir', help='results directory to reprocess')
+parser.add_option(
+    '-p', '--port', dest='port', type='int', help='rpc listener port')
+parser.add_option(
+    '-r', '--results', dest='results_dir',
+    help='results directory to reprocess')
 cmd_opts, args = parser.parse_args()

…
     sys.stderr.write('usage: python multi-mechanize.py <project_name>\n')
     sys.stderr.write('example: python multi-mechanize.py default_project\n\n')
-    sys.exit(1) 
+    sys.exit(1)

 scripts_path = 'projects/%s/test_scripts' % project_name
 if not os.path.exists(scripts_path):
     sys.stderr.write('\nERROR: can not find project: %s\n\n' % project_name)
-    sys.exit(1) 
-sys.path.append(scripts_path) 
-for f in glob.glob( '%s/*.py' % scripts_path):  # import all test scripts as modules
+    sys.exit(1)
+sys.path.append(scripts_path)
+for f in glob.glob( '%s/*.py' % scripts_path):
+    # import all test scripts as modules
     f = f.replace(scripts_path, '').replace(os.sep, '').replace('.py', '')
     exec('import %s' % f)
…
         import lib.rpcserver
         lib.rpcserver.launch_rpc_server(cmd_opts.port, project_name, run_test)
-    else: 
+    else:
         run_test()
     return
-
-
-
+
+
 def run_test(remote_starter=None):
     if remote_starter is not None:
         remote_starter.test_running = True
         remote_starter.output_dir = None
-
-    run_time, rampup, results_ts_interval, console_logging, progress_bar, results_database, post_run_script, xml_report, user_group_configs = configure(project_name)
-
-    run_localtime = time.localtime()
-    output_dir = time.strftime('projects/' + project_name + '/results/results_%Y.%m.%d_%H.%M.%S/', run_localtime)
-
+    (run_time, rampup, results_ts_interval, console_logging, progress_bar,
+     results_database, post_run_script, xml_report,
+     user_group_configs) = configure(project_name)
+
+    run_localtime = time.localtime()
+    output_dir = time.strftime(
+        'projects/' + project_name
+        + '/results/results_%Y.%m.%d_%H.%M.%S/', run_localtime)
+
     # this queue is shared between all processes/threads
     queue = multiprocessing.Queue()
…
     rw.daemon = True
     rw.start()
-
-    user_groups = [] 
+
+    user_groups = []
     for i, ug_config in enumerate(user_group_configs):
-        ug = UserGroup(queue, i, ug_config.name, ug_config.num_threads, ug_config.script_file, run_time, rampup)
-        user_groups.append(ug) 
+        ug = UserGroup(queue, i, ug_config.name, ug_config.num_threads,
+                       ug_config.script_file, run_time, rampup)
+        user_groups.append(ug)
     for user_group in user_groups:
         user_group.start()
-
-    start_time = time.time()
-
+
+    start_time = time.time()
+
     if console_logging:
         for user_group in user_groups:
…
     print '\n user_groups: %i' % len(user_groups)
     print ' threads: %i\n' % (ug_config.num_threads * len(user_groups))

     if progress_bar:
         p = progressbar.ProgressBar(run_time)
…
             p.update_time(elapsed)
             if sys.platform.startswith('win'):
-                print '%s transactions: %i timers: %i errors: %i\r' % (p, rw.trans_count, rw.timer_count, rw.error_count),
+                print '%s transactions: %i timers: %i errors: %i\r' % (
+                    p, rw.trans_count, rw.timer_count, rw.error_count),
             else:
-                print '%s transactions: %i timers: %i errors: %i' % (p, rw.trans_count, rw.timer_count, rw.error_count)
+                print '%s transactions: %i timers: %i errors: %i' % (
+                    p, rw.trans_count, rw.timer_count, rw.error_count)
                 sys.stdout.write(chr(27) + '[A' )
             time.sleep(1)
             elapsed = time.time() - start_time
-
         print p
-
-    while [user_group for user_group in user_groups if user_group.is_alive()] != []:
+
+    while [user_group for user_group in user_groups
+           if user_group.is_alive()] != []:
         if progress_bar:
             if sys.platform.startswith('win'):
…
                 sys.stdout.write(chr(27) + '[A' )
         time.sleep(.5)

     if not sys.platform.startswith('win'):
         print
…
     time.sleep(.2) # make sure the writer queue is flushed
     print '\n\nanalyzing results...\n'
-    results.output_results(output_dir, 'results.csv', run_time, rampup, results_ts_interval, user_group_configs, xml_report)
+    results.output_results(
+        output_dir, 'results.csv', run_time, rampup, results_ts_interval,
+        user_group_configs, xml_report)
     print 'created: %sresults.html\n' % output_dir
     if xml_report:
         print 'created: %sresults.jtl' % output_dir
         print 'created: last_results.jtl\n'

     # copy config file to results directory
     project_config = os.sep.join(['projects', project_name, 'config.cfg'])
     saved_config = os.sep.join([output_dir, 'config.cfg'])
     shutil.copy(project_config, saved_config)

     if results_database is not None:
         print 'loading results into database: %s\n' % results_database
         import lib.resultsloader
-        lib.resultsloader.load_results_database(project_name, run_localtime, output_dir, results_database,
-                run_time, rampup, results_ts_interval, user_group_configs)
-
+        lib.resultsloader.load_results_database(
+            project_name, run_localtime, output_dir, results_database,
+            run_time, rampup, results_ts_interval, user_group_configs)
+
     if post_run_script is not None:
         print 'running post_run_script: %s\n' % post_run_script
         subprocess.call(post_run_script)

     print 'done.\n'

     if remote_starter is not None:
         remote_starter.test_running = False
         remote_starter.output_dir = output_dir
-
     return
-
-
-
+
+
+
 def rerun_results(results_dir):
     output_dir = 'projects/%s/results/%s/' % (project_name, results_dir)
     saved_config = '%s/config.cfg' % output_dir
-    run_time, rampup, results_ts_interval, console_logging, progress_bar, results_database, post_run_script, xml_report, user_group_configs = configure(project_name, config_file=saved_config)
+    (run_time, rampup, results_ts_interval, console_logging, progress_bar,
+     results_database, post_run_script, xml_report,
+     user_group_configs) = configure(project_name, config_file=saved_config)
     print '\n\nanalyzing results...\n'
-    results.output_results(output_dir, 'results.csv', run_time, rampup, results_ts_interval, user_group_configs, xml_report)
+    results.output_results(
+        output_dir, 'results.csv', run_time, rampup, results_ts_interval,
+        user_group_configs, xml_report)
     print 'created: %sresults.html\n' % output_dir
     if xml_report:
         print 'created: %sresults.jtl' % output_dir
         print 'created: last_results.jtl\n'


…
             rampup = config.getint(section, 'rampup')
             results_ts_interval = config.getint(section, 'results_ts_interval')
-            try: 
+            try:
                 console_logging = config.getboolean(section, 'console_logging')
             except ConfigParser.NoOptionError:
                 console_logging = False
-            try: 
+            try:
                 progress_bar = config.getboolean(section, 'progress_bar')
             except ConfigParser.NoOptionError:
…
             user_group_configs.append(ug_config)

-    return (run_time, rampup, results_ts_interval, console_logging, progress_bar, results_database, post_run_script, xml_report, user_group_configs)
-
+    return (run_time, rampup, results_ts_interval, console_logging,
+            progress_bar, results_database, post_run_script, xml_report,
+            user_group_configs)
+

…
         self.name = name
         self.script_file = script_file
-
-
-
+
+
+
 class UserGroup(multiprocessing.Process):
-    def __init__(self, queue, process_num, user_group_name, num_threads, script_file, run_time, rampup):
+    def __init__(self, queue, process_num, user_group_name, num_threads,
+                 script_file, run_time, rampup):
         multiprocessing.Process.__init__(self)
         self.queue = queue
…
         self.rampup = rampup
         self.start_time = time.time()

     def run(self):
         threads = []
…
             if i > 0:
                 time.sleep(spacing)
-            agent_thread = Agent(self.queue, self.process_num, i, self.start_time, self.run_time, self.user_group_name, self.script_file)
+            agent_thread = Agent(self.queue, self.process_num, i,
+                                 self.start_time, self.run_time,
+                                 self.user_group_name, self.script_file)
             agent_thread.daemon = True
             threads.append(agent_thread)
-            agent_thread.start() 
+            agent_thread.start()
         for agent_thread in threads:
             agent_thread.join()



 class Agent(threading.Thread):
-    def __init__(self, queue, process_num, thread_num, start_time, run_time, user_group_name, script_file):
+    def __init__(self, queue, process_num, thread_num, start_time,
+                 run_time, user_group_name, script_file):
         threading.Thread.__init__(self)
         self.queue = queue
…
         self.user_group_name = user_group_name
         self.script_file = script_file
-
-        # choose most accurate timer to use (time.clock has finer granularity than time.time on windows, but shouldn't be used on other systems)
+
+        # choose most accurate timer to use (time.clock has finer
+        # granularity than time.time on windows, but shouldn't be used
+        # on other systems)
         if sys.platform.startswith('win'):
             self.default_timer = time.clock
         else:
             self.default_timer = time.time
-
-
+
+
     def run(self):
         elapsed = 0

         if self.script_file.lower().endswith('.py'):
             module_name = self.script_file.replace('.py', '')
         else:
-            sys.stderr.write('ERROR: scripts must have .py extension. can not run test script: %s. aborting user group: %s\n' % (self.script_file, self.user_group_name))
+            sys.stderr.write(
+                'ERROR: scripts must have .py extension. can not run '
+                'test script: %s. aborting user group: %s\n' % (
+                    self.script_file, self.user_group_name))
             return
         try:
             trans = eval(module_name + '.Transaction()')
         except NameError, e:
-            sys.stderr.write('ERROR: can not find test script: %s. aborting user group: %s\n' % (self.script_file, self.user_group_name))
+            sys.stderr.write(
+                'ERROR: can not find test script: %s. aborting user '
+                'group: %s\n' % (self.script_file, self.user_group_name))
             return
         except Exception, e:
-            sys.stderr.write('ERROR: failed initializing Transaction: %s. aborting user group: %s\n' % (self.script_file, self.user_group_name))
+            sys.stderr.write(
+                'ERROR: failed initializing Transaction: %s. aborting '
+                'user group: %s\n' % (self.script_file, self.user_group_name))
             return

         trans.custom_timers = {}
-
-        # scripts have access to these vars, which can be useful for loading unique data
+
+        # scripts have access to these vars, which can be useful for
+        # loading unique data
         trans.thread_num = self.thread_num
         trans.process_num = self.process_num

         while elapsed < self.run_time:
             error = ''
-            start = self.default_timer() 
-
+            start = self.default_timer()
+
             try:
                 trans.run()
-            except Exception, e: # test runner catches all script exceptions here
+            except Exception, e: # test runner catches all script
+                                 # exceptions here
                 error = str(e).replace(',', '')

             finish = self.default_timer()

             scriptrun_time = finish - start
-            elapsed = time.time() - self.start_time 
+            elapsed = time.time() - self.start_time

             epoch = time.mktime(time.localtime())
-
-            fields = (elapsed, epoch, self.user_group_name, scriptrun_time, error, trans.custom_timers)
+
+            fields = (elapsed, epoch, self.user_group_name,
+                      scriptrun_time, error, trans.custom_timers)
             self.queue.put(fields)


…
         self.timer_count = 0
         self.error_count = 0

         try:
             os.makedirs(self.output_dir, 0755)
         except OSError:
             sys.stderr.write('ERROR: Can not create output directory\n')
-            sys.exit(1) 
-
+            sys.exit(1)
+
     def run(self):
-        with open(self.output_dir + 'results.csv', 'w') as f: 
+        with open(self.output_dir + 'results.csv', 'w') as f:
             while True:
                 try:
-                    elapsed, epoch, self.user_group_name, scriptrun_time, error, custom_timers = self.queue.get(False)
+                    (elapsed, epoch, self.user_group_name, scriptrun_time,
+                     error, custom_timers) = self.queue.get(False)
                     self.trans_count += 1
                     self.timer_count += len(custom_timers)
                     if error != '':
                         self.error_count += 1
-                    f.write('%i,%.3f,%i,%s,%f,%s,%s\n' % (self.trans_count, elapsed, epoch, self.user_group_name, scriptrun_time, error, repr(custom_timers)))
+                    f.write('%i,%.3f,%i,%s,%f,%s,%s\n' % (
+                        self.trans_count, elapsed, epoch,
+                        self.user_group_name, scriptrun_time, error,
+                        repr(custom_timers)))
                     f.flush()
                     if self.console_logging:
-                        print '%i, %.3f, %i, %s, %.3f, %s, %s' % (self.trans_count, elapsed, epoch, self.user_group_name, scriptrun_time, error, repr(custom_timers))
+                        print '%i, %.3f, %i, %s, %.3f, %s, %s' % (
+                            self.trans_count, elapsed, epoch,
+                            self.user_group_name, scriptrun_time, error,
+                            repr(custom_timers))
                 except Queue.Empty:
                     time.sleep(.05)

-
-
 if __name__ == '__main__':
     main()
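For orientation, the Agent.run() loop shown above imports each file in projects/<project_name>/test_scripts, instantiates its Transaction class, assigns thread_num and process_num, resets custom_timers to {}, and then calls run() repeatedly, pushing the elapsed time and custom_timers onto the shared results queue. A minimal test script compatible with that protocol could look like the sketch below; the file name, target URL and timer label are illustrative assumptions, not part of this changeset.

# projects/default_project/test_scripts/sample_get.py  (hypothetical file name)
import time
import urllib2  # any HTTP client works; urllib2 is only used for illustration


class Transaction(object):

    def __init__(self):
        # multi-mechanize.py overwrites custom_timers with {} and assigns
        # thread_num / process_num after constructing this object
        self.custom_timers = {}

    def run(self):
        start = time.time()
        resp = urllib2.urlopen('http://localhost:8080/')  # assumed target URL
        resp.read()
        # per-request timings reported through the shared queue into results.csv
        self.custom_timers['Sample_Page'] = time.time() - start

Note that Agent.run() already measures scriptrun_time around each run() call, so a script only needs to fill custom_timers for any finer-grained timings it wants reported.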
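configure(), partially visible in the hunks above, reads run_time, rampup and results_ts_interval as integers, console_logging and progress_bar as optional booleans, and builds one UserGroupConfig per remaining section. A config.cfg along the following lines would drive the reformatted code path; the section and option names not shown in this diff ([global], threads, script) are assumptions based on the surrounding multi-mechanize code, not something introduced by r7519.

[global]
run_time: 30
rampup: 0
results_ts_interval: 10
console_logging: off
progress_bar: on

[user_group-1]
threads: 3
script: sample_get.py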