Fix platform_string() usage, cut temporary space usage
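
Batch the latency-profile jobs so that at most 2 * cpu_count() trace files
exist on disk at any time: each .trace is analyzed and then removed before
the next batch of benchmarks runs, instead of every trace being kept until
the end of the run.

The control flow added below follows this pattern (a minimal runnable
sketch only; the string "commands" and run_batch() stand in for the real
jobset.JobSpec/jobset.run machinery):

    import multiprocessing

    BATCH = 2 * multiprocessing.cpu_count()

    def run_batch(benchmarks, analysis, cleanup):
        # stands in for the three jobset.run() calls in the real script
        for cmd in benchmarks + analysis + cleanup:
            print(cmd)

    benchmarks, analysis, cleanup = [], [], []
    for name in ['bm_one', 'bm_two', 'bm_three']:  # stands in for the profile lines
        benchmarks.append('run %s' % name)
        analysis.append('analyze %s.trace' % name)
        cleanup.append('rm %s.trace' % name)
        if len(benchmarks) >= BATCH:               # full batch: run it, then reset
            run_batch(benchmarks, analysis, cleanup)
            benchmarks, analysis, cleanup = [], [], []
    if benchmarks:                                 # flush any final partial batch
        run_batch(benchmarks, analysis, cleanup)
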
diff --git a/tools/run_tests/run_microbenchmark.py b/tools/run_tests/run_microbenchmark.py
index 42a31a6..d51388b 100755
--- a/tools/run_tests/run_microbenchmark.py
+++ b/tools/run_tests/run_microbenchmark.py
@@ -74,6 +74,7 @@
 
 benchmarks = []
 profile_analysis = []
+cleanup = []
 
 for bm_name in sys.argv[1:]:
   # generate latency profiles
@@ -92,10 +93,20 @@
                         'tools/profiling/latency_profile/profile_analyzer.py',
                         '--source', '%s.trace' % fnize(line), '--fmt', 'simple',
                         '--out', 'reports/%s.txt' % fnize(line)], timeout_seconds=None))
-  
-  jobset.run(benchmarks, maxjobs=multiprocessing.cpu_count()/2,
-             add_env={'GRPC_TEST_PORT_SERVER': 'localhost:%d' % port_server_port})
-  jobset.run(profile_analysis, maxjobs=multiprocessing.cpu_count())
+    cleanup.append(jobset.JobSpec(['rm', '%s.trace' % fnize(line)]))
+    if len(benchmarks) >= 2 * multiprocessing.cpu_count():
+      jobset.run(benchmarks, maxjobs=multiprocessing.cpu_count()/2,
+                 add_env={'GRPC_TEST_PORT_SERVER': 'localhost:%d' % port_server_port})
+      jobset.run(profile_analysis, maxjobs=multiprocessing.cpu_count())
+      jobset.run(cleanup, maxjobs=multiprocessing.cpu_count())
+      benchmarks = []
+      profile_analysis = []
+      cleanup = []
+  if len(benchmarks):
+    jobset.run(benchmarks, maxjobs=multiprocessing.cpu_count()/2,
+               add_env={'GRPC_TEST_PORT_SERVER': 'localhost:%d' % port_server_port})
+    jobset.run(profile_analysis, maxjobs=multiprocessing.cpu_count())
+    jobset.run(cleanup, maxjobs=multiprocessing.cpu_count())
 
   # generate flamegraphs
   heading('Flamegraphs: %s' % bm_name)