Ignore:
Timestamp:
Aug 28, 2014, 6:04:44 PM (10 years ago)
Author:
meunier
Message:

Trunk:

  • Updating python scripts for simulations and graphs for tsar_generic_xbar (support for rwt and mesi)
File:
1 edited

Legend:

Unmodified
Added
Removed
  • trunk/platforms/tsar_generic_xbar/scripts/create_graphs.py

    r749 r779  
    44import os
    55import re
    6 
    7 
    8 
     6import sys
     7
     8
     9#apps = [ 'histogram', 'mandel', 'filter', 'radix', 'fft_ga' ]
    910apps = [ 'histogram', 'mandel', 'filter', 'radix', 'radix_ga', 'fft', 'fft_ga', 'filt_ga', 'kmeans', 'pca', 'lu' ]
     11#apps = [ 'fft' ]
    1012nb_procs = [ 1, 4, 8, 16, 32, 64, 128, 256 ]
     13single_protocols = ['dhccp']
     14#joint_protocols = ['dhccp', 'rwt']
     15joint_protocols = []
    1116
    1217top_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), "..")
     
    2126data_dir = 'data'
    2227
    23 log_init_name = 'log_init_'
    24 log_term_name = 'log_term_'
     28log_stdo_name = '_stdo_'
     29log_term_name = '_term_'
    2530
    2631coherence_tmpl = os.path.join(scripts_path, template_dir, 'coherence_template.gp') # 1 graph per appli
     
    7075exec_time = {}
    7176metrics_val = {}
    72 for app in apps:
    73    exec_time[app] = {}
    74    metrics_val[app] = {}
    75    for i in nb_procs:
    76       metrics_val[app][i] = {}
    77       log_init_file = os.path.join(scripts_path, data_dir, app + '_' + log_init_name + str(i))
    78       log_term_file = os.path.join(scripts_path, data_dir, app + '_' + log_term_name + str(i))
    79 
    80       # Term
    81       lines = open(log_term_file, 'r')
    82       for line in lines:
    83          tokens = line[:-1].split()
    84          if len(tokens) > 0 and tokens[0] == "[PARALLEL_COMPUTE]":
    85             exec_time[app][i] = int(tokens[len(tokens) - 1])
    86 
    87       # Init files
    88       lines = open(log_init_file, 'r')
    89       for line in lines:
    90          tokens = line[:-1].split()
    91          if len(tokens) == 0:
    92             continue
    93          tag = tokens[0]
    94          value = tokens[len(tokens) - 1]
    95          pattern = re.compile('\[0[0-9][0-9]\]')
    96          if pattern.match(tag):
    97             metric = m_metric_id[tag]
    98             if (not metrics_val[app][i].has_key(metric) or tag == "[000]" or tag == "[001]"):
    99                # We don't add cycles of all Memcaches (they must be the same for all)
    100                metrics_val[app][i][metric] = int(value)
    101             else:
    102                metrics_val[app][i][metric] += int(value)
    103            
     77for prot in single_protocols:
     78   metrics_val[prot] = {}
     79   exec_time[prot] = {}
     80   for app in apps:
     81      exec_time[prot][app] = {}
     82      metrics_val[prot][app] = {}
     83      for i in nb_procs:
     84         metrics_val[prot][app][i] = {}
     85         log_stdo_file = os.path.join(scripts_path, data_dir, app + '_' + prot + log_stdo_name + str(i))
     86         log_term_file = os.path.join(scripts_path, data_dir, app + '_' + prot + log_term_name + str(i))
     87   
     88         # Term
     89         lines = open(log_term_file, 'r')
     90         for line in lines:
     91            tokens = line[:-1].split()
     92            if len(tokens) > 0 and tokens[0] == "[PARALLEL_COMPUTE]":
     93               exec_time[prot][app][i] = int(tokens[len(tokens) - 1])
     94   
     95         # Init files
     96         lines = open(log_stdo_file, 'r')
     97         for line in lines:
     98            tokens = line[:-1].split()
     99            if len(tokens) == 0:
     100               continue
     101            tag = tokens[0]
     102            value = tokens[len(tokens) - 1]
     103            pattern = re.compile('\[0[0-9][0-9]\]')
     104            if pattern.match(tag):
     105               metric = m_metric_id[tag]
     106               if (not metrics_val[prot][app][i].has_key(metric) or tag == "[000]" or tag == "[001]"):
     107                  # We don't add cycles of all Memcaches (they must be the same for all)
     108                  metrics_val[prot][app][i][metric] = int(value)
     109               else:
     110                  metrics_val[prot][app][i][metric] += int(value)
     111   
     112# Completing unset metrics (i.e. they are not present in the data file) with 0
     113for prot in single_protocols:
     114   for app in apps:
     115      for i in nb_procs:
     116         for metric in all_metrics:
     117            if metric not in metrics_val[prot][app][i]:
     118               metrics_val[prot][app][i][metric] = 0
     119
    104120# We make a 2nd pass to fill the derived fields, e.g. nb_total_updates
    105 for app in apps:
    106    for i in nb_procs:
    107       x, y = get_x_y(i)
    108       metrics_val[app][i]['total_read']     = metrics_val[app][i]['local_read']    + metrics_val[app][i]['remote_read']
    109       metrics_val[app][i]['total_write']    = metrics_val[app][i]['local_write']   + metrics_val[app][i]['remote_write']
    110       metrics_val[app][i]['total_ll']       = metrics_val[app][i]['local_ll']      + metrics_val[app][i]['remote_ll']
    111       metrics_val[app][i]['total_sc']       = metrics_val[app][i]['local_sc']      + metrics_val[app][i]['remote_sc']
    112       metrics_val[app][i]['total_cas']      = metrics_val[app][i]['local_cas']     + metrics_val[app][i]['remote_cas']
    113       metrics_val[app][i]['total_update']   = metrics_val[app][i]['local_update']  + metrics_val[app][i]['remote_update']
    114       metrics_val[app][i]['total_m_inv']    = metrics_val[app][i]['local_m_inv']   + metrics_val[app][i]['remote_m_inv']
    115       metrics_val[app][i]['total_cleanup']  = metrics_val[app][i]['local_cleanup'] + metrics_val[app][i]['remote_cleanup']
    116       metrics_val[app][i]['total_direct']   = metrics_val[app][i]['total_read']    + metrics_val[app][i]['total_write']
    117       metrics_val[app][i]['direct_cost']    = metrics_val[app][i]['read_cost']     + metrics_val[app][i]['write_cost']
    118       metrics_val[app][i]['broadcast_cost'] = metrics_val[app][i]['broadcast'] * (x * y - 1)
    119       if metrics_val[app][i]['broadcast'] < metrics_val[app][i]['write_broadcast']:
    120          # test to patch a bug in mem_cache
    121          metrics_val[app][i]['nonwrite_broadcast'] = 0
    122       else:
    123          metrics_val[app][i]['nonwrite_broadcast'] = metrics_val[app][i]['broadcast'] - metrics_val[app][i]['write_broadcast']
    124 
    125       metrics_val[app][i]['total_stacked'] = 0
    126       for stacked_metric in stacked_metrics:
    127          metrics_val[app][i]['total_stacked'] += metrics_val[app][i][stacked_metric]
     121for prot in single_protocols:
     122   for app in apps:
     123      for i in nb_procs:
     124         x, y = get_x_y(i)
     125         metrics_val[prot][app][i]['total_read']      = metrics_val[prot][app][i]['local_read']     + metrics_val[prot][app][i]['remote_read']
     126         metrics_val[prot][app][i]['total_write']     = metrics_val[prot][app][i]['local_write']    + metrics_val[prot][app][i]['remote_write']
     127         metrics_val[prot][app][i]['total_ll']        = metrics_val[prot][app][i]['local_ll']       + metrics_val[prot][app][i]['remote_ll']
     128         metrics_val[prot][app][i]['total_sc']        = metrics_val[prot][app][i]['local_sc']       + metrics_val[prot][app][i]['remote_sc']
     129         metrics_val[prot][app][i]['total_cas']       = metrics_val[prot][app][i]['local_cas']      + metrics_val[prot][app][i]['remote_cas']
     130         metrics_val[prot][app][i]['total_update']    = metrics_val[prot][app][i]['local_update']   + metrics_val[prot][app][i]['remote_update']
     131         metrics_val[prot][app][i]['total_m_inv']     = metrics_val[prot][app][i]['local_m_inv']    + metrics_val[prot][app][i]['remote_m_inv']
     132         metrics_val[prot][app][i]['total_cleanup']   = metrics_val[prot][app][i]['local_cleanup']  + metrics_val[prot][app][i]['remote_cleanup']
     133         metrics_val[prot][app][i]['total_direct']    = metrics_val[prot][app][i]['total_read']     + metrics_val[prot][app][i]['total_write']
     134         metrics_val[prot][app][i]['total_ncc_to_cc'] = metrics_val[prot][app][i]['ncc_to_cc_read'] + metrics_val[prot][app][i]['ncc_to_cc_write']
     135         metrics_val[prot][app][i]['direct_cost']     = metrics_val[prot][app][i]['read_cost']      + metrics_val[prot][app][i]['write_cost']
     136         metrics_val[prot][app][i]['broadcast_cost']  = metrics_val[prot][app][i]['broadcast'] * (x * y - 1)
     137         if metrics_val[prot][app][i]['broadcast'] < metrics_val[prot][app][i]['write_broadcast']:
     138            # test to patch a bug in mem_cache
     139            metrics_val[prot][app][i]['nonwrite_broadcast'] = 0
     140         else:
     141            metrics_val[prot][app][i]['nonwrite_broadcast'] = metrics_val[prot][app][i]['broadcast'] - metrics_val[prot][app][i]['write_broadcast']
     142   
     143         metrics_val[prot][app][i]['total_stacked'] = 0
     144         for stacked_metric in stacked_metrics:
     145            metrics_val[prot][app][i]['total_stacked'] += metrics_val[prot][app][i][stacked_metric]
    128146
    129147           
     
    138156############################################################
    139157
    140 for app in apps:
    141    data_coherence_name = os.path.join(scripts_path, gen_dir, app + '_coherence.dat')
    142    gp_coherence_name   = os.path.join(scripts_path, gen_dir, app + '_coherence.gp')
    143 
    144    # Creating the data file
     158for prot in single_protocols:
     159   for app in apps:
     160      data_coherence_name = os.path.join(scripts_path, gen_dir, prot + '_' + app + '_coherence.dat')
     161      gp_coherence_name   = os.path.join(scripts_path, gen_dir, prot + '_' + app + '_coherence.gp')
     162   
     163      # Creating the data file
     164      width = 15
     165      content = ""
     166     
     167      for metric in [ '#nb_procs' ] + grouped_metrics:
     168         content += metric + " "
     169         nb_spaces = width - len(metric)
     170         content += nb_spaces * ' '
     171      content += "\n"
     172   
     173      for i in nb_procs:
     174         content += "%-15d " % i
     175         for metric in grouped_metrics:
     176            val = float(metrics_val[prot][app][i][metric]) / exec_time[prot][app][i] * 1000
     177            content += "%-15f " % val
     178         content += "\n"
     179     
     180      create_file(data_coherence_name, content)
     181   
     182      # Creating the gp file
     183      template_file = open(coherence_tmpl, 'r')
     184      template = template_file.read()
     185     
     186      plot_str = ""
     187      col = 2
     188      for metric in grouped_metrics:
     189         if metric != grouped_metrics[0]:
     190            plot_str += ", \\\n    "
     191         plot_str += "\"" + data_coherence_name + "\" using ($1):($" + str(col) + ") lc rgb " + colors[col - 2] + " title \"" + m_metric_name[metric] + "\" with linespoint"
     192         col += 1
     193      gp_commands = template % dict(app_name = m_app_name[app], nb_procs = nb_procs[-1] + 1, plot_str = plot_str, svg_name = os.path.join(graph_dir, prot + '_' + app + '_coherence'))
     194     
     195      create_file(gp_coherence_name, gp_commands)
     196     
     197      # Calling gnuplot
     198      print "gnuplot", gp_coherence_name
     199      subprocess.call([ 'gnuplot', gp_coherence_name ])
     200
     201
     202############################################################
     203### Graph 2 : Speedup per Application                    ###
     204############################################################
     205
     206for prot in single_protocols:
     207   for app in apps:
     208   
     209      data_speedup_name = os.path.join(scripts_path, gen_dir, prot + '_' + app + '_speedup.dat')
     210      gp_speedup_name   = os.path.join(scripts_path, gen_dir, prot + '_' + app + '_speedup.gp')
     211     
     212      # Creating data file
     213      width = 15
     214      content = "#nb_procs"
     215      nb_spaces = width - len(content)
     216      content += nb_spaces * ' '
     217      content += "speedup\n"
     218   
     219      for i in nb_procs:
     220         content += "%-15d " % i
     221         val = exec_time[prot][app][i]
     222         content += "%-15f\n" % (exec_time[prot][app][1] / float(val))
     223   
     224      plot_str = "\"" + data_speedup_name + "\" using ($1):($2) lc rgb \"#654387\" title \"Speedup\" with linespoint"
     225     
     226      create_file(data_speedup_name, content)
     227     
     228      # Creating the gp file
     229      template_file = open(speedup_tmpl, 'r')
     230      template = template_file.read()
     231     
     232      gp_commands = template % dict(appli = m_app_name[app], nb_procs = nb_procs[-1] + 1, plot_str = plot_str, svg_name = os.path.join(graph_dir, prot + '_' + app + '_speedup'))
     233     
     234      create_file(gp_speedup_name, gp_commands)
     235     
     236      # Calling gnuplot
     237      print "gnuplot", gp_speedup_name
     238      subprocess.call([ 'gnuplot', gp_speedup_name ])
     239
     240
     241############################################################
     242### Graph 3 : All speedups on the same Graph             ###
     243############################################################
     244
     245for prot in single_protocols:
     246   # This graph uses the same template as the graph 2
     247   data_speedup_name = os.path.join(scripts_path, gen_dir, prot + '_all_speedup.dat')
     248   gp_speedup_name   = os.path.join(scripts_path, gen_dir, prot + '_all_speedup.gp')
     249   
     250   # Creating data file
    145251   width = 15
    146    content = ""
    147    
    148    for metric in [ '#nb_procs' ] + grouped_metrics:
    149       content += metric + " "
    150       nb_spaces = width - len(metric)
    151       content += nb_spaces * ' '
     252   content = "#nb_procs"
     253   nb_spaces = width - len(content)
     254   content += (nb_spaces + 1) * ' '
     255   for app in apps:
     256      content += app + " "
     257      content += (width - len(app)) * " "
    152258   content += "\n"
    153 
     259   
    154260   for i in nb_procs:
    155261      content += "%-15d " % i
    156       for metric in grouped_metrics:
    157          val = float(metrics_val[app][i][metric]) / exec_time[app][i] * 1000
    158          content += "%-15f " % val
     262      for app in apps:
     263         val = exec_time[prot][app][i]
     264         content += "%-15f " % (exec_time[prot][app][1] / float(val))
    159265      content += "\n"
    160266   
    161    create_file(data_coherence_name, content)
    162 
    163    # Creating the gp file
    164    template_file = open(coherence_tmpl, 'r')
     267   create_file(data_speedup_name, content)
     268   
     269   # Creating gp file
     270   template_file = open(speedup_tmpl, 'r')
    165271   template = template_file.read()
    166272   
    167273   plot_str = ""
    168274   col = 2
    169    for metric in grouped_metrics:
    170       if metric != grouped_metrics[0]:
    171          plot_str += ", \\\n    "
    172       plot_str += "\"" + data_coherence_name + "\" using ($1):($" + str(col) + ") lc rgb " + colors[col - 2] + " title \"" + m_metric_name[metric] + "\" with linespoint"
     275   for app in apps:
     276      if app != apps[0]:
     277         plot_str += ", \\\n     "
     278      plot_str += "\"" + data_speedup_name + "\" using ($1):($" + str(col) + ") lc rgb %s title \"" % (colors[col - 2])  + m_app_name[app] + "\" with linespoint"
    173279      col += 1
    174    gp_commands = template % dict(app_name = m_app_name[app], nb_procs = nb_procs[-1] + 1, plot_str = plot_str, svg_name = os.path.join(graph_dir, app + '_coherence'))
    175    
    176    create_file(gp_coherence_name, gp_commands)
    177    
     280   
     281   gp_commands = template % dict(appli = "All Applications", nb_procs = nb_procs[-1] + 1, plot_str = plot_str, svg_name = os.path.join(graph_dir, prot + '_all_speedup'))
     282     
     283   create_file(gp_speedup_name, gp_commands)
     284     
    178285   # Calling gnuplot
    179    print "gnuplot", gp_coherence_name
    180    subprocess.call([ 'gnuplot', gp_coherence_name ])
    181 
    182 
    183 ############################################################
    184 ### Graph 2 : Speedup per Application                    ###
    185 ############################################################
     286   print "gnuplot", gp_speedup_name
     287   subprocess.call([ 'gnuplot', gp_speedup_name ])
     288
     289
     290############################################################
     291### Graph 4 : Graph per metric                           ###
     292############################################################
     293
     294# The following section creates the graphs grouped by measure (e.g. #broadcasts)
     295# The template file cannot be easily created otherwise it would not be generic
     296# in many ways. This is why it is mainly created here.
     297# Graphs are created for metric in the "individual_metrics" list
     298
     299for prot in single_protocols:
     300   for metric in individual_metrics:
     301      data_metric_name = os.path.join(scripts_path, gen_dir, prot + '_' + metric + '.dat')
     302      gp_metric_name   = os.path.join(scripts_path, gen_dir, prot + '_' + metric + '.gp')
     303   
     304      # Creating the gp file
     305      # Setting xtics, i.e. number of procs for each application
     306      xtics_str = "("
     307      first = True
     308      xpos = 1
     309      app_labels = ""
     310      for num_appli in range(0, len(apps)):
     311         for i in nb_procs:
     312            if not first:
     313               xtics_str += ", "
     314            first = False
     315            if i == nb_procs[0]:
     316               xpos_first = xpos
     317            xtics_str += "\"%d\" %.1f" % (i, xpos)
     318            xpos_last = xpos
     319            xpos += 1.5
     320         xpos += 0.5
     321         app_name_xpos = float((xpos_first + xpos_last)) / 2
     322         app_labels += "set label \"%s\" at first %f,character 1 center font \"Times,12\"\n" % (m_app_name[apps[num_appli]], app_name_xpos)
     323      xtics_str += ")"
     324   
     325      xmax_val = float(xpos - 1)
     326   
     327      # Writing the lines of "plot"
     328      plot_str = ""
     329      xpos = 0
     330      first = True
     331      column = 2
     332      for i in range(0, len(nb_procs)):
     333         if not first:
     334            plot_str += ", \\\n    "
     335         first = False
     336         plot_str += "\"%s\" using ($1+%.1f):($%d) lc rgb %s notitle with boxes" % (data_metric_name, xpos, column, colors[i])
     337         column += 1
     338         xpos += 1.5
     339   
     340      template_file = open(metric_tmpl, 'r')
     341      template = template_file.read()
     342   
     343      gp_commands = template % dict(xtics_str = xtics_str, app_labels = app_labels, ylabel_str = m_metric_name[metric], norm_factor_str = m_norm_factor_name[m_metric_norm[metric]], xmax_val = xmax_val, plot_str = plot_str, svg_name = os.path.join(graph_dir, prot + '_' + metric))
     344   
     345      create_file(gp_metric_name, gp_commands)
     346     
     347      # Creating the data file
     348      width = 15
     349      content = "#x_pos"
     350      nb_spaces = width - len(content)
     351      content += nb_spaces * ' '
     352      for i in nb_procs:
     353         content += "%-15d" % i
     354      content += "\n"
     355   
     356      x_pos = 1
     357      for app in apps:
     358         # Computation of x_pos
     359         content += "%-15f" % x_pos
     360         x_pos += len(nb_procs) * 1.5 + 0.5
     361         for i in nb_procs:
     362            if m_metric_norm[metric] == "N":
     363               content += "%-15d" % (metrics_val[prot][app][i][metric])
     364            elif m_metric_norm[metric] == "P":
     365               content += "%-15f" % (float(metrics_val[prot][app][i][metric]) / i)
     366            elif m_metric_norm[metric] == "C":
     367               content += "%-15f" % (float(metrics_val[prot][app][i][metric]) / exec_time[prot][app][i] * 1000)
     368            elif m_metric_norm[metric] == "W":
     369               content += "%-15f" % (float(metrics_val[prot][app][i][metric]) / float(metrics_val[prot][app][i]['total_write'])) # Number of writes
     370            elif m_metric_norm[metric] == "R":
     371               content += "%-15f" % (float(metrics_val[prot][app][i][metric]) / float(metrics_val[prot][app][i]['total_read'])) # Number of reads
     372            elif m_metric_norm[metric] == "D":
     373               content += "%-15f" % (float(metrics_val[prot][app][i][metric]) / float(metrics_val[prot][app][i]['total_direct'])) # Number of req.
     374            elif is_numeric(m_metric_norm[metric]):
     375               content += "%-15f" % (float(metrics_val[prot][app][i][metric]) / float(metrics_val[prot][app][int(m_metric_norm[metric])][metric]))
     376            else:
     377               assert(False)
     378   
     379         app_name = m_app_name[app]
     380         content += "#" + app_name + "\n"
     381     
     382      create_file(data_metric_name, content)
     383   
     384      # Calling gnuplot
     385      print "gnuplot", gp_metric_name
     386      subprocess.call([ 'gnuplot', gp_metric_name ])
     387
     388
     389############################################################
     390### Graph 5 : Stacked histogram with counters            ###
     391############################################################
     392
     393# The following section creates a stacked histogram containing
     394# the metrics in the "stacked_metric" list
     395# It is normalized per application w.r.t the values on 256 procs
     396
     397for prot in single_protocols:
     398   data_stacked_name = os.path.join(scripts_path, gen_dir, prot + '_stacked.dat')
     399   gp_stacked_name   = os.path.join(scripts_path, gen_dir, prot + '_stacked.gp')
     400   
     401   norm_factor_value = 256
     402   
     403   # Creating the gp file
     404   template_file = open(stacked_tmpl, 'r')
     405   template = template_file.read()
     406   
     407   xtics_str = "("
     408   first = True
     409   xpos = 1
     410   app_labels = ""
     411   for num_appli in range(0, len(apps)):
     412      for i in nb_procs:
     413         if not first:
     414            xtics_str += ", "
     415         first = False
     416         if i == nb_procs[0]:
     417            xpos_first = xpos
     418         xtics_str += "\"%d\" %d -1" % (i, xpos)
     419         xpos_last = xpos
     420         xpos += 1
     421      xpos += 1
     422      app_name_xpos = float((xpos_first + xpos_last)) / 2
     423      app_labels += "set label \"%s\" at first %f,character 1 center font \"Times,12\"\n" % (m_app_name[apps[num_appli]], app_name_xpos)
     424   xtics_str += ")"
     425   
     426   plot_str = "newhistogram \"\""
     427   n = 1
     428   for stacked_metric in stacked_metrics:
     429      plot_str += ", \\\n    " + "'" + data_stacked_name + "'" + " using " + str(n) + " lc rgb " + colors[n] + " title \"" + m_metric_name[stacked_metric] + "\""
     430      n += 1
     431   
     432   ylabel_str = "Breakdown of Coherence Traffic Normalized w.r.t. \\nthe Values on %d Processors" % norm_factor_value
     433   content = template % dict(svg_name = os.path.join(graph_dir, prot + '_stacked'), xtics_str = xtics_str, plot_str = plot_str, ylabel_str = ylabel_str, app_labels = app_labels, prot_labels = "")
     434   
     435   create_file(gp_stacked_name, content)
     436   
     437   # Creating the data file
     438   # Values are normalized by application, w.r.t. the number of requests for a given number of procs
     439   content = "#"
     440   for stacked_metric in stacked_metrics:
     441      content += stacked_metric
     442      content += ' ' + ' ' * (15 - len(stacked_metric))
     443   content += "\n"
     444   for app in apps:
     445      if app != apps[0]:
     446         for i in range(0, len(stacked_metrics)):
     447            content += "%-15f" % 0.0
     448         content += "\n"
     449      for i in nb_procs:
     450         for stacked_metric in stacked_metrics:
     451            content += "%-15f" % (float(metrics_val[prot][app][i][stacked_metric]) / metrics_val[prot][app][norm_factor_value]['total_stacked'])
     452         content += "\n"
     453   
     454   create_file(data_stacked_name, content)
     455   # Calling gnuplot
     456   print "gnuplot", gp_stacked_name
     457   subprocess.call([ 'gnuplot', gp_stacked_name ])
     458
     459
     460
     461#################################################################################
     462### Graph 6 : Stacked histogram with coherence cost compared to r/w cost      ###
     463#################################################################################
     464
     465# The following section creates pairs of stacked histograms, normalized w.r.t. the first one.
     466# The first one contains the cost of reads and writes, the second contains the cost
     467# of m_inv, m_up and broadcasts (extrapolated)
     468
     469for prot in single_protocols:
     470   data_cost_filename = os.path.join(scripts_path, gen_dir, prot + '_relative_cost.dat')
     471   gp_cost_filename   = os.path.join(scripts_path, gen_dir, prot + '_relative_cost.gp')
     472   
     473   direct_cost_metrics = [ 'read_cost', 'write_cost' ]
     474   coherence_cost_metrics = ['update_cost', 'm_inv_cost', 'broadcast_cost' ]
     475   
     476   # Creating the gp file
     477   template_file = open(stacked_tmpl, 'r')
     478   template = template_file.read()
     479   
     480   xtics_str = "("
     481   first = True
     482   xpos = 1
     483   app_labels = ""
     484   for num_appli in range(0, len(apps)):
     485      first_proc = True
     486      for i in nb_procs:
     487         if i > 4:
     488            if not first:
     489               xtics_str += ", "
     490            first = False
     491            if first_proc:
     492               first_proc = False
     493               xpos_first = xpos
     494            xtics_str += "\"%d\" %f -1" % (i, float(xpos + 0.5))
     495            xpos_last = xpos
     496            xpos += 3
     497      app_name_xpos = float((xpos_first + xpos_last)) / 2
     498      app_labels += "set label \"%s\" at first %f,character 1 center font \"Times,12\"\n" % (m_app_name[apps[num_appli]], app_name_xpos)
     499      #xpos += 1
     500   xtics_str += ")"
     501   
     502   plot_str = "newhistogram \"\""
     503   n = 1
     504   for cost_metric in direct_cost_metrics + coherence_cost_metrics:
     505      plot_str += ", \\\n    " + "'" + data_cost_filename + "'" + " using " + str(n) + " lc rgb " + colors[n] + " title \"" + m_metric_name[cost_metric] + "\""
     506      n += 1
     507   
     508   ylabel_str = "Coherence Cost Compared to Direct Requests Cost,\\nNormalized per Application for each Number of Processors"
     509   content = template % dict(svg_name = os.path.join(graph_dir, prot + '_rel_cost'), xtics_str = xtics_str, plot_str = plot_str, ylabel_str = ylabel_str, app_labels = app_labels, prot_labels = "")
     510   
     511   create_file(gp_cost_filename, content)
     512   
     513   # Creating the data file
     514   # Values are normalized by application, w.r.t. the number of requests for a given number of procs
     515   content = "#"
     516   for cost_metric in direct_cost_metrics:
     517      content += cost_metric
     518      content += ' ' + ' ' * (15 - len(cost_metric))
     519   for cost_metric in coherence_cost_metrics:
     520      content += cost_metric
     521      content += ' ' + ' ' * (15 - len(cost_metric))
     522   content += "\n"
     523   for app in apps:
     524      if app != apps[0]:
     525         for i in range(0, len(direct_cost_metrics) + len(coherence_cost_metrics)):
     526            content += "%-15f" % 0.0
     527         content += "\n"
     528      for i in nb_procs:
     529         if i > 4:
     530            for cost_metric in direct_cost_metrics:
     531               content += "%-15f" % (float(metrics_val[prot][app][i][cost_metric]) / metrics_val[prot][app][i]['direct_cost'])
     532            for cost_metric in coherence_cost_metrics:
     533               content += "%-15f" % 0.0
     534            content += "\n"
     535            for cost_metric in direct_cost_metrics:
     536               content += "%-15f" % 0.0
     537            for cost_metric in coherence_cost_metrics:
     538               content += "%-15f" % (float(metrics_val[prot][app][i][cost_metric]) / metrics_val[prot][app][i]['direct_cost'])
     539            content += "\n"
     540            if i != nb_procs[-1]:
     541               for j in range(0, len(direct_cost_metrics) + len(coherence_cost_metrics)):
     542                  content += "%-15f" % 0.0
     543               content += "\n"
     544   
     545   create_file(data_cost_filename, content)
     546   # Calling gnuplot
     547   print "gnuplot", gp_cost_filename
     548   subprocess.call([ 'gnuplot', gp_cost_filename ])
     549
     550
     551#################################################################################
     552### Joint Graphs to several architectures                                     ###
     553#################################################################################
     554
     555if len(joint_protocols) == 0:
     556   sys.exit()
     557
     558#################################################################################
     559### Graph 7: Comparison of Speedups (normalized w.r.t. 1 proc on first arch)  ###
     560#################################################################################
     561
    186562
    187563for app in apps:
    188564
    189    data_speedup_name   = os.path.join(scripts_path, gen_dir, app + '_speedup.dat')
    190    gp_speedup_name     = os.path.join(scripts_path, gen_dir, app + '_speedup.gp')
     565   data_speedup_name = os.path.join(scripts_path, gen_dir, 'joint_' + app + '_speedup.dat')
     566   gp_speedup_name   = os.path.join(scripts_path, gen_dir, 'joint_' + app + '_speedup.gp')
    191567   
    192568   # Creating data file
     
    199575   for i in nb_procs:
    200576      content += "%-15d " % i
    201       val = exec_time[app][i]
    202       content += "%-15f\n" % (exec_time[app][1] / float(val))
    203 
    204    plot_str = "\"" + data_speedup_name + "\" using ($1):($2) lc rgb \"#654387\" title \"Speedup\" with linespoint"
    205    
     577      for prot in joint_protocols:
     578         val = exec_time[prot][app][i]
     579         content += "%-15f " % (exec_time[joint_protocols[0]][app][1] / float(val))
     580      content += "\n"
     581
    206582   create_file(data_speedup_name, content)
    207583   
     
    209585   template_file = open(speedup_tmpl, 'r')
    210586   template = template_file.read()
    211    
    212    gp_commands = template % dict(appli = m_app_name[app], nb_procs = nb_procs[-1] + 1, plot_str = plot_str, svg_name = os.path.join(graph_dir, app + '_speedup'))
     587 
     588   plot_str = ""
     589   col = 2
     590   for prot in joint_protocols:
     591      if prot != joint_protocols[0]:
     592         plot_str += ", \\\n     "
     593      plot_str += "\"" + data_speedup_name + "\" using ($1):($" + str(col) + ") lc rgb %s title \"" % (colors[col - 2])  + m_prot_name[prot] + "\" with linespoint"
     594      col += 1
     595 
     596   gp_commands = template % dict(appli = m_app_name[app] + " Normalized w.r.t. " + m_prot_name[joint_protocols[0]] + " on 1 Processor", nb_procs = nb_procs[-1] + 1, plot_str = plot_str, svg_name = os.path.join(graph_dir, 'joint_' + app + '_speedup'))
    213597   
    214598   create_file(gp_speedup_name, gp_commands)
     
    219603
    220604
    221 ############################################################
    222 ### Graph 3 : All speedups on the same Graph             ###
    223 ############################################################
    224 
    225 # This graph uses the same template as the graph 2
    226 
    227 data_speedup_name = os.path.join(scripts_path, gen_dir, 'all_speedup.dat')
    228 gp_speedup_name   = os.path.join(scripts_path, gen_dir, 'all_speedup.gp')
    229 
    230 # Creating data file
    231 width = 15
    232 content = "#nb_procs"
    233 nb_spaces = width - len(content)
    234 content += (nb_spaces + 1) * ' '
    235 for app in apps:
    236    content += app + " "
    237    content += (width - len(app)) * " "
    238 content += "\n"
    239 
    240 for i in nb_procs:
    241    content += "%-15d " % i
    242    for app in apps:
    243       val = exec_time[app][i]
    244       content += "%-15f " % (exec_time[app][1] / float(val))
    245    content += "\n"
    246 
    247 create_file(data_speedup_name, content)
    248 
    249 # Creating gp file
    250 template_file = open(speedup_tmpl, 'r')
    251 template = template_file.read()
    252 
    253 plot_str = ""
    254 col = 2
    255 for app in apps:
    256    if app != apps[0]:
    257       plot_str += ", \\\n     "
    258    plot_str += "\"" + data_speedup_name + "\" using ($1):($" + str(col) + ") lc rgb %s title \"" % (colors[col - 2])  + m_app_name[app] + "\" with linespoint"
    259    col += 1
    260 
    261 gp_commands = template % dict(appli = "All Applications", nb_procs = nb_procs[-1] + 1, plot_str = plot_str, svg_name = os.path.join(graph_dir, 'all_speedup'))
    262    
    263 create_file(gp_speedup_name, gp_commands)
    264    
    265 # Calling gnuplot
    266 print "gnuplot", gp_speedup_name
    267 subprocess.call([ 'gnuplot', gp_speedup_name ])
    268 
    269 
    270 ############################################################
    271 ### Graph 4 : Graph per metric                           ###
    272 ############################################################
    273 
    274 # The following section creates the graphs grouped by measure (e.g. #broadcasts)
    275 # The template file cannot be easily created otherwise it would not be generic
    276 # in many ways. This is why it is mainly created here.
    277 # Graphs are created for metric in the "individual_metrics" list
    278 
    279 for metric in individual_metrics:
    280    data_metric_name = os.path.join(scripts_path, gen_dir, metric + '.dat')
    281    gp_metric_name   = os.path.join(scripts_path, gen_dir, metric + '.gp')
    282 
    283    # Creating the gp file
    284    # Setting xtics, i.e. number of procs for each application
    285    xtics_str = "("
    286    first = True
    287    xpos = 1
    288    app_labels = ""
    289    for num_appli in range(0, len(apps)):
    290       for i in nb_procs:
    291          if not first:
    292             xtics_str += ", "
    293          first = False
    294          if i == nb_procs[0]:
    295             xpos_first = xpos
    296          xtics_str += "\"%d\" %.1f" % (i, xpos)
    297          xpos_last = xpos
    298          xpos += 1.5
    299       xpos += 0.5
    300       app_name_xpos = float((xpos_first + xpos_last)) / 2
    301       app_labels += "set label \"%s\" at first %f,character 1 center font\"Times,12\"\n" % (m_app_name[apps[num_appli]], app_name_xpos)
    302    xtics_str += ")"
    303 
    304    xmax_val = xpos + 0.5
    305 
    306    # Writing the lines of "plot"
    307    plot_str = ""
    308    xpos = 0
    309    first = True
    310    column = 2
    311    for i in range(0, len(nb_procs)):
    312       if not first:
    313          plot_str += ", \\\n    "
    314       first = False
    315       plot_str += "\"%s\" using ($1+%.1f):($%d) lc rgb %s notitle with boxes" % (data_metric_name, xpos, column, colors[i])
    316       column += 1
    317       xpos += 1.5
    318 
    319    template_file = open(metric_tmpl, 'r')
    320    template = template_file.read()
    321 
    322    gp_commands = template % dict(xtics_str = xtics_str, app_labels = app_labels, ylabel_str = m_metric_name[metric], norm_factor_str = m_norm_factor_name[m_metric_norm[metric]], xmax_val = xmax_val, plot_str = plot_str, svg_name = os.path.join(graph_dir, metric))
    323 
    324    create_file(gp_metric_name, gp_commands)
    325    
    326    # Creating the data file
    327    width = 15
    328    content = "#x_pos"
    329    nb_spaces = width - len(content)
    330    content += nb_spaces * ' '
    331    for i in nb_procs:
    332       content += "%-15d" % i
    333    content += "\n"
    334 
    335    x_pos = 1
    336    for app in apps:
    337       # Computation of x_pos
    338       content += "%-15f" % x_pos
    339       x_pos += len(nb_procs) * 1.5 + 0.5
    340       for i in nb_procs:
    341          if m_metric_norm[metric] == "N":
    342             content += "%-15d" % (metrics_val[app][i][metric])
    343          elif m_metric_norm[metric] == "P":
    344             content += "%-15f" % (float(metrics_val[app][i][metric]) / i)
    345          elif m_metric_norm[metric] == "C":
    346             content += "%-15f" % (float(metrics_val[app][i][metric]) / exec_time[app][i] * 1000)
    347          elif m_metric_norm[metric] == "W":
    348             content += "%-15f" % (float(metrics_val[app][i][metric]) / float(metrics_val[app][i]['total_write'])) # Number of writes
    349          elif m_metric_norm[metric] == "R":
    350             content += "%-15f" % (float(metrics_val[app][i][metric]) / float(metrics_val[app][i]['total_read'])) # Number of reads
    351          elif m_metric_norm[metric] == "D":
    352             content += "%-15f" % (float(metrics_val[app][i][metric]) / float(metrics_val[app][i]['total_direct'])) # Number of req.
    353          elif is_numeric(m_metric_norm[metric]):
    354             content += "%-15f" % (float(metrics_val[app][i][metric]) / float(metrics_val[app][int(m_metric_norm[metric])][metric]))
    355          else:
    356             assert(False)
    357 
    358       app_name = m_app_name[app]
    359       content += "#" + app_name + "\n"
    360    
    361    create_file(data_metric_name, content)
    362 
    363    # Calling gnuplot
    364    print "gnuplot", gp_metric_name
    365    subprocess.call([ 'gnuplot', gp_metric_name ])
    366 
    367 
    368 ############################################################
    369 ### Graph 5 : Stacked histogram with counters            ###
    370 ############################################################
    371 
    372 # The following section creates a stacked histogram containing
    373 # the metrics in the "stacked_metric" list
    374 # It is normalized per application w.r.t the values on 256 procs
    375 
    376 data_stacked_name = os.path.join(scripts_path, gen_dir, 'stacked.dat')
    377 gp_stacked_name   = os.path.join(scripts_path, gen_dir, 'stacked.gp')
    378 
    379 norm_factor_value = 256
    380 
     605#################################################################################
     606### Graph 8 : Joint Stacked histogram with coherence cost and r/w cost        ###
     607#################################################################################
     608
      609# The following section creates, for each application and each number of processors, one pair of stacked histograms per architecture (R/W cost and coherence cost), normalized w.r.t. the first architecture's R/W cost (the first of the 2*num_arch histograms). It is similar to Graph 6.
     610
     611data_cost_filename = os.path.join(scripts_path, gen_dir, 'joint_relative_cost.dat')
     612gp_cost_filename   = os.path.join(scripts_path, gen_dir, 'joint_relative_cost.gp')
     613   
     614direct_cost_metrics = [ 'read_cost', 'write_cost' ]
     615coherence_cost_metrics = ['update_cost', 'm_inv_cost', 'broadcast_cost' ]
     616   
    381617# Creating the gp file
    382618template_file = open(stacked_tmpl, 'r')
    383619template = template_file.read()
    384 
     620   
    385621xtics_str = "("
    386622first = True
    387 xpos = 1
      623xpos = 1 # running x position of the center of the first bar in an application
    388624app_labels = ""
    389 for num_appli in range(0, len(apps)):
    390    for i in nb_procs:
    391       if not first:
    392          xtics_str += ", "
    393       first = False
    394       if i == nb_procs[0]:
    395          xpos_first = xpos
    396       xtics_str += "\"%d\" %d -1" % (i, xpos)
    397       xpos_last = xpos
    398       xpos += 1
    399    xpos += 1
    400    app_name_xpos = float((xpos_first + xpos_last)) / 2
    401    app_labels += "set label \"%s\" at first %f,character 1 center font\"Times,12\"\n" % (m_app_name[apps[num_appli]], app_name_xpos)
    402 xtics_str += ")"
    403 
    404 plot_str = "newhistogram \"\""
    405 n = 1
    406 for stacked_metric in stacked_metrics:
    407    plot_str += ", \\\n    " + "'" + data_stacked_name + "'" + " using " + str(n) + " lc rgb " + colors[n] + " title \"" + m_metric_name[stacked_metric] + "\""
    408    n += 1
    409 
    410 ylabel_str = "Breakdown of Coherence Traffic Normalized w.r.t. \\nthe Values on %d Processors" % norm_factor_value
    411 content = template % dict(svg_name = os.path.join(graph_dir, 'stacked'), xtics_str = xtics_str, plot_str = plot_str, ylabel_str = ylabel_str, app_labels = app_labels)
    412 
    413 create_file(gp_stacked_name, content)
    414 
    415 # Creating the data file
    416 # Values are normalized by application, w.r.t. the number of requests for a given number of procs
    417 content = "#"
    418 for stacked_metric in stacked_metrics:
    419    content += stacked_metric
    420    content += ' ' + ' ' * (15 - len(stacked_metric))
    421 content += "\n"
    422 for app in apps:
    423    if app != apps[0]:
    424       for i in range(0, len(stacked_metrics)):
    425          content += "%-15f" % 0.0
    426       content += "\n"
    427    for i in nb_procs:
    428       for stacked_metric in stacked_metrics:
    429          content += "%-15f" % (float(metrics_val[app][i][stacked_metric]) / metrics_val[app][norm_factor_value]['total_stacked'])
    430       content += "\n"
    431 
    432 create_file(data_stacked_name, content)
    433 # Calling gnuplot
    434 print "gnuplot", gp_stacked_name
    435 subprocess.call([ 'gnuplot', gp_stacked_name ])
    436 
    437 
    438 
    439 #################################################################################
    440 ### Graph 6 : Stacked histogram with coherence cost compared to r/w cost      ###
    441 #################################################################################
    442 
    443 # The following section creates pairs of stacked histograms, normalized w.r.t. the first one.
    444 # The first one contains the cost of reads and writes, the second contains the cost
    445 # of m_inv, m_up and broadcasts (extrapolated)
    446 
    447 data_cost_filename = os.path.join(scripts_path, gen_dir, 'relative_cost.dat')
    448 gp_cost_filename   = os.path.join(scripts_path, gen_dir, 'relative_cost.gp')
    449 
    450 direct_cost_metrics = [ 'read_cost', 'write_cost' ]
    451 coherence_cost_metrics = ['update_cost', 'm_inv_cost', 'broadcast_cost' ]
    452 
    453 # Creating the gp file
    454 template_file = open(stacked_tmpl, 'r')
    455 template = template_file.read()
    456 
    457 xtics_str = "("
    458 first = True
    459 xpos = 1.5
    460 app_labels = ""
     625prot_labels = ""
    461626for num_appli in range(0, len(apps)):
    462627   first_proc = True
    463628   for i in nb_procs:
    464629      if i > 4:
     630         x = 0 # local var for computing position of protocol names
     631         for prot in joint_protocols:
      632            prot_labels += "set label \"%s\" at first %f, character 2 center font \"Times,10\"\n" % (m_prot_name[prot], float((xpos - 0.5)) + x) # NOTE(review): -0.5 instead of +0.5; reason unknown — possibly a gnuplot label-placement quirk, to be confirmed
     633            x += 2
     634
    465635         if not first:
    466636            xtics_str += ", "
     
    469639            first_proc = False
    470640            xpos_first = xpos
    471          xtics_str += "\"%d\" %f -1" % (i, xpos)
     641         xtics_str += "\"%d\" %f -1" % (i, float(xpos - 0.5 + len(joint_protocols)))
    472642         xpos_last = xpos
    473          xpos += 3
     643         xpos += 1 + len(joint_protocols) * 2
    474644   app_name_xpos = float((xpos_first + xpos_last)) / 2
    475    app_labels += "set label \"%s\" at first %f,character 1 center font\"Times,12\"\n" % (m_app_name[apps[num_appli]], app_name_xpos)
     645   app_labels += "set label \"%s\" at first %f,character 1 center font \"Times,12\"\n" % (m_app_name[apps[num_appli]], app_name_xpos)
    476646   xpos += 1
    477647xtics_str += ")"
     
    483653   n += 1
    484654
    485 ylabel_str = "Coherence Cost Compared to Direct Requests Cost,\\nNormalized per Application for each Number of Processors"
    486 content = template % dict(svg_name = os.path.join(graph_dir, 'rel_cost'), xtics_str = xtics_str, plot_str = plot_str, ylabel_str = ylabel_str, app_labels = app_labels)
     655ylabel_str = "Coherence Cost and Direct Requests Cost,\\nNormalized per Application for each Number of Processors"
     656content = template % dict(svg_name = os.path.join(graph_dir, 'joint_rel_cost'), xtics_str = xtics_str, plot_str = plot_str, ylabel_str = ylabel_str, app_labels = app_labels, prot_labels = prot_labels)
    487657
    488658create_file(gp_cost_filename, content)
     
    500670for app in apps:
    501671   if app != apps[0]:
    502       for i in range(0, len(direct_cost_metrics) + len(coherence_cost_metrics)):
     672      for j in range(0, len(direct_cost_metrics) + len(coherence_cost_metrics)):
    503673         content += "%-15f" % 0.0
    504674      content += "\n"
    505675   for i in nb_procs:
    506676      if i > 4:
    507          for cost_metric in direct_cost_metrics:
    508             content += "%-15f" % (float(metrics_val[app][i][cost_metric]) / metrics_val[app][i]['direct_cost'])
    509          for cost_metric in coherence_cost_metrics:
    510             content += "%-15f" % 0.0
    511          content += "\n"
    512          for cost_metric in direct_cost_metrics:
    513             content += "%-15f" % 0.0
    514          for cost_metric in coherence_cost_metrics:
    515             content += "%-15f" % (float(metrics_val[app][i][cost_metric]) / metrics_val[app][i]['direct_cost'])
    516          content += "\n"
    517          for i in range(0, len(direct_cost_metrics) + len(coherence_cost_metrics)):
    518             content += "%-15f" % 0.0
    519          content += "\n"
     677         for prot in joint_protocols:
     678            for cost_metric in direct_cost_metrics:
     679               content += "%-15f" % (float(metrics_val[prot][app][i][cost_metric]) / metrics_val[joint_protocols[0]][app][i]['direct_cost'])
     680            for cost_metric in coherence_cost_metrics:
     681               content += "%-15f" % 0.0
     682            content += "\n"
     683            for cost_metric in direct_cost_metrics:
     684               content += "%-15f" % 0.0
     685            for cost_metric in coherence_cost_metrics:
     686               content += "%-15f" % (float(metrics_val[prot][app][i][cost_metric]) / metrics_val[joint_protocols[0]][app][i]['direct_cost'])
     687            content += "\n"
     688         if i != nb_procs[-1]:
     689            for j in range(0, len(direct_cost_metrics) + len(coherence_cost_metrics)):
     690               content += "%-15f" % 0.0
     691            content += "\n"
    520692
    521693create_file(data_cost_filename, content)
     
    525697
    526698
     699
     700
     701
Note: See TracChangeset for help on using the changeset viewer.