#!/s/std/bin/python
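# Graphing script for GEMS/LogTM results: greps Ruby statistics files under
# the results directory and produces jgraph/EPS (and optionally Excel) line
# and bar charts via mfgraph.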

import sys, string, os, glob, re, mfgraph


GEMS = "/u/k/i/kidd/research/GEMS/GEMS"
results_dir = GEMS + "/results/"
#results_dir = "../results"

make_excel_files = 1

def get_int_stat(file, stat):
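    """Grep 'file' for 'stat' and return the second field of the first matching line as an int."""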
    grep_lines = mfgraph.grep(file, stat)
    line = string.split(grep_lines[0])
    return int(line[1])

def get_int_param(file, param):
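    """Like get_int_stat, but intended for integer configuration parameters."""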
    grep_lines = mfgraph.grep(file, param)
    line = string.split(grep_lines[0])
    return int(line[1])
  
def get_float_stat(file, stat):
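    """Grep 'file' for 'stat' and return the second field of the first matching line as a float."""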
    grep_lines = mfgraph.grep(file, stat)
    line = string.split(grep_lines[0])
    return float(line[1])

def get_runtime(benchmark):
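    """Return a list of (processor count, Ruby_cycles) pairs, one per stats file for 'benchmark'."""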
    data = []
    files = glob.glob(results_dir + "/" + benchmark + "/*.stats")
    for file in files:
        procs  = get_int_stat(file, "g_NUM_NODES")
        cycles = get_int_stat(file, "Ruby_cycles")
        #print "%dp:\t%d" % (procs, cycles)
        data.append((procs, cycles))
    return data

def make_microbench_line(jgraphs, name, runs, bw, protocol_map, label):
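    """Plot mean execution time (millions of Ruby cycles) versus thread count for
    each run, with 95% confidence intervals, as jgraph and EPS output."""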
    xlabel = "Processors"
    ylabel = "Run Time"

    #proc_values = [1,   2,  3,  4,  5,  6,  7,  8,  9, 10,
    #               11, 12, 13, 14, 15, 16, 17, 18, 19, 20,
    #               21, 22, 23, 24, 25, 26, 27, 28, 29, 30,
    #               31]
    #proc_values = [1, 2, 3, 4, 5, 6, 7]
    proc_values = [1, 2, 4, 8, 16, 32]
    read_set = None
    data = []
    
    for run in runs:
        #print run
        protocol = protocol_map[run[2]]
        line = [run[0]]
        for procs in proc_values:
            #print procs
            lst = []
            #read_set_str = "%s" % read_set
            glob_str = "%s/%s/%s-%dp-*%s-*%d-*.stats" % (results_dir, run[1], run[1], procs, protocol, bw)
            files = glob.glob(glob_str)
            if files == []:
                #print "No files match: %s" % glob_str
                #exit
                continue
            for file in files:
                print file
                ruby_cycles = get_float_stat(file, "Ruby_cycles")/1000000.0
                #divide by procs?
                
                print "ruby_cycles: %f" % ruby_cycles
                lst.append(ruby_cycles)
            cycles = mfgraph.mean(lst)
            #print lst
            conf = mfgraph.confidence_interval_95_percent(lst)
            #print "95 conf: %f" % conf
            line.append([procs, cycles, cycles - conf, cycles + conf])
        data.append(line)

    print data
    jgraphs.append(mfgraph.line_graph(data,
                                      title = name,
                                      ylabel = "Execution Time (in millions of cycles)",
                                      xlabel = "Threads",
                                      xsize = 4.5,
                                      ysize = 8.0,
                                      line_thickness = 2.0,
                                      legend_x = "90",
                                      marktype = ["circle", "box", "triangle", "diamond"],
                                      #marksize = [0.4, 0.4, 0.4, 0.5],
                                      ))
    
    graph_out = mfgraph.line_graph(data,
                                   title = "",
                                   ylabel = "Execution Time (in millions of cycles)",
                                   #ymax = 1.50,
                                   ymin = 0,
                                   xsize = 2.5,
                                   ysize = 1.778,
                                   line_thickness = 2.0,
                                   marktype = ["circle", "box", "triangle", "diamond"],
                                   marksize = [0.2, 0.3, 0.3, 0.4],
                                   title_fontsize = "12",
                                   label_fontsize = "8",
                                   legend_fontsize = "9",
                                   legend_x = "70",
                                   colors = ["0 0 0",
                                             ".6 .6 .6",
                                             ".2 .2 .2",
                                             ".8 .8 .8",
                                             ".4 .4 .4"]
                                          )
    mfgraph.make_eps(graph_out, "%s_%s_line" % (name, label), "xref_figures")


def make_microbench_speedup_line(jgraphs, name, runs, bw, protocol_map, label):
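    """Plot speedup over the 1-processor run for each configuration, with 95%
    confidence intervals; emits jgraph, EPS, and Excel output."""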
    xlabel = "Processors"
    ylabel = "Run Time"


    proc_values = [1, 2, 4, 8, 16, 32]
    read_set = None
    data = []
    
    for run in runs:
        #print run
        protocol = protocol_map[run[2]]
        line = [run[0]]
        base_cycles = 0.0
        for procs in proc_values:
            #print procs
            lst = []
            #read_set_str = "%s" % read_set
            glob_str = "%s/%s/%s-%dp-*%s-%d-*.stats" % (results_dir, run[1], run[1], procs, protocol, bw)

            files = glob.glob(glob_str)
            if files == []:
                print "No files match: %s" % glob_str
                #exit
                continue
            for file in files:
                #print file
                ruby_cycles = get_float_stat(file, "Ruby_cycles")/1000000.0
                #divide by procs?
                #print "ruby_cycles: %f" % ruby_cycles
                lst.append(ruby_cycles)
            cycles = mfgraph.mean(lst)
            conf = mfgraph.confidence_interval_95_percent(lst)
            #print "95 conf: %f" % conf
            if procs == 1:
                base_cycles = cycles
                #print "base_cycles is: %f" % base_cycles

            line.append([procs, base_cycles/cycles, base_cycles/(cycles - conf), base_cycles/(cycles + conf)])
        data.append(line)

    #linear = ["Linear"]
    #for p in proc_values:
    #    linear.append([p, p, p, p])
    #data.append(linear)

    #print data
    jgraphs.append(mfgraph.line_graph(data,
                                      title = name,
                                      ylabel = "Speedup",
                                      xlabel = "Threads",
                                      xsize = 4.5,
                                      ysize = 6.0,
                                      line_thickness = 2.0,
                                      legend_x = "90",
                                      marktype = ["circle", "box", "triangle", "diamond"],
                                      #marksize = [0.4, 0.4, 0.4, 0.5],
                                      ))

    graph_out = mfgraph.line_graph(data,
                                   title = "",
                                   ylabel = "Speedup",
                                   xlabel = "Threads",
                                   ymin = 0,
                                   xsize = 2.5,
                                   ysize = 1.778,
                                   line_thickness = 2.0,
                                   marktype = ["circle", "box", "triangle", "diamond"],
                                   marksize = [0.2, 0.3, 0.3, 0.4],
                                   title_fontsize = "12",
                                   label_fontsize = "8",
                                   legend_fontsize = "9",
                                   legend_x = "90",
                                   legend_y = "70",
                                   colors = ["0 0 0",
                                             ".6 .6 .6",
                                             ".2 .2 .2",
                                             ".8 .8 .8",
                                             ".4 .4 .4"]
                                          )
    mfgraph.make_eps(graph_out, "%s_%s_speedup_line" % (name, label), "xref_figures")
    mfgraph.make_excel_line(name="%s-%s" % (name, label), data=data);

def make_microbench_speedup_line2(jgraphs, name, runs, bw, protocol, nest, label, minopen):
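    """Speedup-versus-threads plot for runs whose stats file names encode the
    given 'nest' and 'minopen' settings."""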
    xlabel = "Processors"
    ylabel = "Run Time"

    #proc_values = [1,   2,  3,  4,  5,  6,  7,  8,  9, 10,
    #               11, 12, 13, 14, 15,
    #               16, 17, 18, 19, 20,
    #               21, 22, 23, 24, 25, 26, 27, 28, 29, 30,
    #               31]
    proc_values = [1, 3, 7, 15, 24, 31]
    read_set = None
    data = []
    
    for run in runs:
        #print run
        #protocol = protocol_map[run[2]]
        line = [run[0]]
        base_cycles = 0.0
        for procs in proc_values:
            #print procs
            lst = []
            #read_set_str = "%s" % read_set
            glob_str = "%s/%s/%s-%dp-*%s-%dnx-%dminopen-%d-*.stats" % (results_dir, run[1], run[1], procs, protocol, nest, minopen, bw)
            #glob_str = "%s/%s/%s*-%dp-default-%s-%dnx-%d-*.stats" % (results_dir, run[1], run[1], procs, protocol, nesting, bw)
            files = glob.glob(glob_str)
            if files == []:
                print "No files match: %s" % glob_str
                #exit
                continue
            for file in files:
                print file
                ruby_cycles = get_float_stat(file, "Ruby_cycles")/1000000.0
                #divide by procs?
                #print "ruby_cycles: %f" % ruby_cycles
                lst.append(ruby_cycles)
            cycles = mfgraph.mean(lst)
            conf = mfgraph.confidence_interval_95_percent(lst)
            #print "95 conf: %f" % conf
            if procs == 1:
                base_cycles = cycles
                print "base_cycles is: %f" % base_cycles

            line.append([procs, base_cycles/cycles, base_cycles/(cycles - conf), base_cycles/(cycles + conf)])
        data.append(line)

    linear = ["Linear"]
    for p in proc_values:
        linear.append([p, p, p, p])
    data.append(linear)

    #print data
    jgraphs.append(mfgraph.line_graph(data,
                                      title = name,
                                      ylabel = "Speedup",
                                      xlabel = "Threads",
                                      xsize = 4.5,
                                      ysize = 6.0,
                                      line_thickness = 2.0,
                                      legend_x = "90",
                                      marktype = ["circle", "box", "triangle", "diamond"],
                                      #marksize = [0.4, 0.4, 0.4, 0.5],
                                      ))
    
def make_microbench_combined_speedup_line(jgraphs, name, runs, bw, protocol, label):
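    """Speedup plot where each run tuple carries its own minopen (run[2]) and
    nesting (run[3]) values; emits jgraph, EPS, and optional Excel output."""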
    xlabel = "Processors"
    ylabel = "Run Time"

    #proc_values = [1,   2,  3,  4,  5,  6,  7,  8,  9, 10,
    #               11, 12, 13, 14, 15,
    #               16, 17, 18, 19, 20,
    #               21, 22, 23, 24, 25, 26, 27, 28, 29, 30,
    #               31]
    proc_values = [1, 3, 7, 15, 24, 31]
    minopen_values = [40, 35]
    nest_values = [1, 4]
    read_set = None
    data = []
    
    
    for run in runs:
        line = [run[0]]
        base_cycles = 0.0
        for procs in proc_values:
            lst = []
            glob_str = "%s/%s/%s-%dp-*%s-%snx-%sminopen-%d-*.stats" % (results_dir, run[1], run[1], procs, protocol, run[3], run[2], bw)
            files = glob.glob(glob_str)
            if files == []:
                print "No files match: %s" % glob_str
                continue
            for file in files:
                print file
                ruby_cycles = get_float_stat(file, "Ruby_cycles")/1000000.0
                #divide by procs?
                #print "ruby_cycles: %f" % ruby_cycles
                lst.append(ruby_cycles)
            cycles = mfgraph.mean(lst)
            conf = mfgraph.confidence_interval_95_percent(lst)
            #print "95 conf: %f" % conf
            if procs == 1:
                base_cycles = cycles
                #print "base_cycles is: %f" % base_cycles
            # print "cycles is: ", base_cycles/cycles
            line.append([procs, base_cycles/cycles, base_cycles/(cycles - conf), base_cycles/(cycles + conf)])
        data.append(line)

    #linear = ["Linear"]
    #for p in proc_values:
    #    linear.append([p, p, p, p])
    #data.append(linear)

    #print data
    graph_out = mfgraph.line_graph(data,
                                      ylabel = "Speedup",
                                      xlabel = "Threads",
                                      label_fontsize = "10",
                                      xsize = 2.5,
                                      ysize = 2.3,
                                      line_thickness = 2.0,
                                      legend_x = "75",
                                      legend_y = "60",
                                      legend_fontsize = "10",
                                      marktype = ["circle", "box", "triangle", "diamond"],
                                      #marksize = [0.4, 0.4, 0.4, 0.5],
                                      )
    
    jgraphs.append(graph_out)
    mfgraph.make_eps(graph_out, label, "btree")
    if make_excel_files:
        mfgraph.make_excel_line(name=label, data=data)
        
def make_microbench_bar(jgraphs, name, runs, bw, protocol_map, label):
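    """Bar graph of mean execution time (millions of Ruby cycles) per processor
    count, with 95% confidence intervals; emits jgraph, EPS, and optional Excel output."""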
    xlabel = "Processors"
    ylabel = "Run Time"
    #read_set = 64
    read_set = None

    bars = []
    proc_values = [2, 4, 8, 16, 31]
    #proc_values = [1, 2, 8, 15]
    #proc_values = [1, 2] 
    for run in runs:
        #print run
        protocol = protocol_map[run[2]]
        bar = [run[0]]
        for procs in proc_values:
            #print procs
            lst = []
            #read_set_str = "%s" % read_set
            #glob_str = "%s/%s/%s-%sk-%dp-default-%s-%d-*.stats" % (results_dir, run[1], run[1], read_set_str, procs, protocol, bw)
            #glob_str = "%s/%s/%s*-%dp-default-%s-%d-*.stats" % (results_dir, run[1], run[1], procs, protocol, bw)
            glob_str = "%s/%s/%s*-%dp-*%s-%d-*.stats" % (results_dir, run[1], run[1], procs, protocol, bw)
            files = glob.glob(glob_str)
            if files == []:
                print "No files match: %s" % glob_str
                #exit
                continue
            for file in files:
                #print file
                ruby_cycles = get_float_stat(file, "Ruby_cycles")/1000000.0
                #print "ruby_cycles: %d" % ruby_cycles
                lst.append(ruby_cycles)
            cycles = mfgraph.mean(lst)
            print lst
            conf = mfgraph.confidence_interval_95_percent(lst)

            bar.append(["%d" % procs, [cycles, cycles - conf, cycles + conf]])
        bars.append(bar)

    #print bars
    jgraphs.append(mfgraph.stacked_bar_graph(bars,
                                             bar_segment_labels = ["runtime"],
                                             xsize = 5.0,
                                             ))
    
    graph_out = mfgraph.stacked_bar_graph(bars,
                                          title = "",
                                          bar_segment_labels = ["Execution Time"],
                                          ylabel = "Execution Time (in millions of cycles)",
                                          #ymax = 1.50,
                                          ymin = 0,
                                          xsize = 2.5,
                                          ysize = 1.778,
                                          title_fontsize = "12",
                                          label_fontsize = "8",
                                          bar_name_font_size = "9",
                                          legend_fontsize = "9",
                                          stack_name_font_size = "9",
                                          stack_name_location = 9,
                                          bar_space = 1.2,
                                          colors = ["0 0 0",
                                                    ".6 .6 .6",
                                                    ".2 .2 .2",
                                                    ".8 .8 .8",
                                                    ".4 .4 .4"]
                                          )
    mfgraph.make_eps(graph_out, "%s_%s_bar" % (name, label), "xref_figures")
    if make_excel_files:
        mfgraph.make_excel_bar(name=name,
                               data=bars)
    

def make_read_set(jgraphs, name, runs, bw):
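    """Bar graph of mean execution time versus transaction read-set size (4k-64k,
    as encoded in the stats file names) at a fixed processor count."""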
    protocol = "MOESI_xact_hammer_bf"
    xlabel = "Processors"
    ylabel = "Run Time"

    bars = []
    read_set_sizes = [4, 8, 16, 32, 64]
    procs = 7
    for run in runs:
        bar = [run[0]]
        for read_set in read_set_sizes:
            lst = []
            files = glob.glob("%s/%s/%s-%dk-%dp-*%s-%d-*.stats" % (results_dir, run[1], run[1], read_set, procs, protocol, bw))
            for file in files:
                #print file
                lst.append(get_float_stat(file, "Ruby_cycles"))
            #print lst
            cycles = mfgraph.mean(lst)
            conf = mfgraph.confidence_interval_95_percent(lst)
            #print conf
                
            bar.append(["%dk" % read_set, cycles, cycles - conf, cycles + conf])
        bars.append(bar)

    #print bars
    jgraphs.append(mfgraph.stacked_bar_graph(bars,
                                             bar_segment_labels = ["runtime"],
                                             xsize = 5.0,
                                             ))
    
    graph_out = mfgraph.stacked_bar_graph(bars,
                                          title = "",
                                          bar_segment_labels = ["Execution Time"],
                                          ylabel = "Execution Time (in millions of cycles)",
                                          #ymax = 1.50,
                                          ymin = 0,
                                          xsize = 3.5,
                                          ysize = 1.778,
                                          title_fontsize = "12",
                                          label_fontsize = "8",
                                          bar_name_font_size = "8",
                                          legend_fontsize = "9",
                                          stack_name_font_size = "9",
                                          stack_name_location = 9,
                                          bar_space = 1.2,
                                          colors = ["1 1 1",
                                                    ".8 .8 .8",
                                                    ".6 .6 .6",
                                                    ".4 .4 .4",
                                                    "0 0 0"]
                                          )
    mfgraph.make_eps(graph_out, "%s_read_set" % name, "xref_figures")
    if make_excel_files:
        mfgraph.make_excel_bar(name="%s_read_set" % name, data=bars)

def make_norm_runtime(jgraphs, pairs, protocol, label):
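    """Bar graph of speedup per processor count, computed as mean Ruby_cycles of
    each pair's first (base) run divided by that of its second (test) run."""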
    xlabel = "Processors"
    ylabel = "Speedup"

    bars = []
    proc_values = [1, 2, 4, 8, 16, 32]
    for pair in pairs:
        bar = [pair[0]]
        for procs in proc_values:
            #bar = []
            lst = []
            #glob_str = "%s/%s/%s-%dp-default-%s*.stats" % (results_dir, pair[1], pair[1], procs, protocol)
            glob_str = "%s/%s/%s-%dp*-*%s-*.stats" % (results_dir, pair[1], pair[1], procs, protocol)
            files = glob.glob(glob_str)
            if files == []:
                print "%s not found" % glob_str
                continue
            for file in files:
                lst.append(get_int_stat(file, "Ruby_cycles"))
            cycles_base = mfgraph.mean(lst)

            lst = []
            #files = glob.glob("%s/%s/%s-%dp-default-%s*.stats" % (results_dir, pair[2], pair[2], procs, protocol))
            #protocol = "MESI_SMP_LogTM_directory"
            glob_str = "%s/%s/%s-%dp*-*%s-*.stats" % (results_dir, pair[2], pair[2], procs, protocol)
            files = glob.glob(glob_str)
            if files == []:
                print "%s not found" % glob_str
                continue
            for file in files:
                print get_int_stat(file, "Ruby_cycles")
                lst.append(get_int_stat(file, "Ruby_cycles"))
            cycles_test = mfgraph.mean(lst)
                        
            print "cycles_test: %d, cycles_base %d", (cycles_test, cycles_base)
            norm_exe = float(cycles_base)/float(cycles_test)
            bar.append(["%d" % procs, norm_exe])
            #bar += ["%d" % procs, ["TM", cycles_test], ["L", cycles_base]]
        bars.append(bar)

    jgraphs.append(mfgraph.stacked_bar_graph(bars,
                                             bar_segment_labels = ["runtime"],
                                             #xsize = 5.0,
                                             #ymax = 5.0,
                                             ))

    graph_out = mfgraph.stacked_bar_graph(bars,
                                          title = "",
                                          bar_segment_labels = ["Execution Time"],
                                          ylabel = "Speedup ",
                                          ymax = 5.0,
                                          ymin = 0,
                                          xsize = 2.5,
                                          ysize = 1.778,
                                          title_fontsize = "12",
                                          label_fontsize = "8",
                                          bar_name_font_size = "5",
                                          legend_fontsize = "9",
                                          stack_name_font_size = "5",
                                          stack_name_location = 9,
                                          bar_space = 1.2,
                                          colors = ["1 0 1",
                                                    ".8 .8 .8",
                                                    ".6 .6 .6",
                                                    ".4 .4 .4",
                                                    "0 0 0"])
    mfgraph.make_eps(graph_out, "%s_norm" % label, "xref_figures")
    if make_excel_files:
        print bars    
        mfgraph.make_excel_bar("speedup_%s" % label, bars) 
    
def make_abs_runtime(jgraphs, pairs, procs, protocol, label):
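    """Bar graph of absolute execution time (millions of cycles) at a fixed
    processor count, comparing the LogTM run (pair[2]) against the lock run (pair[1])."""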
    xlabel = "Processors"
    ylabel = "Run Time"

    bars = []
    for pair in pairs:
        #print pair[0]
        
        bar = []
        lst = []
	l_protocol = "MESI_SMP_LogTM_directory"
        glob_str = "%s/%s/%s-%dp-*%s-*.stats" % (results_dir, pair[1], pair[1], procs, l_protocol)
        files = glob.glob(glob_str)
        if files == []:
            print "%s not found" % glob_str
            return
        for file in files:
            lst.append(get_int_stat(file, "Ruby_cycles")/1000000.0)
        cycles_base = mfgraph.mean(lst)
        base_conf = mfgraph.confidence_interval_95_percent(lst)
            
        lst = []
	#protocol="MESI_SMP_LogTM_directory"
        glob_str = "%s/%s/%s-%dp-*%s-*.stats" % (results_dir, pair[2], pair[2], procs, protocol)
        #glob_str = "%s/%s/%s-%dp*-default-%s*.stats" % (results_dir, pair[2], pair[2], procs, pair[3])
        files = glob.glob(glob_str)
        if files == []:
            print "%s not found" % glob_str
            return
        for file in files:
            lst.append(get_int_stat(file, "Ruby_cycles")/1000000.0)
        cycles_test = mfgraph.mean(lst)
        test_conf = mfgraph.confidence_interval_95_percent(lst)
            
        norm_exe = float(cycles_test)/float(cycles_base)
        #bar.append(["%d" % procs, norm_exe])
        bar += [pair[0],
                ["LogTM", [cycles_test, cycles_test - test_conf, cycles_test + test_conf]],
                ["Locks", [cycles_base, cycles_base - base_conf, cycles_base + base_conf]]
                ]
        bars.append(bar)

    jgraphs.append(mfgraph.stacked_bar_graph(bars,
                                             bar_segment_labels = ["runtime"],
                                             xsize = 5.0,
                                             ))

    graph_out = mfgraph.stacked_bar_graph(bars,
                                          title = "",
                                          bar_segment_labels = ["Execution Time"],
                                          ylabel = "Execution Time (in millions of cycles)",
                                          #ymax = 1.50,
                                          ymin = 0,
                                          xsize = 2.5,
                                          ysize = 1.778,
                                          title_fontsize = "12",
                                          label_fontsize = "8",
                                          bar_name_font_size = "9",
                                          legend_fontsize = "9",
                                          stack_name_font_size = "9",
                                          stack_name_location = 9,
                                          bar_space = 1.2,
                                          colors = ["1 1 1",
                                                    ".8 .8 .8",
                                                    ".6 .6 .6",
                                                    ".4 .4 .4",
                                                    "0 0 0"])
    mfgraph.make_eps(graph_out, "%s_abs" % label, "xref_figures")
    if make_excel_files:
        mfgraph.make_excel_bar(name="splash_abs",
                               data=bars)
    
def make_distribution(jgraphs, benchmarks, procs, dist, stat):
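    """Parse a Ruby histogram statistic (stat[0]) from each benchmark's stats
    files and plot it as a CDF, PDF, or weighted PDF line graph."""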
    protocol = "MESI_SMP_LogTM_directory"
    lines = []
    max_index = 0
    for (name, benchmark) in benchmarks:
        #glob_str = "%s/%s/%s-%dp-default-%s*.stats" % (results_dir, benchmark, benchmark, procs, protocol)
        glob_str = "%s/%s/%s-%dp*-*%s*.stats" % (results_dir, benchmark, benchmark, procs, protocol)
        files = glob.glob(glob_str)
        if files == []:
            print "No files match: %s" % glob_str
            continue
        data = []
        for file in files:
            grep_lines = mfgraph.grep(file, stat[0])
            tokens = string.split(grep_lines[0])
            lst = []
            pipe_count = 0
            count = 0
            count_next = 0
            bin_size_next = 0
            bin_size = 0
            index = 0
            for token in tokens:
                #print token
                if token == '|':
                    pipe_count += 1
                elif token == 'count:':
                    count_next = 1
                elif token == '[binsize:':
                    bin_size_next = 1
                elif bin_size_next == 1:
                    bin_size_next = 0
                    bin_size = int(token)
                    #print "BIN_SIZE = %d" % bin_size
                elif count_next == 1:
                    count_next = 0
                    count = int(token)
                elif token == ']':
                    pipe_count = 0
                elif pipe_count >= 2:
                    lst.append([index*bin_size, float(token)])
                    index += 1
                    if index > max_index:
                        max_index = index
            
            if dist == "CDF":
                data += mfgraph.cdf(count, lst)
            elif dist == "PDF":
                #lst.append([index, 0])
                data += mfgraph.pdf(count, lst)
            elif dist == "WPDF":
                data += mfgraph.w_pdf(count, lst)
            else:
                print "ERROR, distribution %s not supported." % dist
                sys.exit(0)
                
        lines.append(mfgraph.merge_data(name, data))
        
        if dist == "CDF":
            for line in lines[1:]:
                line.append([max_index, 1.0, 1.0, 1.0])
            
    graph_out = mfgraph.line_graph(lines,
                                   title = "",
                                   xlabel = stat[1],
                                   ylabel = dist,
                                   xsize = 4.5
                                   )
    jgraphs.append(graph_out)

    graph_out = mfgraph.line_graph(lines,
                                   title = "",
                                   xlabel = stat[1],
                                   ylabel = dist,
                                   ymax = 1.0,
                                   title_fontsize = "12",
                                   label_fontsize = "8",
                                   bar_name_font_size = "9",
                                   legend_fontsize = "9",
                                   legend_x = "50",
                                   line_thickness = 1.5,
                                   #xlog = 2,
                                   xsize = 2.5,
                                   ysize = 1.778)
    mfgraph.make_eps(graph_out, "%s-%s" % (stat[2], dist), "xref_figures")
    if make_excel_files:
        mfgraph.make_excel_line(name="%s-%s" % (stat[2], dist),
                                data=lines)


def make_dist_table(benchmarks, procs, stat):
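    """Print a plain-text table that buckets a Ruby histogram statistic (stat[0])
    into power-of-two bins scaled by the 64-byte block size."""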
    protocol = "MESI_SMP_LogTM_directory"

    print
    print "DISTRIBUTION TABLE--%s" % stat[1]

    bins = [1, 2, 4, 8, 16, 32, 64, 128, 256, 512, 1024]
    block_size = 64
    max_index = 0
    for (name, benchmark) in benchmarks:
        data = {}
        for bin in bins:
            data[bin] = 0
        
        #glob_str = "%s/%s/%s-%dp-default-%s*.stats" % (results_dir, benchmark, benchmark, procs, protocol)
        glob_str = "%s/%s/%s-%dp*-*%s*.stats" % (results_dir, benchmark, benchmark, procs, protocol)
        files = glob.glob(glob_str)
        if files == []:
            print "No files match: %s" % glob_str
            continue
        for file in files:
            grep_lines = mfgraph.grep(file, stat[0])
            tokens = string.split(grep_lines[0])
            lst = []
            pipe_count = 0
            count = 0
            count_next = 0
            bin_size_next = 0
            bin_size = 0
            index = 0
            for token in tokens:
                #print token
                if token == '|':
                    pipe_count += 1
                elif token == 'count:':
                    count_next = 1
                elif token == '[binsize:':
                    bin_size_next = 1
                elif bin_size_next == 1:
                    bin_size_next = 0
                    bin_size = int(token)
                    #print "BIN_SIZE = %d" % bin_size
                elif count_next == 1:
                    count_next = 0
                    count = int(token)
                elif token == ']':
                    pipe_count = 0
                elif pipe_count >= 2:
                    lst.append([index*bin_size, int(token)])
                    index += 1
                    if index > max_index:
                        max_index = index

            for tuple in lst:
                last_bin = 0
                for bin in bins:
                    if tuple[0] >= last_bin and tuple[0] < bin:
                        #print "adding %f" % tuple[1]
                        data[bin] = data[bin] + tuple[1]
                        last_bin = bin
        print "%s for %s: " % (stat[1], name)
        for bin in bins:
            print "%d B\t%d" % ((bin*block_size), data[bin])
            
def make_stall_histogram(jgraphs, benchmarks, protocol):
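	"""Stacked-bar graph of xact_stall_occupancy (from *.stall files), with each
	processor-count bar normalized to that run's transactional cycles."""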
	stat = "xact_stall_occupancy"
	bins = range(0, 33)
	processors = [4, 8, 12, 16, 32]
	bars = []    
	for (name, benchmark) in benchmarks:
		set = [name]	
		for procs in processors:
			#print name
			data = {}
        
			for bin in bins:
				data[bin] = 0

			total_cycles = 0
			trans_cycles = 0
			ruby_cycles = 0
			file_count = 0
			#glob_str = "%s/%s/%s-%dp-default-%s*.stats" % (results_dir, benchmark, benchmark, procs, protocol)
			glob_str = "%s/%s/%s-%dp*-*%s*.stall" % (results_dir, benchmark, benchmark, procs, protocol)
			files = glob.glob(glob_str)
			if files == []:
				print "No files match: %s" % glob_str
				continue                
			for file in files:
				trans_cycles += get_int_stat(file, "Trans_cycles")
				file_count += 1
				grep_lines = mfgraph.grep(file, stat)
				bin = 0
				#print grep_lines
				for line in grep_lines:
					#print line
					tokens = string.split(line, ":")
					#print tokens
					cycles = string.atoi(tokens[1])
					total_cycles += cycles
					#print bin
					data[bin] += cycles
					bin += 1
	
			print name
			#print data
			#print "total_cycles = %d, ruby_cycles = %d" % (total_cycles, ruby_cycles)
			print "trans_cycles = %d" % (trans_cycles)
			temp = []
			bar = [procs]
			if file_count > 0:
				total = 0.0
				if (total_cycles == 0):
					total_cycles = 1	
				for bin in bins:
					#print "%d: %f" % (bin, float(data[bin])/float(total_cycles))
					point = float(data[bin])/float(trans_cycles)
					#point = float(data[bin])
					total += point
					temp.append(total)
				temp.reverse()
				bar += temp
				#print bar.reverse()
				set.append(bar)
		bars.append(set)
	print bars
	jgraphs.append(mfgraph.stacked_bar_graph(bars,
                                             title = "Stall Distribution graph",
											 ylabel = "Normalized Stall times",
                                             bar_segment_labels = ["runtime"],
                                             xsize = 6.5,
                                             #bar_name_rotate = 90.0,
                                             legend = "on",
                                             stack_name_location = 12,
											 colors = ["1 0 0", "0 0 0", "0 .5 0", "1 0 1", "1 1 0", "0.5 0 0"]
                                             ))


def make_xact_histogram(jgraphs, benchmarks, protocol):
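	"""Stacked-bar graph of xact_occupancy (from *.stall files), with each
	processor-count bar normalized to Total_cycles."""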
	stat = "xact_occupancy"
	bins = range(0, 33)
	processors = [4, 8, 12, 16, 32]
	bars = []    
	for (name, benchmark) in benchmarks:
		set = [name]	
		for procs in processors:
			#print name
			data = {}
        
			for bin in bins:
				data[bin] = 0

			total_cycles = 0
			trans_cycles = 0
			ruby_cycles = 0
			file_count = 0
			#glob_str = "%s/%s/%s-%dp-default-%s*.stats" % (results_dir, benchmark, benchmark, procs, protocol)
			glob_str = "%s/%s/%s-%dp*-*%s*.stall" % (results_dir, benchmark, benchmark, procs, protocol)
			files = glob.glob(glob_str)
			if files == []:
				print "No files match: %s" % glob_str
				continue                
			for file in files:
				trans_cycles += get_int_stat(file, "Total_cycles")
				file_count += 1
				grep_lines = mfgraph.grep(file, stat)
				bin = 0
				#print grep_lines
				for line in grep_lines:
					if (bin == 0):
						data[bin] = 0
						bin += 1
						continue		
					#print line
					tokens = string.split(line, ":")
					#print tokens
					cycles = string.atoi(tokens[1])
					total_cycles += cycles
					#print bin
					data[bin] += cycles
					bin += 1
	
			print name
			#print data
			#print "total_cycles = %d, ruby_cycles = %d" % (total_cycles, ruby_cycles)
			print "total_cycles = %d" % (trans_cycles)
			temp = []
			bar = [procs]
			if file_count > 0:
				total = 0.0
				if (total_cycles == 0):
					total_cycles = 1	
				for bin in bins:
					#print "%d: %f" % (bin, float(data[bin])/float(total_cycles))
					point = float(data[bin])/float(trans_cycles)
					#point = float(data[bin])
					total += point
					temp.append(total)
				temp.reverse()
				bar += temp
				#print bar.reverse()
				set.append(bar)
		bars.append(set)
	print bars
	jgraphs.append(mfgraph.stacked_bar_graph(
            bars,
            title = "Xact Occupancy Distribution graph",
            ylabel = "Normalized Xact times",
            bar_segment_labels = ["runtime"],
            xsize = 6.5,
            #bar_name_rotate = 90.0,
            legend = "on",
            stack_name_location = 12,
            colors = ["1 0 0", "0 0 0", "0 .5 0", "1 0 1", "1 1 0", "0.5 0 0"]
            ))



def make_aborts_graph(jgraphs, benchmarks, protocol):
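	"""Stacked-bar graph of mean aborts per transaction (xact_aborts / Begin_XACT)
	for each benchmark and processor count."""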

	bars = []
	bar = []
	processors = [1, 2, 4, 8, 16, 32]
	for (name, benchmark) in benchmarks:
		set = [name]
		for procs in processors:		
			glob_str = "%s/%s/%s-%dp-*%s*.stats" % (results_dir, benchmark, benchmark, procs, protocol)
			files = glob.glob(glob_str)
			lst = []
			count = []
			if files == []:
				print "%s not found" % glob_str
				continue
			for file in files:
				lst.append(get_int_stat(file, "xact_aborts"))
				count.append(get_int_stat(file, "^Begin_XACT"))	
			mean_aborts = mfgraph.mean(lst)
			mean_count = mfgraph.mean(count)
			if mean_count == 0:
				mean_aborts = 0
				mean_count = 1
			norm_abort = mean_aborts / mean_count
			set.append([procs,norm_abort])
		bars.append(set)

			
        #test_conf = mfgraph.confidence_interval_95_percent(lst)
            
        #norm_exe = float(cycles_test)/float(cycles_base)
        #bar.append(["%d" % procs, norm_exe])
        #bar += [pair[0],
        #        ["LogTM", [cycles_test, cycles_test - test_conf, cycles_test + test_conf]],
        #        ["Locks", [cycles_base, cycles_base - base_conf, cycles_base + base_conf]]
        #        ]
	print bars
	jgraphs.append(mfgraph.stacked_bar_graph(bars,
                                             title = "Transactional Aborts",
											 ylabel = "Aborts/Transaction",
                                             bar_segment_labels = ["runtime"],
                                             xsize = 5.0,
                                             ))

	graph_out = mfgraph.stacked_bar_graph(bars,
                                          title = "Transactional Aborts",
                                          bar_segment_labels = ["Execution Time"],
                                          ylabel = "Aborts/Transaction",
                                          #ymax = 1.50,
                                          ymin = 0,
                                          xsize = 2.5,
                                          ysize = 1.778,
                                          title_fontsize = "12",
                                          label_fontsize = "8",
                                          bar_name_font_size = "9",
                                          legend_fontsize = "9",
                                          stack_name_font_size = "9",
                                          stack_name_location = 9,
                                          bar_space = 1.2,
                                          colors = ["1 0 1",
                                                    ".8 .8 .8",
                                                    ".6 .6 .6",
                                                    ".4 .4 .4",
                                                    "0 0 0"])
    

def make_aborts_delta(jgraphs, pairs, protocol, label):
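    """Bar graph of the difference in mean xact_aborts between each pair's second
    and first run, per processor count; emits EPS and optional Excel output."""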
    xlabel = "Processors"
    ylabel = "Speedup"

    bars = []
    proc_values = [1, 2, 4, 8, 16, 32]
    for pair in pairs:
        bar = [pair[0]]
        for procs in proc_values:
            lst = []
            glob_str = "%s/%s/%s-%dp*-*%s-*.stats" % (results_dir, pair[1], pair[1], procs, protocol)
            files = glob.glob(glob_str)
            if files == []:
                print "%s not found" % glob_str
                continue
            for file in files:
                lst.append(get_int_stat(file, "xact_aborts"))
            aborts_base = mfgraph.mean(lst)

            lst = []
            #files = glob.glob("%s/%s/%s-%dp-default-%s*.stats" % (results_dir, pair[2], pair[2], procs, protocol))
            #protocol = "MESI_SMP_LogTM_directory"
            glob_str = "%s/%s/%s-%dp*-*%s-*.stats" % (results_dir, pair[2], pair[2], procs, protocol)
            files = glob.glob(glob_str)
            if files == []:
                print "%s not found" % glob_str
                #return
                continue
            for file in files:
                print get_int_stat(file, "xact_aborts")
                lst.append(get_int_stat(file, "xact_aborts"))
            aborts_test = mfgraph.mean(lst)
                        
            print "aborts_test: %d, aborts_base %d", (aborts_test, aborts_base)
            delta_aborts = aborts_test - aborts_base
            bar.append(["%d" % procs, delta_aborts])
        bars.append(bar)

    jgraphs.append(mfgraph.stacked_bar_graph(bars,
                                             bar_segment_labels = ["runtime"],
                                             ymin = 0,
                                             #xsize = 5.0,
                                             #ymax = 5.0,
                                             ))

    graph_out = mfgraph.stacked_bar_graph(bars,
                                          title = "",
                                          bar_segment_labels = ["Execution Time"],
                                          ylabel = "Speedup ",
                                          ymax = 5.0,
                                          xsize = 2.5,
                                          ysize = 1.778,
                                          title_fontsize = "12",
                                          label_fontsize = "8",
                                          bar_name_font_size = "5",
                                          legend_fontsize = "9",
                                          stack_name_font_size = "5",
                                          stack_name_location = 9,
                                          bar_space = 1.2,
                                          colors = ["1 0 1",
                                                    ".8 .8 .8",
                                                    ".6 .6 .6",
                                                    ".4 .4 .4",
                                                    "0 0 0"])
    mfgraph.make_eps(graph_out, "aborts_delta_%s" % label, "xref_figures")
    if make_excel_files:
        print bars    
        mfgraph.make_excel_bar("aborts_delta_%s" % label, bars) 


def make_scalability_lines(jgraphs, benchmarks, protocol):
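    """Line graph of mean execution time (millions of Ruby cycles) versus
    processor count for each benchmark, with 95% confidence intervals."""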
    xlabel = "Processors"
    ylabel = "Run Time"

    proc_values = [1, 2, 4, 8, 12, 16, 32]				   
    data = []
    
    for (name, benchmark) in benchmarks:
        line = [name]
        for procs in proc_values:
            #print procs
            lst = []
            #read_set_str = "%s" % read_set
            glob_str = "%s/%s/%s*-%dp-*%s-*.stats" % (results_dir, benchmark, benchmark, procs, protocol)
            files = glob.glob(glob_str)
            if files == []:
                print "No files match: %s" % glob_str
                #exit
                continue
            for file in files:
                #print file
                ruby_cycles = get_float_stat(file, "Ruby_cycles")/1000000.0
                #divide by procs?
                
                #print "ruby_cycles: %d" % ruby_cycles
                lst.append(ruby_cycles)
            cycles = mfgraph.mean(lst)
            #print lst
            conf = mfgraph.confidence_interval_95_percent(lst)

            line.append([procs, cycles, cycles - conf, cycles + conf])
        data.append(line)

    #print data
    jgraphs.append(mfgraph.line_graph(data,
                                      title = name,
                                      ylabel = "Execution Time (in millions of cycles)",
                                      xlabel = "Threads",
                                      xsize = 4.5,
                                      ysize = 8.0,
                                      line_thickness = 2.0,
                                      legend_x = "90",
                                      marktype = ["circle", "box", "triangle", "diamond"],
                                      #marksize = [0.4, 0.4, 0.4, 0.5],
                                      ))
    

        
#================ End Functions =========================================================


all_benchmarks = ("raytrace-trans_1",
                  "raytrace-locks_1",
                  #"radiosity-trans_1",
                  #"radiosity-locks_1",
                  "ocean-trans_66_1",
                  "ocean-locks_66_1",
                  #"barnes-trans_512_1",
                  #"barnes-locks_512_1",
                  "water-nsquared-trans-216_1",
                  "water-nsquared-locks-216_1",
                  )

directory_protocols = {}
directory_protocols["lock"] = "MESI_SMP_LogTM_directory"
directory_protocols["trans"] = "MESI_SMP_LogTM_directory"

bcast_protocols = {}
bcast_protocols["lock"] = "MOESI_hammer"
bcast_protocols["trans"] = "MOESI_xact_hammer_bf"


raytrace_runs = [
    ("Base", "raytrace-TM-teapot", "trans"),
    ("XOrder", "raytrace-XORDER-teapot", "trans")
    ]

radiosity_runs = [
    ("Base", "radiosity-TM-None", "trans"),
    ("XOrder", "radiosity-XORDER-None", "trans")
    ]

cholesky14_runs = [
    ("Base", "cholesky-TM-14", "trans"),
    ("XOrder", "cholesky-XORDER-14", "trans")
    ]

cholesky15_runs = [
    ("Base", "cholesky-TM-15", "trans"),
    ("XOrder", "cholesky-XORDER-15", "trans")
    ]

mp3d_runs = [
    ("Base", "mp3d-TM-128mol-1024ops", "trans"),
    ("XOrder", "mp3d-XORDER-128mol-1024ops", "trans")
    ]

splash_perf_runs = [
    ("RAYTRACE",   "raytrace-TM-teapot",     "raytrace-XORDER-teapot"),
    ("RADIOSITY",  "radiosity-TM-None",      "radiosity-XORDER-None"),
    ("CHOLESKY",   "cholesky-TM-14",         "cholesky-XORDER-14"),
    ("MP3D",       "mp3d-TM-128mol-1024ops", "mp3d-XORDER-128mol-1024ops")
    #("BARNES",     "barnes-TM-512",      "barnes-XORDER-512"),
    #("BTREE-20",  "btree-TM-priv-alloc-20pct", "btree-XORDER-None"),
    ]

log_size      = ("xact_log_size_dist", "Transaction Log Size (in 64-Byte lines)", "write_set")
read_set_size = ("xact_read_set_size_dist", "Transaction Read Set Size (in 64-Byte lines)", "read_set")

jgraph_input = []


# # microbenchmark performance
#make_microbench_line(jgraph_input, "raytrace", raytrace_runs, 10000, directory_protocols,  "dir")
#make_microbench_speedup_line(jgraph_input, "raytrace", raytrace_runs, 10000, directory_protocols,  "dir")
make_microbench_line(jgraph_input, "radiosity", radiosity_runs, 10000, directory_protocols,  "dir")
make_microbench_speedup_line(jgraph_input, "radiosity", radiosity_runs, 10000, directory_protocols,  "dir")
#make_microbench_line(jgraph_input, "cholesky", cholesky14_runs, 10000, directory_protocols,  "dir")
#make_microbench_speedup_line(jgraph_input, "cholesky", cholesky14_runs, 10000, directory_protocols,  "dir")
#make_microbench_line(jgraph_input, "mp3d", mp3d_runs, 10000, directory_protocols,  "dir")
#make_microbench_speedup_line(jgraph_input, "mp3d", mp3d_runs, 10000, directory_protocols,  "dir")

make_norm_runtime(jgraph_input, splash_perf_runs, "MESI_SMP_LogTM_directory", "xorder")
make_aborts_delta(jgraph_input, splash_perf_runs, "MESI_SMP_LogTM_directory", "xorder")

mfgraph.run_jgraph("newpage\n".join(jgraph_input), "xref")

#make_dist_table(splash_dist_runs, 16, read_set_size)
#make_dist_table(splash_dist_runs, 16, log_size)