Skip to content
Snippets Groups Projects
Commit f06e1063 authored by Guillaume Damiand's avatar Guillaume Damiand
Browse files

Add scripts; update readme

parent f0e66e00
No related branches found
No related tags found
No related merge requests found
# stripped-hds
Stripped halfedge data structure for parallel computation of arrangements of segments
\ No newline at end of file
Stripped halfedge data structure for parallel computation of arrangements of segments.
Let us denote by SRC_DIR the root directory of the gitlab repository.
1. Each dataset is a text file containing a list of segments. In the GitLab repository, these files are compressed.
To uncompress all the data files:
# We suppose the current directory is SRC_DIR
gunzip -r data/
2. Install required dependencies
Boost, with components system and filesystem
Intel TBB
Qt5
CGAL, with component Qt5
3. Compile the programs
# We suppose the current directory is SRC_DIR
mkdir builds
cd builds
mkdir release
cd release
cmake -DCMAKE_BUILD_TYPE=Release ../../src
make
4. Run the main program
One example of run:
# We suppose the current directory is SRC_DIR/builds/release
./parallel-arrangement -t1 ../../data/mixerP4.txt -nbs 8 -nbt 8 -crop -draw
The parameter after -t1 is the filename
-nbs gives the number of strips
-nbt gives the number of threads
-crop is an option to crop segments in the strips
-draw will draw the final arrangement
Run
./parallel-arrangement -h
for a help message showing all options.
5. You can use different scripts to reproduce results of the paper
5.1 To compare computation times of CGAL and our method, for GIS dataset.
# We suppose the current directory is SRC_DIR
./run-gis-test.sh 5
Run our program on all the gis data.
5 is the number of runs for each file (in the paper we use 5 runs);
1 is used by default.
Computation times of the method are stored in file times.dat;
number of cells in nbcells.dat; and times of the different parts
of our method in detailed-times.dat.
5.2 To draw computation time graphs for stochastics dataset
You need python3, numpy, matplotlib
# We suppose the current directory is SRC_DIR
python3 python_scripts/run_benchmark.py --log outputmixerP4.log --thread 1 2 4 8 16 32 --repeat 5 --crop data/mixerP4.txt
Run our program on the given dataset, and store the log output in a file.
--log gives the log filename
--thread gives the number of threads to test
--repeat gives the number of runs for each file (in the paper we use 5 runs)
--crop is an option to crop segments in the strips
the last parameter is a directory or a list of files to test. When a directory is used,
the program is run for each file in the directory.
In the paper we used:
python3 python_scripts/run_benchmark.py --log outputmixerP4.log --thread 1 2 4 8 16 32 --repeat 5 --crop data/mixerP4.txt
python3 python_scripts/run_benchmark.py --log FRA.log --thread 1 2 4 8 16 32 --repeat 5 --crop data/countries/FRA.txt
python3 python_scripts/run_benchmark.py --log gauss-short.log --thread 1 2 4 8 16 32 --repeat 5 --crop data/gauss-short/
python3 python_scripts/run_benchmark.py --log gauss-long.log --thread 1 2 4 8 16 32 --repeat 5 --crop data/gauss-long/
python3 python_scripts/run_benchmark.py --log best.log --thread 1 2 4 8 16 32 --repeat 5 --crop data/best/
python3 python_scripts/run_benchmark.py --log worst.log --thread 1 2 4 8 16 32 --repeat 5 --crop data/worst/
To visualize the results, you need to use plot_result.py, for example:
python3 python_scripts/plot_result.py --no-load --no-error outputmixerP4.log
python3 python_scripts/plot_result.py --no-load --no-error FRA.log
python3 python_scripts/plot_result.py --no-load --no-error gauss-short.log
python3 python_scripts/plot_result.py --no-load --no-error gauss-long.log
python3 python_scripts/plot_result.py --no-load --no-error best.log
python3 python_scripts/plot_result.py --no-load --no-error worst.log
\ No newline at end of file
import argparse
import itertools
import os
import subprocess
import sys
import time
from enum import Enum
# Default locations of the two external generator binaries, relative to the
# scripts directory.  Both can be overridden from the command line in
# __main__ (--bin-gauss / --bin-shp), which rebinds these module globals.
RANDOM_SEGMENTS_BIN = "../src/builds/release/tools/random-segments"
SHP_TO_TXT_BIN = "../src/builds/release/tools/shp-to-txt"
class Profile(Enum):
    """Data-generation profiles accepted by the command line (--profile)."""

    GAUSS = "gauss"
    RAND = "rand"
    BEST = "best"
    WORST = "worst"

    def __str__(self):
        # argparse renders choices through str(); expose the raw value.
        return self.value

    def __repr__(self):
        return self.__str__()
def bin_exist(path):
    """Return True when *path* names an existing, executable regular file."""
    is_regular_file = os.path.isfile(path)
    is_executable = os.access(path, os.X_OK)
    return is_regular_file and is_executable
def random_segments(xmin=0.0, xmax=10000.0, ymin=0.0, ymax=10000.0,
                    n=100, size=1000.0, error=0.05,
                    output="./segments.txt",
                    seed=None, fixed=False):
    """Run the external 'random-segments' generator.

    Generates *n* random segments of mean length *size* (relative length
    error *error*) inside the box [xmin, xmax] x [ymin, ymax] and writes
    them to *output*.  Returns the subprocess.CompletedProcess with the
    tool's stdout captured.
    """
    args = [
        RANDOM_SEGMENTS_BIN,
        "--xmin", str(xmin),
        "--xmax", str(xmax),
        "--ymin", str(ymin),
        "--ymax", str(ymax),
        "-n", str(n),
        "--size", str(size),
        "--error", str(error),
        "--output", output
    ]
    # Bug fix: 'if seed:' silently dropped a legitimate seed of 0.
    if seed is not None:
        args.append("--seed")
        args.append(str(seed))
    if fixed:
        # Bug fix: the flag was previously "--fixed " with a trailing space,
        # which the tool would not recognize as the --fixed option.
        args.append("--fixed")
    proc = subprocess.run(args, stdout=subprocess.PIPE)
    return proc
def shp_to_txt(profile, n,
               band=None, size=None,
               output=None):
    """Run the external 'shp-to-txt' generator for the given *profile*.

    profile: Profile.RAND, Profile.BEST or Profile.WORST (others raise).
    n:       number of segments to generate.
    band:    number of bands (BEST profile only).
    size:    segment size (RAND/BEST profiles; 0.0 selects the default name).
    output:  if given, the generated file is renamed to this path.

    Returns the subprocess.CompletedProcess with stdout captured.
    Raises NameError for unsupported profiles.
    """
    args = [
        SHP_TO_TXT_BIN
    ]
    if profile == Profile.RAND:
        args.extend([ "-t2", str(n), str(size) ])
        if size == 0.0:
            filename = "random-%d.txt" % n
        else:
            filename = "random-%d-size-%.2f.txt" % (n, size)
    elif profile == Profile.BEST:
        args.extend([ "-t5", str(n), str(band), str(size) ])
        # A single band degenerates to the plain random naming scheme.
        if band == 1:
            if size == 0.0:
                filename = "random-%d.txt" % n
            else:
                filename = "random-%d-size-%.2f.txt" % (n, size)
        else:
            if size == 0.0:
                filename = "random-%d-best-%d.txt" % (n, band)
            else:
                filename = "random-%d-size-%.2f-best-%d.txt" % (n, size, band)
    elif profile == Profile.WORST:
        args.extend([ "-t6", str(n) ])
        filename = "random-%d-worst.txt" % (n)
    else:
        # Bug fix: concatenating the Profile enum member directly with str
        # raised TypeError, masking the intended NameError message.
        raise NameError("'" + str(profile) + "' is not a valid profile for shp-to-txt.")
    proc = subprocess.run(args, stdout=subprocess.PIPE)
    if output:
        # The tool writes a predictable filename in the cwd; move it into place.
        os.rename(filename, output)
    return proc
if __name__ == "__main__":
    # Command-line driver: generate batches of random segment files by
    # invoking the external 'random-segments' and 'shp-to-txt' tools once
    # per (nseg, size, batch-index[, band]) combination.
    parser = argparse.ArgumentParser()
    parser.add_argument("--profile", "-p", required=True, type=Profile,
                        choices=list(Profile),
                        help="set generation profile")
    parser.add_argument("--nseg", "-n", required=True, type=int, nargs="+",
                        help="set number of segments")
    parser.add_argument("--size", "-s", required=True, type=float, nargs="+",
                        help="set size of segments")
    parser.add_argument("--batch", "-b", required=True, type=int,
                        help="set number of segments generations")
    parser.add_argument('--verbose', '-v', action='count', default=0,
                        help="set verbosity level")
    parser.add_argument("--dir", "-d", default=None,
                        help="set generated data directory, default is cwd")
    gaussg = parser.add_argument_group("gauss profile configuration")
    gaussg.add_argument("--error", type=float, default=0.10,
                        help="set segment length error (=2*sigma/mu), default value is 0.10")
    gaussg.add_argument("--fixed", action="store_true",
                        help="fix segment size")
    bestgr = parser.add_argument_group("best profile configuration")
    # --band is mandatory only when the 'best' profile appears on the command
    # line; this is detected by scanning sys.argv before parsing.
    bestgr.add_argument("--band", type=int, nargs="+",
                        required=(str(Profile.BEST) in sys.argv),
                        help="set numbers of bands")
    binary = parser.add_argument_group("binaries configuration")
    binary.add_argument("--bin-gauss", default=RANDOM_SEGMENTS_BIN,
                        help="set 'random-segments' binary path, default value is set line 10")
    binary.add_argument("--bin-shp", default=SHP_TO_TXT_BIN,
                        help="set 'shp-to-txt' binary path, default value is set line 11")
    args = parser.parse_args()
    # Rebind the module globals so random_segments()/shp_to_txt() pick up
    # any command-line overrides of the binary paths.
    RANDOM_SEGMENTS_BIN = args.bin_gauss
    SHP_TO_TXT_BIN = args.bin_shp
    if args.dir == None:
        args.dir = os.getcwd()
    if not bin_exist(RANDOM_SEGMENTS_BIN):
        raise NameError("'" + RANDOM_SEGMENTS_BIN + "' random-segments path does not exist. "
                        "Set variable 'RANDOM_SEGMENTS_BIN' line 10 or option --bin-gauss to a correct path.")
    if not bin_exist(SHP_TO_TXT_BIN):
        raise NameError("'" + SHP_TO_TXT_BIN + "' shp-to-txt path does not exist. "
                        "Set variable 'SHP_TO_TXT_BIN' line 11 or option --bin-shp to a correct path.")
    if not os.path.isdir(args.dir):
        raise NameError("'" + args.dir + "' directory does not exist.")
    # If the profile is 'worst', the segment size is irrelevant.
    if args.profile == Profile.WORST:
        args.size = [0]
    # NOTE(review): 'iter' shadows the builtin; kept as-is for compatibility.
    iter = 0
    total_iter = len(args.nseg) * len(args.size) * args.batch
    if args.profile == Profile.BEST:
        total_iter *= len(args.band)

    def advance_and_print():
        # Progress counter shared with the loop below via the module global.
        global iter
        iter += 1
        if args.verbose > 1:
            print("progress: %d / %d" % (iter, total_iter))

    def format_path(nseg, size, i, band=None):
        # Build the output filename encoding profile, counts and batch index.
        filename = "data-%s-%d" % (args.profile, nseg)
        if args.profile != Profile.WORST:
            filename += "-size-%d" % size
        if args.profile == Profile.BEST and band != None:
            filename += "-band-%d" % band
        filename += "-%d.txt" % i
        return os.path.join(args.dir, filename)

    start = time.time()
    for nseg, size, i in itertools.product(args.nseg, args.size, range(args.batch)):
        if args.profile == Profile.BEST:
            # One generation per requested band count.
            for band in args.band:
                shp_to_txt(Profile.BEST, nseg, size=size, band=band,
                           output=format_path(nseg, size, i, band=band))
                advance_and_print()
        elif args.profile == Profile.GAUSS:
            random_segments(n=nseg, size=size, error=args.error, fixed=args.fixed,
                            output=format_path(nseg, size, i))
            advance_and_print()
        else:
            shp_to_txt(args.profile, nseg, size=size,
                       output=format_path(nseg, size, i))
            advance_and_print()
    stop = time.time()
    if args.verbose:
        print("ellapsed time:", stop - start, "sec")
import argparse
import numpy as np
import matplotlib.collections as mcollec
import matplotlib.colors as mcolors
import matplotlib.pyplot as plt
import os
def make_size_bar(ax, size, bins):
    """Draw the normalized histogram of segment lengths on *ax* and return it."""
    ax.hist(size, bins=bins, density=True)
    ax.set_xlabel("size")
    ax.set_ylabel("frequency")
    ax.grid(True)
    return ax
def make_dir_bar(ax, dir, bins):
    """Draw the normalized histogram of segment directions (degrees) on *ax*."""
    ax.hist(dir, bins=bins, density=True)
    ax.set_xlabel("direction [deg]")
    ax.set_ylabel("frequency")
    ax.grid(True)
    return ax
def make_pos_bar(ax, x, y, bins):
    """Draw a normalized 2D histogram of segment midpoints with a colorbar."""
    hist2d = ax.hist2d(x, y, bins=bins, density=True)
    # hist2d[3] is the QuadMesh used to scale the attached colorbar.
    cbar = plt.colorbar(hist2d[3], ax=ax)
    ax.set_xlabel("x")
    ax.set_ylabel("y")
    cbar.ax.set_ylabel("frequency")
    return ax
if __name__ == "__main__":
    # Command-line driver: plot size / direction / position histograms of a
    # segment file, optionally exporting them and drawing the segments.
    parser = argparse.ArgumentParser()
    parser.add_argument("input",
                        help="input result file")
    parser.add_argument("--bins", "-b", type=int, default=16,
                        help="set histogram number of bins")
    parser.add_argument("--split", "-s", action="store_true",
                        help="one figure per graph instead of subplots")
    parser.add_argument("--draw", "-d", action="store",
                        help="draw segments on another figure and export to PDF")
    parser.add_argument("--export", "-e", action="store_true",
                        help="export histograms")
    parser.add_argument("--noshow", "-n", action="store_true",
                        help="dont show figures")
    parser.add_argument("--ext", "-x", action="store", default="png",
                        help="set export extension, default is png")
    args = parser.parse_args()
    # One segment per row; columns are x0 y0 x1 y1 (the header line is skipped).
    data = np.genfromtxt(args.input, dtype=float, skip_header=1)
    x0 = data[:, 0]
    y0 = data[:, 1]
    x1 = data[:, 2]
    y1 = data[:, 3]
    dx = x1 - x0
    dy = y1 - y0
    # Segment lengths (Euclidean norm of (dx, dy)).
    size = np.sqrt(np.square(dx) + np.square(dy))
    # Segment direction in degrees.  NOTE(review): arctan(dy/dx) divides by
    # zero for vertical segments — presumably inputs exclude them; np.arctan2
    # would be safer.  Also, 'dir' shadows the builtin.
    dir = np.arctan(dy / dx) * 180.0 / np.pi
    # Segment midpoints.
    x = 0.5 * (x0 + x1)
    y = 0.5 * (y0 + y1)
    basename = os.path.splitext(os.path.basename(args.input))[0]
    if args.split:
        # One figure per histogram.
        fig0 = plt.figure()
        fig1 = plt.figure()
        fig2 = plt.figure()
        make_size_bar(fig0.add_subplot(), size, args.bins)
        make_dir_bar (fig1.add_subplot(), dir, args.bins)
        make_pos_bar (fig2.add_subplot(), x, y, args.bins)
        if args.export:
            fig0.savefig("%s-hist-size.%s" % (basename, args.ext))
            fig1.savefig("%s-hist-dir.%s" % (basename, args.ext))
            fig2.savefig("%s-hist-pos.%s" % (basename, args.ext))
    else:
        # Three histograms side by side on a single figure.
        fig = plt.figure()
        make_size_bar(fig.add_subplot(1, 3, 1), size, args.bins)
        make_dir_bar (fig.add_subplot(1, 3, 2), dir, args.bins)
        make_pos_bar (fig.add_subplot(1, 3, 3), x, y, args.bins)
        if args.export:
            fig.savefig("%s-hist.%s" % (basename, args.ext))
    if args.draw:
        # Draw the segments themselves, cycling through the Tableau palette.
        palette = list(mcolors.TABLEAU_COLORS.values())
        n = len(palette)
        lines = []
        colors = []
        for i in range(data.shape[0]):
            row = data[i, :]
            lines.append([(row[0], row[1]), (row[2], row[3])])
            colors.append(palette[i % n])
        fig = plt.figure()
        ax = fig.add_axes([0, 0, 1, 1])
        ax.add_collection(mcollec.LineCollection(lines, color=colors))
        ax.autoscale()
        ax.axis('off')
        fig.savefig(args.draw)
    if not args.noshow:
        plt.show()
import argparse
import numpy as np
import matplotlib.pyplot as plt
import os
import re
## The *-thread.log file is the result of:
#   cat toto.log | grep "\[Threa" >! toto-thread.log
# For each log line, extract every per-thread "time=<float>" token, print how
# many were found, then print the list of timing values (as strings).
#
# Bug fix: the patterns were non-raw strings with an unescaped dot
# ('time=\d+.\d+'), which both triggers invalid-escape warnings and lets the
# dot match any character.  Patterns are also compiled once, outside the loop.
TIME_RE = re.compile(r"time=\d+\.\d+")
FLOAT_RE = re.compile(r"\d+\.\d+")
with open("toto-thread.log") as file:
    for line in file:
        alltimings = TIME_RE.findall(line)
        print(len(alltimings))
        # NOTE(review): the accumulator is reset per line, as in the original.
        allT = []
        for athread in alltimings:
            allT = allT + FLOAT_RE.findall(athread)
        print(allT)
import argparse
import itertools
import numpy as np
import matplotlib.colors as mcolors
import matplotlib.pyplot as plt
import os
import warnings
# PB: see "matplotlib named_colors"
# PB: see "matplotlib named_colors"
# Fixed color per pipeline phase so every plot in this module uses a
# consistent legend (load=blue, local=orange, global=green, idle=red).
PREFERED_COLORS = {
    "load": mcolors.TABLEAU_COLORS["tab:blue"],
    "local": mcolors.TABLEAU_COLORS["tab:orange"],
    "global": mcolors.TABLEAU_COLORS["tab:green"],
    "idle": mcolors.TABLEAU_COLORS["tab:red"],
}
def last_word(line):
    """Return the substring after the last single space in *line*."""
    parts = line.rsplit(" ", 1)
    return parts[1]
def parse_total(lines,
                cgal=False):
    """Extract the total duration from a record: line 4 for CGAL, line 10 otherwise."""
    line = lines[4] if cgal else lines[10]
    # Same extraction as last_word(), inlined: take the token after the last space.
    return float(line.rsplit(" ", 1)[1])
def parse_load(lines,
               cgal=False):
    """Extract the load duration from a record: line 3 for CGAL, line 7 otherwise."""
    line = lines[3] if cgal else lines[7]
    # Same extraction as last_word(), inlined: take the token after the last space.
    return float(line.rsplit(" ", 1)[1])
def parse_local(lines):
    """Extract the local-step duration from line 8 of a record."""
    # Take the token after the last space (inlined last_word()).
    return float(lines[8].rsplit(" ", 1)[1])
def parse_global(lines):
    """Extract the global-step duration from line 9 of a record."""
    # Take the token after the last space (inlined last_word()).
    return float(lines[9].rsplit(" ", 1)[1])
def parse_thread(lines):
    """Extract per-thread timings from line 1 of a record, ordered by band.

    The line holds one '['-introduced chunk per thread whose third token is
    'b=<band>' and whose last token is 'time=<seconds>'.
    """
    timings = {}
    for chunk in lines[1].split("[")[1:]:  # data per thread
        tokens = chunk.split()
        band = int(tokens[2].split("=")[1])
        timings[band] = float(tokens[-1].split("=")[1])
    return [timings[band] for band in sorted(timings)]
def parse_memory(lines,
                 cgal=False):
    """Extract memory usage (third token): line 2 for CGAL, line 6 otherwise."""
    line = lines[2] if cgal else lines[6]
    return int(line.split(" ", 3)[2])
def parse_traverse(lines):
    """Return the mean duration of a single traversal.

    Line 11 reads '... <count> times <total> ...'; the result is total/count.
    """
    count_part, time_part = lines[11].split(" times ", 1)
    count = int(count_part.rsplit(" ", 1)[1])
    total = float(time_part.split(" ", 1)[0])
    return total / count
def parse_cdarts(lines):
    """Sum the left/right dart counts of every band listed on line 3.

    Each '|'-separated band entry ends with '(<left>, <right>)'; the trailing
    element after the last '|' is ignored.
    """
    total = 0
    for band in lines[3].split("|")[:-1]:  # data per band
        left_part, right_part = band.rsplit(", ", 1)
        total += int(left_part.rsplit("(", 1)[1])
        total += int(right_part.rsplit(")", 1)[0])
    return total
def read_log(filename):
    """Parse a benchmark log written by run_benchmark.py.

    The log is a sequence of records: a separator line, a header line whose
    last word is either 'cgal' or a thread count, then 5 (cgal) or 12 (our
    method) result lines.  Returns a dict mapping method name ('cgal' or the
    thread count as a string) to field dicts, each holding 'detail' (raw
    per-run values) plus 'mean' and 'dev' aggregated over the runs.
    """
    data = {}
    with open(filename) as file:
        it = iter(file)
        for _ in it:  # each outer iteration consumes one separator line
            line = next(it).strip()
            # The header line ends with the method identifier.
            method = line.rsplit(" ", 1)[1]
            if method == "cgal":
                if "cgal" not in data:
                    # CGAL records only expose total/load/global/memory.
                    data["cgal"] = {
                        "total": { "detail": [] },
                        "load": { "detail": [] },
                        "global": { "detail": [] },
                        "memory": { "detail": [] }
                    }
                entry = data["cgal"]
                lines = [ next(it).strip() for _ in range(5) ]
                total = parse_total (lines, cgal=True)
                load = parse_load (lines, cgal=True)
                entry["total"]["detail"] .append(total)
                entry["load"] ["detail"] .append(load)
                # CGAL has no separate global step; derive it as total - load.
                entry["global"]["detail"].append(total - load)
                entry["memory"]["detail"].append(parse_memory(lines, cgal=True))
            else:
                nbthread = int(method)
                if method not in data:
                    # 'thread' keeps one list of run values per thread.
                    data[method] = {
                        "total": { "detail": [] },
                        "load" : { "detail": [] },
                        "local": { "detail": [] },
                        "global": { "detail": [] },
                        "thread": { "detail": [ [] for _ in range(nbthread) ] },
                        "memory": { "detail": [] },
                        "traverse": { "detail": [] },
                        "cdarts": { "detail": [] }
                    }
                entry = data[method]
                lines = [ next(it).strip() for _ in range(12) ]
                entry["total"] ["detail"].append(parse_total (lines))
                entry["load"] ["detail"].append(parse_load (lines))
                entry["local"] ["detail"].append(parse_local (lines))
                entry["global"] ["detail"].append(parse_global(lines))
                entry["memory"] ["detail"].append(parse_memory(lines))
                entry["traverse"]["detail"].append(parse_traverse(lines))
                entry["cdarts"] ["detail"].append(parse_cdarts(lines))
                try :
                    thread = parse_thread(lines)
                    for i in range(nbthread):
                        entry["thread"]["detail"][i].append(thread[i])
                except:
                    # Malformed per-thread line: record NaN so aggregation
                    # (nanmean/nanstd) can skip this run.
                    # NOTE(review): a bare except also hides unrelated errors.
                    warnings.warn("found thread without band in")
                    for i in range(nbthread):
                        entry["thread"]["detail"][i].append(np.nan)
    # Aggregate over the repetitions.  The transpose matters for 'thread',
    # whose detail is (nbthread, nruns): axis 0 then runs over repetitions,
    # yielding one mean/dev value per thread.
    for method in data:
        for key in data[method]:
            entry = data[method][key]
            arr = np.array(entry["detail"]).transpose()
            entry["mean"] = np.nanmean(arr, axis=0)
            entry["dev"] = np.nanstd(arr, axis=0)
    return data
def remove_idle(data):
    """Subtract the minimal idle time from 'local' and 'total' means in place.

    For every non-CGAL method, the smallest gap between the local-step mean
    and the per-thread means is treated as constant overhead and removed.
    Returns the same (mutated) dict.
    """
    for method in data:
        if method == "cgal":
            continue
        local_mean = data[method]["local"] ["mean"]
        thread_mean = data[method]["thread"]["mean"]
        idle = np.min(local_mean - thread_mean)
        data[method]["local"]["mean"] -= idle
        data[method]["total"]["mean"] -= idle
    return data
#def fill_cgal(data):
#
# if "cgal" in data:
#
# entry = data["cgal"]
#
# entry["load"] = { "mean": 0.0, "dev": 0.0 }
# entry["local"] = { "mean": 0.0, "dev": 0.0 }
# entry["global"] = entry["total"]
# entry["thread"] = { "mean": np.array([ 0.0 ]), "dev": np.array([ 0.0 ]) }
# entry["memory"] = { "mean": 0.0, "dev": 0.0 }
# entry["traverse"] = { "mean": 0.0, "dev": 0.0 }
#
# return data
#
#def extract_field(data, methods, field):
# mean = list(map(lambda m: data[m][field]["mean"], methods))
# dev = list(map(lambda m: data[m][field]["dev"], methods))
# return mean, dev
def extract_field(data, methods, field):
    """Collect parallel (mean, dev) lists for *field* over *methods*.

    Methods that do not carry the field contribute 0.0 — or [0.0] for the
    per-thread field — so the plotting code can still stack bars for them.
    """
    mean = []
    dev = []
    for method in methods:
        entry = data[method]
        if field in entry:
            mean.append(entry[field]["mean"])
            dev.append(entry[field]["dev"])
        elif field == "thread":
            mean.append([ 0.0 ])
            dev.append([ 0.0 ])
        else:
            mean.append(0.0)
            dev.append(0.0)
    return mean, dev
def make_simple_bar(ax, data, methods, field, without_error):
    """Bar plot of *field* means per method (std-dev error bars) on *ax*."""
    mean, dev = extract_field(data, methods, field)
    positions = np.arange(0, len(methods))
    bars = ax.bar(positions, mean, yerr=dev)
    if without_error:
        # bars.errorbar[2] holds the error-bar line collections; drop them.
        for artist in bars.errorbar[2]:
            artist.remove()
    ax.set_xticks(positions)
    ax.set_xticklabels(methods)
    ax.grid(True)
    return ax
def make_total_bar(ax, data,
                   without_error=False, without_load=False):
    """Bar plot of total durations per method.

    without_error: remove the drawn error bars.
    without_load:  subtract each method's load-step mean from its total.
    """
    methods = data.keys()
    mean, dev = extract_field(data, methods, "total")
    positions = np.arange(0, len(methods))
    if without_load:
        load_mean, _ = extract_field(data, methods, "load")
        heights = np.array(mean) - np.array(load_mean)
    else:
        heights = mean
    bars = ax.bar(positions, heights, yerr=dev)
    if without_error:
        # bars.errorbar[2] holds the error-bar line collections; drop them.
        for artist in bars.errorbar[2]:
            artist.remove()
    ax.set_xticks(positions)
    ax.set_xticklabels(methods)
    ax.set_xlabel("method - threads")
    ax.set_ylabel("duration [sec]")
    ax.grid(True)
    return ax
def make_step_bar(ax, data,
                  without_error=False, without_load=False):
    """Stacked bar plot of the load/local/global step durations per method.

    without_error: remove the drawn error bars after plotting.
    without_load:  omit the load step from the stack.
    """
    methods = data.keys()
    t0_mean, t0_dev = extract_field(data, methods, "load")
    t1_mean, t1_dev = extract_field(data, methods, "local")
    t2_mean, t2_dev = extract_field(data, methods, "global")
    ind = np.arange(0, len(methods))
    if without_load:
        # Two-level stack: local at the bottom, global on top.
        p1 = ax.bar(ind, t1_mean, yerr=t1_dev, color=PREFERED_COLORS["local"])
        p2 = ax.bar(ind, t2_mean, yerr=t2_dev, bottom=t1_mean, color=PREFERED_COLORS["global"])
        bars = (p1, p2)
        steps = ("local", "global")
    else:
        # Three-level stack: load, then local, then global.
        p0 = ax.bar(ind, t0_mean, yerr=t0_dev, color=PREFERED_COLORS["load"])
        p1 = ax.bar(ind, t1_mean, yerr=t1_dev, bottom=t0_mean, color=PREFERED_COLORS["local"])
        p2 = ax.bar(ind, t2_mean, yerr=t2_dev, bottom=(np.array(t0_mean) + np.array(t1_mean)), color=PREFERED_COLORS["global"])
        bars = (p0, p1, p2)
        steps = ("load", "local", "global")
    if without_error:
        # Each container's errorbar[2] holds the error-bar line collections.
        for e in itertools.chain(*[ b.errorbar[2] for b in bars ]):
            e.remove()
    ax.set_xticks(ind)
    ax.set_xticklabels(methods)
    ax.set_xlabel("method - threads")
    ax.set_ylabel("duration [sec]")
    ax.grid(True)
    ax.legend([ b[0] for b in bars ], steps)
    return ax
def make_detail_bar(ax, data,
                    without_error=False, without_load=False):
    """Stacked bar plot with one sub-bar per thread inside each method's local step.

    For every method the local step is split into per-thread bars; the red
    'idle' bar on top of each thread shows the part of the local step that
    thread spent waiting (local mean minus the thread's own time).
    """
    methods = list(data.keys())
    t0_mean, t0_dev = extract_field(data, methods, "load")
    t1_mean, _ = extract_field(data, methods, "local")
    t2_mean, t2_dev = extract_field(data, methods, "global")
    t3_mean, t3_dev = extract_field(data, methods, "thread")
    ind = np.arange(0, len(methods))
    w = 0.8  # total horizontal width allotted to one method's group of sub-bars
    x = []
    height = []
    width = []
    yerr = []
    bottom = []
    height_idle = []
    bottom_idle = []
    for i in range(len(methods)):
        # CGAL counts as a single thread; other method names are thread counts.
        n = 1 if methods[i] == "cgal" else int(methods[i])
        wi = w / float(n)  # width of one per-thread sub-bar
        # a: horizontal step between consecutive sub-bars;
        # b: center of the left-most sub-bar of this method's group.
        a = (w - wi) / (float(n) - 1.0) if n != 1 else 0.0
        b = float(ind[i]) + 0.5 * (wi - w)
        for j in range(n):
            t0_mean_i = 0.0 if without_load else t0_mean[i]
            x .append(a * j + b)
            height.append(t3_mean[i][j])
            width .append(wi)
            yerr .append(t3_dev[i][j])
            bottom.append(t0_mean_i)
            # Idle portion = local-step duration minus this thread's busy time.
            height_idle.append(t1_mean[i] - t3_mean[i][j])
            bottom_idle.append(t0_mean_i + t3_mean[i][j])
    if without_load:
        p1 = ax.bar(x, height, yerr=yerr, bottom=bottom, width=width, color=PREFERED_COLORS["local"])
        p2 = ax.bar(ind, t2_mean, yerr=t2_dev, bottom=t1_mean, color=PREFERED_COLORS["global"])
        p_idle = ax.bar(x, height_idle, bottom=bottom_idle, width=width, color=PREFERED_COLORS["idle"])
        bars = (p1, p2, p_idle)
        steps = ("local", "global", "idle")
        bars_with_error = (p1, p2)
    else:
        p0 = ax.bar(ind, t0_mean, yerr=t0_dev, color=PREFERED_COLORS["load"])
        p1 = ax.bar(x, height, yerr=yerr, bottom=bottom, width=width, color=PREFERED_COLORS["local"])
        p2 = ax.bar(ind, t2_mean, yerr=t2_dev, bottom=(np.array(t0_mean) + np.array(t1_mean)), color=PREFERED_COLORS["global"])
        p_idle = ax.bar(x, height_idle, bottom=bottom_idle, width=width, color=PREFERED_COLORS["idle"])
        bars = (p0, p1, p2, p_idle)
        steps = ("load", "local", "global", "idle")
        bars_with_error = (p0, p1, p2)
    if without_error:
        # Remove every error bar from the containers that carry one.
        for e in itertools.chain(*[ b.errorbar[2] for b in bars_with_error ]):
            e.remove()
    else:
        # PB: hide thread error
        for e in p1.errorbar[2]:
            e.remove()
    ax.set_xticks(ind)
    ax.set_xticklabels(methods)
    ax.set_xlabel("method - threads")
    ax.set_ylabel("duration [sec]")
    ax.grid(True)
    ax.legend([ b[0] for b in bars ], steps)
    return ax
def make_memory_bar(ax, data,
                    without_error=False):
    """Bar plot of memory usage per method, delegating to make_simple_bar."""
    ax = make_simple_bar(ax, data, data.keys(), "memory", without_error)
    ax.set_ylabel("memory [bytes]")
    ax.set_xlabel("threads")
    return ax
def make_traverse_bar(ax, data,
                      without_error=False):
    """Bar plot of mean traversal duration per method (CGAL has none, skip it)."""
    methods = [method for method in data.keys() if method != "cgal"]
    ax = make_simple_bar(ax, data, methods, "traverse", without_error)
    ax.set_xlabel("threads")
    ax.set_ylabel("traversal duration [sec]")
    return ax
def print_data(data, *args,
               spacing=20):
    """Print a table of per-method mean values, one row per requested field.

    The per-thread field has no scalar representation and is skipped with a
    warning.  *spacing* sets the column width.
    """
    unsupported = ( "thread", )
    methods = data.keys()
    row_format = ("{:>" + str(spacing) + "}") * (len(methods) + 1)
    # Header row: one column per method, empty first cell.
    print(row_format.format("", *methods))
    for field in args:
        if field in unsupported:
            warnings.warn("'print_data' does not support '" + field + "' field")
            continue
        mean, _ = extract_field(data, methods, field)
        print(row_format.format(field, *mean))
if __name__ == "__main__":
    # Command-line driver: read one or more benchmark logs, strip the idle
    # offset, and draw the per-thread 'detail' plot for each input.
    parser = argparse.ArgumentParser()
    parser.add_argument("input", nargs="*",
                        help="input result files")
    parser.add_argument("--split", "-s", action="store_true",
                        help="one figure per graph instead of subplots")
    parser.add_argument("--export", "-e", action="store_true",
                        help="export plots")
    parser.add_argument("--ext", "-x", action="store", default="png",
                        help="set export extension, default is png")
    parser.add_argument("--no-error", action="store_true",
                        help="hide error bar")
    parser.add_argument("--no-load", action="store_true",
                        help="hide load bar")
    parser.add_argument("--hide", action="store_true",
                        help="disable show")
    args = parser.parse_args()
    for input in args.input:
        data = read_log(input)
        # PB: hack to remove idle
        data = remove_idle(data)
        basename = os.path.splitext(os.path.basename(input))[0]
        # if args.split:
        # Only the 'detail' figure is currently produced; the alternative
        # figures are kept commented out below.
        if True:
            #fig0 = plt.figure()
            #fig1 = plt.figure()
            fig2 = plt.figure()
            #fig3 = plt.figure()
            #fig4 = plt.figure()
            #fig0.canvas.set_window_title("%s - total" % input)
            #fig1.canvas.set_window_title("%s - step" % input)
            fig2.canvas.set_window_title("%s - detail" % input)
            #fig3.canvas.set_window_title("%s - memory" % input)
            #fig4.canvas.set_window_title("%s - traverse" % input)
            #ax0 = fig0.add_subplot()
            #ax1 = fig1.add_subplot(sharex=ax0, sharey=ax0)
            ax2 = fig2.add_subplot() #sharex=ax0, sharey=ax0)
            #ax3 = fig3.add_subplot()
            #ax4 = fig4.add_subplot()
            #make_total_bar (ax0, data, without_error=args.no_error, without_load=args.no_load)
            #make_step_bar (ax1, data, without_error=args.no_error, without_load=args.no_load)
            make_detail_bar (ax2, data, without_error=args.no_error, without_load=args.no_load)
            #make_memory_bar (ax3, data, without_error=args.no_error)
            #make_traverse_bar(ax4, data, without_error=args.no_error)
            if args.export:
                #fig0.savefig("%s-total.%s" % (basename, args.ext))
                #fig1.savefig("%s-step.%s" % (basename, args.ext))
                fig2.savefig("%s-detail.%s" % (basename, args.ext))
                #fig3.savefig("%s-memory.%s" % (basename, args.ext))
                #fig4.savefig("%s-traverse.%s" % (basename, args.ext))
        # else:
        #     fig = plt.figure()
        #     fig.canvas.set_window_title(input)
        #     ax0 = fig.add_subplot(1, 5, 1)
        #     ax1 = fig.add_subplot(1, 5, 2, sharex=ax0, sharey=ax0)
        #     ax2 = fig.add_subplot(1, 5, 3, sharex=ax0, sharey=ax0)
        #     ax3 = fig.add_subplot(1, 5, 4)
        #     ax4 = fig.add_subplot(1, 5, 5)
        #     make_total_bar (ax0, data, without_error=args.no_error, without_load=args.no_load)
        #     make_step_bar (ax1, data, without_error=args.no_error, without_load=args.no_load)
        #     make_detail_bar (ax2, data, without_error=args.no_error, without_load=args.no_load)
        #     make_memory_bar (ax3, data, without_error=args.no_error)
        #     make_traverse_bar(ax4, data, without_error=args.no_error)
        #     if args.export:
        #         fig.savefig("%s.%s" % (basename, args.ext))
        # Always dump the scalar summary fields as a text table.
        print_data(data, "memory", "traverse", "cdarts")
    if not args.hide:
        plt.show()
import argparse
import os
import subprocess
import time
from enum import Enum
from generate_data import bin_exist
class Heuristic(Enum):
    """Band-computation heuristics selectable on the command line."""

    EDGE = "edge"
    INTERSECTION = "intersection"
    SEGMENT = "segment"
    SEGMENT_OLD = "segment-old"

    def __str__(self):
        # argparse renders choices through str(); expose the raw value.
        return self.value

    def __repr__(self):
        return self.__str__()
# Default location of the parallel-arrangement binary, relative to SRC_DIR;
# overridable from the command line (--arr-bin) in __main__.
PARALLEL_ARRANGEMENT_BIN = "./builds/release/parallel-arrangement"
# COMPUTE_BAND_BIN = "./builds/release/tools/compute-band"
def parallel_arrangement(t1, nbt,
                         xpos=None,
                         crop=False, optimized=False, traverse=False,
                         cgal=False):
    """Run the parallel-arrangement binary on input file *t1*.

    nbt is used both as the number of threads (-nbt) and the number of
    strips (-nbs).  xpos, when given, lists explicit strip x-positions.
    The boolean flags toggle the matching command-line options.
    Returns the subprocess.CompletedProcess with stdout captured.
    """
    command = [
        PARALLEL_ARRANGEMENT_BIN,
        "-t1", t1,
        "-nbt", str(nbt),
        "-nbs", str(nbt)
    ]
    if xpos:
        command.append("-xpos")
        command.extend(map(str, xpos))
    for flag, enabled in (("-crop", crop),
                          ("-optimized-bands", optimized),
                          ("-traverse", traverse),
                          ("-cgal", cgal)):
        if enabled:
            command.append(flag)
    return subprocess.run(command, stdout=subprocess.PIPE)
# def compute_band(filename, nbband, heuristic):
# args = [
# COMPUTE_BAND_BIN,
# filename,
# str(nbband),
# "--" + str(heuristic)
# ]
# proc = subprocess.run(args, stdout=subprocess.PIPE)
# lines = proc.stdout.decode("utf-8").splitlines()
# pos = lines[-1].split()[1:]
# band_pos = [ float(e) for e in pos ]
# return proc, band_pos
if __name__ == "__main__":
    # Command-line driver: run parallel-arrangement over a set of input files,
    # repeating each run and appending all tool output to one log file that
    # plot_result.py can parse.
    parser = argparse.ArgumentParser()
    parser.add_argument("input", nargs="+",
                        help="set input, input can be a directory or list of file")
    parser.add_argument('--verbose', '-v', action='count', default=0,
                        help="set verbosity level")
    parser.add_argument("--log", "-l", default="log.txt",
                        help="set log filename, default is log.txt")
    config = parser.add_argument_group("benchmark configuration")
    config.add_argument("--thread", "-t", required=True, type=int, nargs="+",
                        help="set number of threads")
    # NOTE(review): required=True makes the default=5 unreachable.
    config.add_argument("--repeat", "-r", required=True, type=int, default=5,
                        help="set number of repeatition per data")
    config.add_argument("--nocgal", action="store_true",
                        help="disable benchmark with cgal method")
    optimi = parser.add_argument_group("optimization configuration")
    optimi.add_argument("--optimized", action="store_true",
                        help="use optimized method")
    optimi.add_argument("--crop", action="store_true",
                        help="use crop method")
    optimi.add_argument("--heuristic", type=Heuristic,
                        choices=list(Heuristic),
                        help="set heuristic used to compute band for each thread")
    binary = parser.add_argument_group("binaries configuration")
    binary.add_argument("--arr-bin", default=PARALLEL_ARRANGEMENT_BIN,
                        help="set 'parallel-arrangement' binary path, default value is set line 9")
    # binary.add_argument("--band-bin", default=COMPUTE_BAND_BIN,
    #                     help="set 'compute-band' binary path, default value is set line 11")
    args = parser.parse_args()
    # Rebind the module global so parallel_arrangement() picks up the override.
    PARALLEL_ARRANGEMENT_BIN = args.arr_bin
    # COMPUTE_BAND_BIN = args.band_bin
    if not bin_exist(PARALLEL_ARRANGEMENT_BIN):
        raise NameError("'" + PARALLEL_ARRANGEMENT_BIN + "' parallel-arrangement path does not exist. "
                        "Set variable 'PARALLEL_ARRANGEMENT_BIN' line 23 to a correct path.")
    # if not bin_exist(COMPUTE_BAND_BIN):
    #     raise NameError("'" + COMPUTE_BAND_BIN + "' compute-band path does not exist. "
    #                     "Set variable 'COMPUTE_BAND_BIN' line 24 to a correct path.")
    # A single directory argument expands to every file inside it.
    if len(args.input) == 1 and os.path.isdir(args.input[0]):
        dir = args.input[0]
        args.input = [ os.path.join(dir, f) for f in os.listdir(dir) ]
    # NOTE(review): 'iter' shadows the builtin; kept as-is.
    iter = 0
    total_iter = (len(args.thread) + (0 if args.nocgal else 1)) * len(args.input) * args.repeat

    def advance_and_print():
        # Progress counter shared with the loops below via the module global.
        global iter
        iter += 1
        if args.verbose > 1:
            print("progress: %d / %d" % (iter, total_iter))

    # Record separator expected by read_log() in plot_result.py.
    log_separator = ("#" * 80 + "\n").encode()
    start = time.time()
    with open(args.log, "wb") as log_file:
        for input in args.input:
            # No explicit strip positions unless the (disabled) band
            # heuristic computes them.
            band_xpos = { t:None for t in args.thread }
            # if args.heuristic:
            #     for thread in args.thread:
            #         if thread != 1:
            #             _, pos = compute_band(input, thread, args.heuristic)
            #             band_xpos[thread] = pos
            for j in range(args.repeat):
                if not args.nocgal:
                    # Reference run with the CGAL method (single thread).
                    proc = parallel_arrangement(input, 1,
                                                cgal=True,
                                                traverse=True)
                    log_file.write(log_separator)
                    log_file.write(("# \"%s\" repeat %d thread cgal\n" % (input, j)).encode())
                    log_file.write(proc.stdout)
                    advance_and_print()
                for thread in args.thread:
                    proc = parallel_arrangement(input, thread,
                                                crop=args.crop,
                                                optimized=args.optimized,
                                                xpos=band_xpos[thread],
                                                traverse=True)
                    log_file.write(log_separator)
                    log_file.write(("# \"%s\" repeat %d thread %d\n" % (input, j, thread)).encode())
                    log_file.write(proc.stdout)
                    advance_and_print()
    stop = time.time()
    if args.verbose:
        print("ellapsed time:", stop - start, "sec")
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please to comment