added JavaABS
chrstphlbr committed Mar 14, 2018
1 parent d2df5bd commit 32b7d27
Showing 27 changed files with 1,597 additions and 0 deletions.
94 changes: 94 additions & 0 deletions README.md
@@ -0,0 +1,94 @@
# Java API Benchmarking Score (ABS)
JavaABS is a tool that executes microbenchmarks written with JMH, injects artificial performance regressions into selected methods, and reports whether the benchmark suite detects them.

It is the tool used in Laaber and Leitner's paper "An Evaluation of Open-Source Software Microbenchmark Suites for Continuous Performance Assessment", published at the International Conference on Mining Software Repositories (MSR) 2018.

## Execution
Run the following command to execute ABS:
```bash
groovy -cp "src:." copper.groovy -d -c config.json
```

### Arguments
* `-c` path to the configuration file (required)
* `-d` compute the dynamic ABS metric

`copper.groovy` also defines an `-s` flag for a static call-graph metric, but that code path is currently commented out.

### Config File
An example configuration file for the RxJava project:
```json
{
    "project" : "/home/ubuntu/projects/java/RxJava",
    "build_system" : "gradle",
    "gradle_target" : "clean build -x test",
    "benchmarks" : "build/libs",
    "benchmark_jar" : "rxjava-1.2.10-SNAPSHOT-benchmarks.jar",
    "custom_benchmark_config" : "-wi 10 -i 20 -f 1",
    "degree_of_violation" : "0.6",
    "confidence" : "0.05",
    "min_effect_size" : "0.3",
    "log" : "tmp.csv",
    "repeats" : 2,
    "files" : [
        {
            "test_file": "src/main/java/rx/internal/util/SubscriptionList.java",
            "methods": [
                {
                    "name": "add",
                    "params": [
                        "Subscription"
                    ],
                    "typeParams": {}
                }
            ]
        },
        {
            "test_file": "src/main/java/rx/Observable.java",
            "methods": [
                {
                    "name": "lift",
                    "params": [
                        "Operator<? extends R, ? super T>"
                    ],
                    "typeParams": {
                        "R": "java.lang.Object",
                        "T": "java.lang.Object"
                    }
                },
                {
                    "name": "unsafeSubscribe",
                    "params": [
                        "Subscriber<? super T>"
                    ],
                    "typeParams": {
                        "T": "java.lang.Object"
                    }
                }
            ]
        }
    ]
}
```

JSON attributes:
* `"project"` path to the project directory
* `"build_system"` either `mvn` or `gradle`
* `"gradle_target"` Gradle tasks used to build the project (only with `gradle`)
* `"benchmarks"` folder (relative to the project) where the JMH jar is placed
* `"benchmark_jar"` file name of the JMH jar
* `"custom_benchmark_config"` additional JMH command-line arguments (e.g., warmup/measurement iterations and forks)
* `"degree_of_violation"` relative regression inserted into each method for ABS
* `"confidence"` significance level of the statistical tests (Bonferroni-corrected across benchmarks)
* `"min_effect_size"` minimum effect size a benchmark's change must exceed to count as a detection
* `"log"` output file path
* `"repeats"` number of repetitions of the experiment (r in the MSR paper)
* `"files"` the files and methods to inject regressions into
* `"benchmarks_to_execute"` optional list of `pattern`/`params` entries restricting which benchmarks are executed

### Output
JavaABS reports all results in CSV form to the file specified as `"log"`.
Each line holds the run number, the altered method (`Baseline` for the unmodified run), the fully-qualified microbenchmark name, and one measured result.
A sample output file is depicted below:
```csv
Run;Method altered;Microbenchmark;Result
0;Baseline;io.protostuff.benchmarks.RuntimeSchemaBenchmark.baseline;0.9870666624852784
0;Baseline;io.protostuff.benchmarks.RuntimeSchemaBenchmark.baseline;0.953202183493458
0;Baseline;io.protostuff.benchmarks.RuntimeSchemaBenchmark.generated_deserialize_10_int_field;80.25977955639304
0;Baseline;io.protostuff.benchmarks.RuntimeSchemaBenchmark.generated_deserialize_10_int_field;88.68216840394962
```
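For quick post-processing outside of JavaABS, the log can be aggregated per altered method and benchmark. Below is a minimal Groovy sketch, not part of the tool; it assumes the `tmp.csv` log path from the example configuration and simply averages the results:

```groovy
// Average the measured results per (altered method, benchmark) pair.
def sums = [:].withDefault { [count: 0, total: 0.0d] }
new File("tmp.csv").splitEachLine(";") { f ->
    if (f[0] == "Run") return // skip the header line
    def key = "${f[1]} -> ${f[2]}"
    sums[key].count++
    sums[key].total += (f[3] as double)
}
sums.each { key, s -> println "$key: ${s.total / s.count}" }
```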


279 changes: 279 additions & 0 deletions copper.groovy
@@ -0,0 +1,279 @@
import groovy.json.JsonBuilder
import groovy.json.JsonSlurper
import abs.*

//import java.nio.file.Paths
//import abs.callgraph.BFS
//import abs.callgraph.BenchmarkFinderImpl
//import abs.callgraph.InterfaceImplementerAll
//import abs.callgraph.StaticWalker
//import abs.callgraph.TypeSolverFactory
//TODO: switch dependency as soon as version 0.5.3 comes out. This is a hacky solution that needs manually changing the grape/ivy repository
// @Grab(group='com.github.javaparser', module='java-symbol-solver-core', version='0.5.2-cl')


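// Appends one CSV line per measured result: run;method;benchmark;value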
def writeToLog(def logfile, def run, def name, def result) {
    result.each { benchmark, data ->
        data.each { item ->
            logfile.append("$run;$name;$benchmark;$item\n")
        }
    }
}

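// Re-reads the CSV log into a nested map: run -> altered method -> benchmark -> list of results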
def defrostResultsFromLog(def filename) {
    def results = [:]
    new File(filename).splitEachLine(";") { fields ->
        def run = fields[0] as int
        def method = fields[1]
        def benchmark = fields[2]
        def val = fields[3] as double
        if(!results.containsKey(run))
            results[run] = [:]
        if(!results[run].containsKey(method))
            results[run][method] = [:]
        if(!results[run][method].containsKey(benchmark))
            results[run][method][benchmark] = []
        results[run][method][benchmark] << val
    }
    return results
}

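// One experiment repetition: a baseline run without code changes, then one benchmark run per configured method with the regression injected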
def do_run(def runnr) {

    BenchmarkRunner runner =
        (config.build_system && config.build_system == 'gradle') ?
            new GradleJMHBenchmarkRunner(config.project, config.benchmarks, config.benchmark_jar, config.custom_benchmark_config, config.gradle_target) :
            new MvnJMHBenchmarkRunner(config.project, config.benchmarks, config.benchmark_jar, config.custom_benchmark_config)

    if(config.benchmarks_to_execute) {
        def parsed = parseBenchmarksToExecute(config.benchmarks_to_execute)
        runner.setBenchmarksToExecute(parsed)
    }

    // config sanity check: make sure every configured method can be altered and reset
    config.files.each { file ->
        file.methods.each { method ->
            RegressionInducer changer = new PerfRegressionInducer("${config.project}/${file.test_file}",
                method.name, method.params, config.degree_of_violation as double)
            changer.doUpdate()
            changer.resetChanges()
        }
    }
    println "##### Config seems ok #####"

    new File("codedumps/$runnr").mkdir()

    println "##### Baseline Run $runnr #####"
    // baseline run
    def baselineResult = runner.run(new EmptyRegressionInducer(), "")
    writeToLog(logfile, runnr, "Baseline", baselineResult)
    println "##### Baseline Run $runnr Finished #####"

    // test runs
    def results = [:]
    config.files.each { file ->
        if (file == null || file.test_file == null) {
            println "##### Empty file, not executing"
            return
        }
        println "##### Started Running $runnr for ${file.test_file} #####"
        def testfile = file.test_file
        // use replace (literal), not replaceAll (regex), so the '.' in ".java" is not treated as a wildcard
        def dumpFileName = testfile.replaceAll("/", ".").replace(".java", "")
        new File("codedumps/$runnr/$dumpFileName").mkdir()
        file.methods.each { method ->
            println "##### Test run $runnr for ${method.name} #####"
            RegressionInducer changer = new PerfRegressionInducer("${config.project}/$testfile",
                method.name, method.params, config.degree_of_violation as double)
            def testResult = runner.run(changer, "codedumps/$runnr/$dumpFileName/${method.name}")
            def fullname = "$testfile.${method.name}(${method.params})"
            results[fullname] = testResult
            writeToLog(logfile, runnr, fullname, testResult)
        }
    }
    println "##### Finished $runnr #####"

}

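// Maps the optional "benchmarks_to_execute" config entries to BenchmarkToExecute instances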
def parseBenchmarksToExecute(def listOfConfigs) {
    listOfConfigs.collect{ config ->
        new BenchmarkToExecute(pattern:config.pattern, params:config.params)
    }
}

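// Returns the benchmarks that flag the altered method in the given run: t-test p-value below the Bonferroni-corrected alpha and effect size ("dm") above min_effect_size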
def detectableSlowdown(def allresults, def method, def run) {

    def runresults = allresults[run]
    def baselineresults = collectBaselineResults(allresults)
    def testresults = runresults.subMap([method])

    def tester = new TTester()
    def pVals = tester.testForChanges(baselineresults, testresults[method])
    // Bonferroni correction
    def correctedAlpha = Double.parseDouble(config.confidence) / pVals.size()
    def activeTests = pVals.findAll{ _, entry ->
        entry["p"] < correctedAlpha && entry["dm"] > (config.min_effect_size as double)
    }
    println "For $method in run $run, ${activeTests.size()} benchmarks showed a difference (of ${pVals.size()})"
    if(activeTests.size() > 0) {
        println "Indicating benchmarks:"
        activeTests.each{ m, r -> println " $m" }
    }
    return activeTests
}

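// Merges the baseline measurements of all runs into a single list per benchmark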
def collectBaselineResults(def all) {
    def maps = all.collect{ _, run ->
        run['Baseline']
    }
    def collectedMaps = [:]
    maps[0].each{ key, val ->
        collectedMaps[key] = maps.collect{ it[key] }.flatten()
    }
    return collectedMaps
}

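// Builds the project with Gradle or Maven (only used by the static approach below)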
def buildProject(config) {
    if (!config.scg?.compile) {
        return
    }
    def procStr = ""
    if (config.build_system && config.build_system == "gradle") {
        procStr = "./gradlew ${config.gradle_target}"
    } else {
        // assume maven
        procStr = "mvn clean install -DskipTests"
    }
    def proc = procStr.execute(null, new File(config.project))
    proc.in.eachLine { line -> println line }
    proc.out.close()
    proc.waitFor()
}


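// Parses CLI options; -c is required, and at least one of -d (dynamic) or -s (static) must be given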
def parseArgs(args) {
    def cli = new CliBuilder(usage: 'copper')
    cli.d('run dynamic ptc')
    cli.s('run static callgraph ptc')
    cli.c('config file', required:true, args:1)
    def options = cli.parse(args)
    if (options == null) {
        return null
    }

    if (!options.getProperty('d') && !options.getProperty('s')) {
        println("error: Missing required option: either d or s")
        cli.usage()
        return null
    }
    return options
}

def options = parseArgs(this.args)
if (options == null) {
    return
}

def configPath = options.getProperty('c')
println("# load config file: $configPath")
def slurper = new JsonSlurper()
config = slurper.parse(new File(configPath))

// dynamic approach
if (options.getProperty('d')) {
    logfile = new File((String)config.log)
    logfile.write("")
    repeats = config.repeats as int

    println "##### Creating directory for code dumps #####"
    if(new File("codedumps").exists()) {
        new File("codedumps").deleteDir()
    }
    new File("codedumps").mkdir()

    repeats.times { run ->
        do_run(run)
    }

    allResults = defrostResultsFromLog((String)config.log)
    // allResults = defrostResultsFromLog("/Users/philipp/Downloads/results/trial_run_1/result_run3_0.5_0.4_3.csv")
    runs = allResults.keySet()
    methods = []
    allResults.each { run, results ->
        results.each { method, _ ->
            methods << method
        }
    }
    methods = methods.unique() - "Baseline"

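    // a method counts as detected only if at least one benchmark flags it in every run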
    slowdownDetections = methods.count { method ->
        def slowdownsDetected = runs.collect { run ->
            detectableSlowdown(allResults, method, run)
        }

        def benchmarks = allResults[0]["Baseline"].collect{ it.key }
        def allDetected = benchmarks.any{ benchmark ->
            slowdownsDetected.every{ run ->
                run.keySet().contains(benchmark)
            }
        }
        return allDetected
    }
    ptc = slowdownDetections / (double)methods.size()
    println "Dynamic coverage value was ${ptc}"
}

// // static approach
// if (options.getProperty('s')) {
//     // compile project
//     buildProject(config)
//     // get jars
//     def filePattern = config.scg.filePattern
//     if (filePattern == null || filePattern == "") {
//         filePattern = ".*"
//     }
//
//     def jars = Project.jars(config.project, config.scg.jars, filePattern)
//     def typeSolver = TypeSolverFactory.get(jars)
//
//     def bf = new BenchmarkFinderImpl(typeSolver)
//     def benchmarkMethods = bf.all(Paths.get(config.project, config.benchmarks).toString())
//
//     // run call graph walker
//     def scg = new StaticWalker(config.project, config.scg.lib, config.scg.qualifiedPathPrefix, config.files, benchmarkMethods)
//     jars.each { jar ->
//         def path = jar.getAbsolutePath()
//         println("add jar '${path}'")
//         scg.addJar(path)
//     }
//     scg.addInterfaceToClassEdges(new InterfaceImplementerAll(config.project, typeSolver))
//     def finder = new BFS()
//     def rms = scg.reachableMethods(benchmarkMethods, finder)
//
//     def found = new HashSet()
//     rms.forEach({ _, fs ->
//         fs.each { f ->
//             found.add(f)
//         }
//     })
//     // calculate static performance test coverage
//     def sum = 0
//     config.files.each { f ->
//         sum += f.methods.size()
//     }
//     def ptc = 0
//     if (sum != 0) {
//         ptc = found.size() * 1.0 / sum
//     }
//     println("Core Method Finder: ${finder.noExceptions} successful searches; ${finder.exceptions} exceptions")
//     println("Static call graph coverage value was ${ptc}")
//
//     // print results
//     String outPath = config.scg.out
//     if (outPath != null && outPath != "") {
//         def outValues = new HashMap()
//         outValues.put("coverage", ptc)
//         outValues.put("methods", rms)
//         def out = new File(outPath)
//         out.write(new JsonBuilder(outValues).toPrettyString())
//     }
// }
