@inproceedings{dura2024clog,
  abstract = {We present Clog, a declarative language for describing static code checkers for C. Unlike other extensible state-of-the-art checker frameworks, Clog enables powerful interprocedural checkers without exposing the underlying program representation: Clog checkers consist of Datalog-style recursive rules that access the program under analysis via syntactic pattern matching and control flow edges only. We have implemented Clog on top of Clang, using a custom Datalog evaluation strategy that piggy-backs on Clang's AST matching facilities while working around Clang's limitations to achieve our design goal of representation independence. Our experiments demonstrate that Clog can concisely express a wide variety of checkers for different security vulnerabilities, with performance that is similar to Clang's own analyses and highly competitive on real-world programs.},
  address = {New York, NY, USA},
  author = {Dura, Alexandru and Reichenbach, Christoph},
  booktitle = {Proceedings of the 33rd ACM SIGPLAN International Conference on Compiler Construction},
  doi = {10.1145/3640537.3641579},
  isbn = {9798400705076},
  keywords = {C, Datalog, Static Analysis Frameworks, Syntactic Patterns},
  location = {Edinburgh, United Kingdom},
  numpages = {12},
  pages = {186--197},
  publisher = {Association for Computing Machinery},
  series = {CC 2024},
  title = {Clog: A Declarative Language for C Static Code Checkers},
  url = {https://doi.org/10.1145/3640537.3641579},
  year = {2024}
}
 
 

@inproceedings{couderc2023collections,
  author = {Noric Couderc and Christoph Reichenbach and Emma S{\"{o}}derberg},
  booktitle = {Proceedings of the 27th International Conference on Evaluation and Assessment in Software Engineering ({EASE} 2023)},
  doi = {10.1145/3593434.3593469},
  location = {Oulu, Finland},
  month = {Jun},
  pages = {111--120},
  publisher = {{ACM}},
  title = {Classification-based Static Collection Selection for Java: Effectiveness and Adaptability},
  url = {https://doi.org/10.1145/3593434.3593469},
  year = {2023}
}
 
 

@conference{rizwan2023rossmarie,
  author = {Rizwan, Momina and Reichenbach, Christoph and Kr{\"u}ger, Volker},
  booktitle = {Second Workshop on Quality and Reliability Assessment of Robotic Software Architectures and Components},
  location = {London, UK},
  month = {Jun},
  title = {ROSSMARie: A Domain-Specific Language To Express Dynamic Safety Rules and Recovery Strategies for Autonomous Robots},
  year = {2023}
}
 
 

@inproceedings{rizwan2023ezskiros,
  author = {Rizwan, Momina and Caldas, Ricardo and Reichenbach, Christoph and Mayr, Matthias},
  booktitle = {2023 IEEE/ACM 5th International Workshop on Robotics Software Engineering (RoSE)},
  doi = {10.1109/RoSE59155.2023.00014},
  location = {Melbourne, Australia},
  month = {May},
  pages = {61-68},
  publisher = {{IEEE}},
  title = {EzSkiROS: A Case Study on Embedded Robotics DSLs to Catch Bugs Early},
  year = {2023}
}
 
 

@inproceedings{couderc2023bayesian,
  abstract = {Statistics are part of any empirical science, and performance analysis is no exception. However, for non-statisticians, picking the right statistical tool to answer a research question can be challenging; each statistical tool comes with a set of assumptions, and it is not clear to researchers what happens when those assumptions are violated. Bayesian statistics offers a framework with more flexibility and with explicit assumptions. In this paper, we present a method to analyse benchmark results using Bayesian inference. We demonstrate how to perform a Bayesian analysis of variance (ANOVA) to estimate what factors matter most for performance, and describe how to investigate what factors affect the impact of optimizations. We find the Bayesian model more flexible, and the Bayesian ANOVA's output easier to interpret.},
  author = {Couderc, Noric and Reichenbach, Christoph and S{\"o}derberg, Emma},
  booktitle = {2023 IEEE/ACM 45th International Conference on Software Engineering: New Ideas and Emerging Results (ICSE-NIER)},
  doi = {10.1109/ICSE-NIER58687.2023.00026},
  location = {Melbourne, Australia},
  month = {May},
  pages = {112-116},
  publisher = {{IEEE}},
  title = {Performance Analysis with Bayesian Inference},
  url = {https://doi.org/10.1109/ICSE-NIER58687.2023.00026},
  year = {2023}
}
 
 

@inproceedings{munir2023todler,
  author = {Munir, Sundas and Reichenbach, Christoph},
  booktitle = {2023 IEEE/ACM 6th International Workshop on Emerging Trends in Software Engineering for Blockchain (WETSEB)},
  doi = {10.1109/WETSEB59161.2023.00007},
  location = {Melbourne, Australia},
  month = {May},
  pages = {9-16},
  publisher = {{IEEE}},
  title = {{TODLER}: A Transaction Ordering Dependency {anaLyzER} - for Ethereum Smart Contracts},
  year = {2023}
}
 
 

@inproceedings{riouak2022jfeature,
  abstract = {Software corpora are crucial for evaluating research artifacts and ensuring repeatability of outcomes. Corpora such as DaCapo and Defects4J provide a collection of real-world open-source projects for evaluating the robustness and performance of software tools like static analysers. However, what do we know about these corpora? What do we know about their composition? Are they really suited for our particular problem? We developed JFEATURE, an extensible static analysis tool that extracts syntactic and semantic features from Java programs, to assist developers in answering these questions. We demonstrate the potential of JFEATURE by applying it to four widely-used corpora in the program analysis area, and we suggest other applications, including longitudinal studies of individual Java projects and the creation of new corpora.},
  address = {Los Alamitos, CA, USA},
  author = {Riouak, Idriss and Hedin, G{\"o}rel and Reichenbach, Christoph and Fors, Niklas},
  booktitle = {2022 IEEE 22nd International Working Conference on Source Code Analysis and Manipulation (SCAM)},
  doi = {10.1109/SCAM55253.2022.00033},
  keywords = {java;source coding;semantics;static analysis;syntactics;feature extraction;robustness},
  month = {Oct},
  pages = {236-241},
  publisher = {IEEE Computer Society},
  title = {{JFeature: Know Your Corpus}},
  url = {https://doi.ieeecomputersociety.org/10.1109/SCAM55253.2022.00033},
  year = {2022}
}
 
 

@article{dura2021javadl,
  author = {Alexandru Dura and Christoph Reichenbach and Emma S{\"o}derberg},
  doi = {10.1145/3485542},
  journal = {Proceedings of the ACM on Programming Languages},
  month = {Oct},
  number = {OOPSLA},
  publisher = {ACM},
  title = {JavaDL: Automatically Incrementalizing Java Bug Pattern Detection},
  volume = {5},
  year = {2021}
}
 
 

@inproceedings{riouak2021intracfg,
  author = {Idriss Riouak and Christoph Reichenbach and G{\"{o}}rel Hedin and Niklas Fors},
  booktitle = {21st {IEEE} International Working Conference on Source Code Analysis and Manipulation, {SCAM} 2021},
  doi = {10.1109/SCAM52516.2021.00009},
  location = {Virtual},
  month = {Sep},
  pages = {1-11},
  publisher = {{IEEE} Computer Society},
  title = {A Precise Framework for Source-Level Control-Flow Analysis},
  year = {2021}
}
 
 

@inproceedings{haberg2021vulnerable,
  abstract = {Open-source software (OSS) is increasingly used by software applications. It allows for code reuse, but also comes with the problem of potentially being affected by the vulnerabilities that are found in the OSS libraries. With large numbers of OSS components and a large number of published vulnerabilities, it becomes challenging to identify and analyze which OSS components need to be patched and updated. In addition to matching vulnerable libraries to those used in software products, it is also necessary to analyze if the vulnerable functionality is actually used by the software. This process is both time-consuming and error-prone. Automating this process presents several challenges, but has the potential to significantly decrease vulnerability exposure time. In this paper, we propose a modular framework for analyzing if software code is using the vulnerable part of a library, by analyzing and matching the call graphs of the software with changes resulting from security patches. Further, we provide an implementation of the framework targeting Java and the Maven dependency management system. This allows us to identify 20% of the dependencies in our sample projects as false positives. We also identify and discuss challenges and limitations in our approach.},
  author = {Rasmus Hagberg and Martin Hell and Christoph Reichenbach},
  booktitle = {18th International Conference on Security and Cryptography, SECRYPT 2021},
  doi = {10.5220/0010548205230530},
  language = {English},
  publisher = {INSTICC Press},
  title = {Using Program Analysis to Identify the Use of Vulnerable Functions},
  year = {2021}
}
 
 

@inproceedings{reichenbach2021ticks,
  abstract = {Software bugs cost time, money, and lives. They drive software research and development efforts, and are central to modern software engineering. Yet we lack a clear and general definition of what bugs are. Some bugs are defects, clearly defined as failures to meet some requirement or specification. However,there are many forms of undesirable program behaviour that are completely compatible with a typical program{\textquoteright}s specification. In this paper, we argue that the lack of a criterion for identifying non-defect bugs is hampering the development of tools that find and fix bugs. We propose such a criterion, based on the idea of wasted effort, discuss how bugs that meet our definition of software ticks can complement defects, and sketch how our definition can help future work on software tools.},
  author = {Christoph Reichenbach},
  booktitle = {Proceedings of the 43rd International Conference on Software Engineering: New Ideas and Emerging Results Track (ICSE-NIER 2021)},
  doi = {10.1109/ICSE-NIER52604.2021.00021},
  language = {English},
  location = {Virtual},
  note = {43rd International Conference on Software Engineering: New Ideas and Emerging Results, ICSE-NIER 2021; Conference date: 23-05-2021 through 29-05-2021},
  publisher = {{IEEE}},
  title = {{Software Ticks Need No Specifications}},
  year = {2021}
}
 
 

@inproceedings{couderc2020icpe,
  abstract = {Software developers use collection data structures extensively and are often faced with the task of picking which collection to use. Choosing an inappropriate collection can have major negative impact on runtime performance. However, choosing the right collection can be difficult since developers are faced with many possibilities, which often appear functionally equivalent. One approach to assist developers in this decision-making process is to micro-benchmark data-structures in order to provide performance insights. In this paper, we present results from experiments on Java collections (maps, lists, and sets) using our tool JBrainy, which synthesises micro-benchmarks with sequences of random method calls. We compare our results to the results of a previous experiment on Java collections that uses a micro-benchmarking approach focused on single methods. Our results support previous results for lists, in that we found ArrayList to yield the best running time in 90% of our benchmarks. For sets, we found LinkedHashSet to yield the best performance in 78% of the benchmarks. In contrast to previous results, we found TreeMap and LinkedHashMap to yield better runtime performance than HashMap in 84% of cases.},
  address = {New York, NY, USA},
  author = {Couderc, Noric and S{\"o}derberg, Emma and Reichenbach, Christoph},
  booktitle = {Companion of the ACM/SPEC International Conference on Performance Engineering},
  doi = {10.1145/3375555.3383760},
  isbn = {9781450371094},
  keywords = {Java, performance, collections},
  location = {Edmonton AB, Canada},
  numpages = {4},
  pages = {42--45},
  publisher = {Association for Computing Machinery},
  series = {ICPE '20},
  title = {{JBrainy: Micro-benchmarking Java Collections with Interference (Work in Progress Paper)}},
  url = {https://doi.org/10.1145/3375555.3383760},
  year = {2020}
}
 
 

@inproceedings{balldin2020filterdsl,
  address = {New York, NY, USA},
  author = {Balldin, Hampus and Reichenbach, Christoph},
  booktitle = {Proceedings of the 19th ACM SIGPLAN International Conference on Generative Programming: Concepts and Experiences},
  doi = {10.1145/3425898.3426955},
  isbn = {978-1-4503-8174-1},
  keywords = {filtering language, packet filtering, network security, domain-specific languages},
  location = {Virtual},
  numpages = {13},
  publisher = {ACM},
  series = {GPCE '20},
  title = {{A Domain-Specific Language for Filtering in Application-Level Gateways}},
  url = {https://doi.org/10.1145/3425898.3426955},
  year = {2020}
}
 
 

@inproceedings{reichenbach2020pldrevisited,
  address = {New York, NY, USA},
  author = {Reichenbach, Christoph},
  booktitle = {Proceedings of the 2020 ACM SIGPLAN SPLASH-E Symposium},
  doi = {10.1145/3426431.3428655},
  isbn = {978-1-4503-8180-2},
  keywords = {PL-Detective, Education, Programming Language Concepts},
  location = {Virtual},
  numpages = {11},
  pages = {11--22},
  publisher = {ACM},
  series = {SPLASH-E '20},
  title = {{The PL-Detective Revisited}},
  url = {https://doi.org/10.1145/3426431.3428655},
  year = {2020}
}
 
 

@inproceedings{dura2019metadl,
  acmid = {3329970},
  address = {New York, NY, USA},
  author = {Dura, Alexandru and Balldin, Hampus and Reichenbach, Christoph},
  booktitle = {Proceedings of the 8th ACM SIGPLAN International Workshop on State Of the Art in Program Analysis},
  doi = {10.1145/3315568.3329970},
  isbn = {978-1-4503-6720-2},
  keywords = {Datalog, Domain-Specific Languages, Pattern Matching, Static Analysis},
  location = {Phoenix, AZ, USA},
  numpages = {6},
  pages = {38--43},
  publisher = {ACM},
  series = {SOAP 2019},
  title = {{MetaDL: Analysing Datalog in Datalog}},
  url = {http://doi.acm.org/10.1145/3315568.3329970},
  year = {2019}
}
 
 

@article{narasimhan2018copypaste,
  abstract = {Copy-paste-modify is a form of software reuse in which developers explicitly duplicate source code. This duplicated source code, amounting to a code clone, is adapted for a new purpose. Copy-paste-modify is popular among software developers, however, empirical evidence shows that it complicates software maintenance and increases the frequency of bugs. To allow developers to use copy-paste-modify without having to worry about these concerns, we propose an approach that automatically merges similar pieces of code by creating suitable abstractions. Because different kinds of abstractions may be beneficial in different contexts, our approach offers multiple abstraction mechanisms, which were selected based on a study of popular open-source repositories. To demonstrate the feasibility of our approach, we have designed and implemented a prototype merging tool for C++ and evaluated it on a number of code clones exhibiting some variation, i.e., near-miss clones, in popular Open Source packages. We observed that maintainers find our algorithmically created abstractions to be largely preferable to the existing duplicated code.},
  author = {Narasimhan, Krishna and Reichenbach, Christoph and Lawall, Julia},
  day = {01},
  doi = {10.1007/s10515-018-0238-5},
  issn = {1573-7535},
  journal = {Automated Software Engineering},
  month = {Sep},
  number = {3},
  pages = {627--673},
  title = {Cleaning up copy--paste clones with interactive merging},
  url = {https://doi.org/10.1007/s10515-018-0238-5},
  volume = {25},
  year = {2018}
}
 
 

@inproceedings{narasimhan2017migration,
  acmid = {3018890},
  address = {New York, NY, USA},
  author = {Narasimhan, Krishna and Reichenbach, Christoph and Lawall, Julia},
  booktitle = {Proceedings of the 2017 ACM SIGPLAN Workshop on Partial Evaluation and Program Manipulation},
  doi = {10.1145/3018882.3018890},
  isbn = {978-1-4503-4721-1},
  keywords = {DSL, Program Transformation, Static Analysis},
  location = {Paris, France},
  numpages = {12},
  pages = {47--58},
  publisher = {ACM},
  series = {PEPM 2017},
  title = {Interactive Data Representation Migration: Exploiting Program Dependence to Aid Program Transformation},
  url = {http://doi.acm.org/10.1145/3018882.3018890},
  year = {2017}
}
 
 

@inproceedings{narasimhan2015cpr,
  acmid = {3343965},
  address = {Piscataway, NJ, USA},
  author = {Narasimhan, Krishna and Reichenbach, Christoph},
  booktitle = {Proceedings of the 30th IEEE/ACM International Conference on Automated Software Engineering},
  doi = {10.1109/ASE.2015.39},
  isbn = {978-1-5090-0024-1},
  location = {Lincoln, Nebraska},
  numpages = {11},
  pages = {630--640},
  publisher = {IEEE Press},
  series = {ASE '15},
  title = {Copy and Paste Redeemed},
  url = {https://doi.org/10.1109/ASE.2015.39},
  year = {2015}
}
 
 

@inproceedings{ackermann2015pqlopt,
  abstract = {In many data processing tasks, declarative query programming offers substantial benefit over manual data analysis: the query processors found in declarative systems can use powerful algorithms such as query planning to choose high-level execution strategies during compilation. However, the principal downside of such languages is that their primitives must be carefully curated, to allow the query planner to correctly estimate their overhead. In this paper, we examine this challenge in one such system, PQL/Java. PQL/Java adds a powerful declarative query language to Java to enable and automatically parallelise queries over the Java heap. In the past, the language has not provided any support for custom user-designed datatypes, as such support requires complex interactions with its query planner and backend.},
  address = {Berlin, Heidelberg},
  author = {Ackermann, Hilmar and Reichenbach, Christoph and M{\"u}ller, Christian and Smaragdakis, Yannis},
  booktitle = {Compiler Construction},
  doi = {10.1007/978-3-662-46663-6_6},
  editor = {Franke, Bj{\"o}rn},
  isbn = {978-3-662-46663-6},
  pages = {111--130},
  publisher = {Springer Berlin Heidelberg},
  title = {A Backend Extension Mechanism for PQL/Java with Free Run-Time Optimisation},
  year = {2015}
}
 
 

@article{li2014rfbi,
  acmid = {2656201},
  address = {New York, NY, USA},
  articleno = {7},
  author = {Li, Kaituo and Reichenbach, Christoph and Csallner, Christoph and Smaragdakis, Yannis},
  doi = {10.1145/2656201},
  issn = {1049-331X},
  issue_date = {December 2014},
  journal = {ACM Trans. Softw. Eng. Methodol.},
  keywords = {False warnings, RFBI, existing test cases},
  month = {Dec},
  number = {2},
  numpages = {32},
  pages = {7:1--7:32},
  publisher = {ACM},
  title = {Residual Investigation: Predictive and Precise Bug Detection},
  url = {http://doi.acm.org/10.1145/2656201},
  volume = {24},
  year = {2014}
}
 
 

@inproceedings{li2013sedge,
  author = {Li, Kaituo and Reichenbach, Christoph and Smaragdakis, Yannis and Diao, Yanlei and Csallner, Christoph},
  booktitle = {2013 28th IEEE/ACM International Conference on Automated Software Engineering (ASE)},
  doi = {10.1109/ASE.2013.6693083},
  keywords = {data flow analysis;program testing;programming languages;reasoning about programs;specification languages;symbolic example data generation;dataflow programs;automatic testing;mapreduce programs;pig platform;Hadoop map-reduce programs;operator use;SEDGE system;dataflow operator;symbolic constraints;symbolic reasoning engine;SMT solver;concolic execution;conventional programming language;dataflow domain;SDSS benchmarks;high-level dataflow language;complex programs;state-of-the-art dynamic-symbolic execution;map-reduce code;test cases;Concrete;Cognition;Benchmark testing;Programming;Educational institutions;Data processing;Extraterrestrial measurements},
  month = {Nov},
  pages = {235-245},
  title = {SEDGE: Symbolic example data generation for dataflow programs},
  year = {2013}
}
 
 

@inproceedings{li2013secondorder,
  author = {Li, Kaituo and Reichenbach, Christoph and Smaragdakis, Yannis and Young, Michal},
  booktitle = {Proceedings of the 2013 9th Joint Meeting on Foundations of Software Engineering - {ESEC}/{FSE} 2013},
  doi = {10.1145/2491411.2491457},
  publisher = {Association for Computing Machinery ({ACM})},
  title = {Second-order constraints in dynamic invariant inference},
  url = {http://dx.doi.org/10.1145/2491411.2491457},
  year = {2013}
}
 
 

@incollection{altidor2012wildcards,
  author = {John Altidor and Christoph Reichenbach and Yannis Smaragdakis},
  booktitle = {{ECOOP} 2012 {\textendash} Object-Oriented Programming},
  doi = {10.1007/978-3-642-31057-7_23},
  pages = {509--534},
  publisher = {Springer Berlin Heidelberg},
  title = {Java Wildcards Meet Definition-Site Variance},
  url = {http://dx.doi.org/10.1007/978-3-642-31057-7_23},
  year = {2012}
}
 
 

@inproceedings{li2012rfbi,
  author = {Li, Kaituo and Reichenbach, Christoph and Csallner, Christoph and Smaragdakis, Yannis},
  booktitle = {Proceedings of the 2012 International Symposium on Software Testing and Analysis - {ISSTA} 2012},
  doi = {10.1145/2338965.2336789},
  publisher = {Association for Computing Machinery ({ACM})},
  title = {Residual investigation: predictive and precise bug detection},
  url = {http://dx.doi.org/10.1145/2338965.2336789},
  year = {2012}
}
 
 

@incollection{reichenbach2012pql,
  author = {Reichenbach, Christoph and Smaragdakis, Yannis and Immerman, Neil},
  booktitle = {{ECOOP} 2012 {\textendash} Object-Oriented Programming},
  doi = {10.1007/978-3-642-31057-7_4},
  pages = {53--78},
  publisher = {Springer Berlin Heidelberg},
  title = {{PQL}: A Purely-Declarative Java Extension for Parallel Programming},
  url = {http://dx.doi.org/10.1007/978-3-642-31057-7_4},
  year = {2012}
}
 
 

@article{reichenbach2010gc,
  author = {Christoph Reichenbach and Neil Immerman and Yannis Smaragdakis and Edward E. Aftandilian and Samuel Z. Guyer},
  doi = {10.1145/1932682.1869482},
  journal = {{ACM} {SIGPLAN} Notices},
  month = {Oct},
  number = {10},
  pages = {256},
  publisher = {Association for Computing Machinery ({ACM})},
  title = {What can the {GC} compute efficiently?},
  url = {http://dx.doi.org/10.1145/1932682.1869482},
  volume = {45},
  year = {2010}
}
 
 

@misc{reichenbach2010paralleljvm,
  author = {Christoph Reichenbach and Yannis Smaragdakis},
  booktitle = {Proceedings of the Fun Ideas and Thoughts Session at PLDI},
  month = {Jun},
  title = {A New Java Runtime for a Parallel World},
  year = {2010}
}
 
 

@article{reichenbach2009axil,
  author = {Christoph Reichenbach and Michael G. Burke and Igor Peshansky and Mukund Raghavachari},
  doi = {10.1016/j.is.2009.03.013},
  journal = {Information Systems},
  month = {Nov},
  number = {7},
  pages = {624--642},
  publisher = {Elsevier {BV}},
  title = {Analysis of imperative {XML} programs},
  url = {https://doi.org/10.1016/j.is.2009.03.013},
  volume = {34},
  year = {2009}
}
 
 

@inproceedings{reichenbach2009pm,
  abstract = {Modern development environments support refactoring by providing atomically behaviour-preserving transformations. While useful, these transformations are limited in three ways: (i) atomicity forces transformations to be complex and opaque, (ii) the behaviour preservation requirement disallows deliberate behaviour evolution, and (iii) atomicity limits code reuse opportunities for refactoring implementers.},
  address = {Berlin, Heidelberg},
  author = {Reichenbach, Christoph and Coughlin, Devin and Diwan, Amer},
  booktitle = {ECOOP 2009 -- Object-Oriented Programming},
  doi = {10.1007/978-3-642-03013-0_18},
  editor = {Drossopoulou, Sophia},
  isbn = {978-3-642-03013-0},
  pages = {394--418},
  publisher = {Springer Berlin Heidelberg},
  title = {Program Metamorphosis},
  url = {http://dx.doi.org/10.1007/978-3-642-03013-0_18},
  year = {2009}
}
 
 

@article{henkel2008debugging,
  acmid = {1363105},
  address = {New York, NY, USA},
  articleno = {14},
  author = {Henkel, Johannes and Reichenbach, Christoph and Diwan, Amer},
  doi = {10.1145/1363102.1363105},
  issn = {1049-331X},
  issue_date = {June 2008},
  journal = {ACM Trans. Softw. Eng. Methodol.},
  keywords = {Algebraic interpretation, algebraic specifications, specification discovery},
  month = {Jun},
  number = {3},
  numpages = {37},
  pages = {14:1--14:37},
  publisher = {ACM},
  title = {Developing and Debugging Algebraic Specifications for Java Classes},
  url = {http://doi.acm.org/10.1145/1363102.1363105},
  volume = {17},
  year = {2008}
}
 
 

@article{henkel08errata,
  author = {Henkel, Johannes and Reichenbach, Christoph and Diwan, Amer},
  doi = {10.1109/TSE.2008.22},
  issn = {0098-5589},
  journal = {IEEE Transactions on Software Engineering},
  keywords = {Documentation;Java;Containers;Equations;Computer science},
  month = {Mar},
  number = {2},
  pages = {303-303},
  title = {Errata for ``Discovering Documentation for Java Container Classes'' [Aug 07 526-543]},
  volume = {34},
  year = {2008}
}
 
 

@article{henkel2007dicovering,
  author = {Henkel, Johannes and Reichenbach, Christoph and Diwan, Amer},
  doi = {10.1109/TSE.2007.70705},
  issn = {0098-5589},
  journal = {IEEE Transactions on Software Engineering},
  keywords = {algebraic specification;Java;software libraries;system documentation;Java container class;software library;formal specification;documentation discovery;algebraic axiom;Documentation;Java;Containers;Formal specifications;Libraries;Packaging;Probes;Testing;Data structures;Natural languages},
  month = {Aug},
  number = {8},
  pages = {526-543},
  title = {Discovering Documentation for Java Container Classes},
  volume = {33},
  year = {2007}
}
 
 

@inproceedings{burke2007axil,
  abstract = {The widespread adoption of XML has led to programming languages that support XML as a first class construct. In this paper, we present a method for analyzing and optimizing imperative XML processing programs. In particular, we present a program analysis, based on a flow-sensitive type system, for detecting both redundant computations and redundant traversals in XML processing programs. The analysis handles declarative queries over XML data and imperative loops that traverse XML values explicitly in a uniform framework. We describe two optimizations that take advantage of our analysis: one merges queries that traverse the same set of XML nodes, and the other replaces an XPath expression by a previously computed result. We show the effectiveness of our method by providing performance measurements on XMark benchmark queries and XLinq sample queries.},
  address = {Berlin, Heidelberg},
  author = {Burke, Michael G. and Peshansky, Igor and Raghavachari, Mukund and Reichenbach, Christoph},
  booktitle = {Database Programming Languages},
  editor = {Arenas, Marcelo and Schwartzbach, Michael I.},
  isbn = {978-3-540-75987-4},
  pages = {216--230},
  publisher = {Springer Berlin Heidelberg},
  title = {Analysis of Imperative XML Programs},
  year = {2007}
}
 
 

@inproceedings{salvetti2004impact,
  author = {Salvetti, Franco and Lewis, Stephen and Reichenbach, Christoph},
  booktitle = {Proceedings of the AAAI Spring Symposium on Exploring Attitude and Affect in Text: Theories and Applications},
  location = {Stanford, CA, USA},
  pages = {128--133},
  publisher = {The AAAI Press},
  title = {Impact of lexical filtering on overall opinion polarity identification},
  year = {2004}
}
 
 

@inbook{zicari2016bigdata,
  abstract = {In the first part of this chapter we illustrate how a big data project can be set up and optimized. We explain the general value of big data analytics for the enterprise and how value can be derived by analyzing big data. We go on to introduce the characteristics of big data projects and how such projects can be set up, optimized and managed. Two exemplary real word use cases of big data projects are described at the end of the first part. To be able to choose the optimal big data tools for given requirements, the relevant technologies for handling big data are outlined in the second part of this chapter. This part includes technologies such as NoSQL and NewSQL systems, in-memory databases, analytical platforms and Hadoop based solutions. Finally, the chapter is concluded with an overview over big data benchmarks that allow for performance optimization and evaluation of big data technologies. Especially with the new big data applications, there are requirements that make the platforms more complex and more heterogeneous. The relevant benchmarks designed for big data technologies are categorized in the last part.},
  address = {Cham},
  author = {Zicari, Roberto V. and Rosselli, Marten and Ivanov, Todor and Korfiatis, Nikolaos and Tolle, Karsten and Niemann, Raik and Reichenbach, Christoph},
  booktitle = {Big Data Optimization: Recent Developments and Challenges},
  doi = {10.1007/978-3-319-30265-2_2},
  editor = {Emrouznejad, Ali},
  isbn = {978-3-319-30265-2},
  pages = {17--47},
  publisher = {Springer International Publishing},
  title = {Setting Up a Big Data Project: Challenges, Opportunities, Technologies and Optimization},
  url = {https://doi.org/10.1007/978-3-319-30265-2_2},
  year = {2016}
}
 
 

@inbook{salvetti2006opinionpolarity,
  abstract = {One approach to the assessment of overall opinion polarity (OvOP) of reviews, a concept defined in this paper, is the use of supervised machine learning mechanisms. In this paper, the impact of lexical feature selection and feature generalization, applied to reviews, on the precision of two probabilistic classifiers (Na{\"i}ve Bayes and Markov Model) with respect to OvOP identification is observed. Feature generalization based on hypernymy as provided by WordNet, and feature selection based on part-ofspeech (POS) tags are evaluated. A ranking criterion is introduced, based on a function of the probability of having positive or negative polarity, which makes it possible to achieve 100{\%} precision with 10{\%} recall. Movie reviews are used for training and testing the probabilistic classifiers, which achieve 80{\%} precision.},
  address = {Dordrecht},
  author = {Salvetti, Franco and Reichenbach, Christoph and Lewis, Stephen},
  booktitle = {Computing Attitude and Affect in Text: Theory and Applications},
  doi = {10.1007/1-4020-4102-0_23},
  editor = {Shanahan, James G. and Qu, Yan and Wiebe, Janyce},
  isbn = {978-1-4020-4102-0},
  pages = {303--316},
  publisher = {Springer Netherlands},
  title = {Opinion Polarity Identification of Movie Reviews},
  url = {https://doi.org/10.1007/1-4020-4102-0_23},
  year = {2006}
}
 
 

@phdthesis{reichenbach2010pm,
  author = {Christoph Reichenbach},
  school = {Department of Computer Science, University of Colorado at Boulder},
  title = {Program Metamorphosis},
  year = {2010}
}
 
 

@techreport{reichenbach2007pm,
  author = {Reichenbach, Christoph and Diwan, Amer},
  institution = {Department of Computer Science, University of Colorado at Boulder},
  number = {1036},
  title = {Program Metamorphosis},
  year = {2007}
}
 
 

@techreport{henkel2008debugging-tr,
  author = {Henkel, Johannes and Reichenbach, Christoph and Diwan, Amer},
  institution = {Department of Computer Science, University of Colorado at Boulder},
  number = {984},
  title = {Developing and Debugging Algebraic Specifications for Java Classes},
  year = {2004}
}
 
 

@techreport{henkel2007dicovering-tr,
  author = {Henkel, Johannes and Reichenbach, Christoph and Diwan, Amer},
  institution = {Department of Computer Science, University of Colorado at Boulder},
  number = {985},
  title = {Discovering Documentation for Java Container Classes},
  year = {2004}
}