@inproceedings{unif19,
  author    = {Daniel J. Dougherty},
  title     = {A {Coq} Formalization of {Boolean} Unification},
  booktitle = {33rd International Workshop on Unification},
  year      = 2019,
  abstract  = {We report on a verified implementation of two
    (well-known) algorithms for unification modulo the theory of
    Boolean rings: L{\"o}wenheim's method and the method of
    Successive Variable Elimination. The implementations and
    proofs of correctness were done in the Coq proof assistant;
    we view this contribution as an early step in a larger
    project of developing a suite of verified implementations of
    equational unification algorithms.},
  pdf       = {unif19.pdf}
}
@inproceedings{ifm18,
  author    = {Daniel J. Dougherty and Joshua D. Guttman and John D. Ramsdell},
  editor    = {Carlo A. Furia and Kirsten Winter},
  title     = {Security Protocol Analysis in Context: Computing Minimal
    Executions Using {SMT} and {CPSA}},
  booktitle = {Integrated Formal Methods - 14th International Conference,
    {IFM} 2018, Maynooth, Ireland, September 5-7, 2018, Proceedings},
  series    = {Lecture Notes in Computer Science},
  volume    = {11023},
  pages     = {130--150},
  publisher = {Springer},
  year      = {2018},
  url       = {https://doi.org/10.1007/978-3-319-98938-9_8},
  doi       = {10.1007/978-3-319-98938-9_8},
  timestamp = {Wed, 22 Aug 2018 11:24:17 +0200},
  biburl    = {https://dblp.org/rec/bib/conf/ifm/DoughertyGR18},
  bibsource = {dblp computer science bibliography, https://dblp.org},
  abstract  = {Cryptographic protocols are used in different
    environments, but existing methods for protocol analysis
    focus only on the protocols, without being sensitive to
    assumptions about their environments. LPA is a tool which
    analyzes protocols in context. LPA uses two programs,
    cooperating with each other: CPSA, a well-known system for
    protocol analysis, and Razor, a model-finder based on SMT
    technology. Our analysis follows the enrich-by-need paradigm,
    in which models of protocol execution are generated and
    examined. The choice of which models to generate is
    important, and we motivate and evaluate LPA's strategy of
    building minimal models. ``Minimality'' can be defined with
    respect to either of two preorders, namely the homomorphism
    preorder and the embedding preorder (i.e.~the preorder of
    injective homomorphisms); we discuss the merits of each. Our
    main technical contributions are algorithms for building
    homomorphism-minimal models and for generating a
    set-of-support for the models of a theory, in each case by
    scripting interactions with an SMT solver.},
  pdf       = {ifm.pdf}
}
@article{arxiv18,
  author        = {Daniel J. Dougherty and Joshua D. Guttman and John D. Ramsdell},
  title         = {Homomorphisms and Minimality for {Enrich-by-Need} Security
    Analysis},
  journal       = {ArXiv e-prints},
  archiveprefix = {arXiv},
  eprint        = {1804.07158},
  primaryclass  = {cs.CR},
  year          = 2018,
  month         = apr,
  pdf           = {arxiv-lpa.pdf},
  abstract      = {Cryptographic protocols are used in different
    environments, but existing methods for protocol analysis
    focus only on the protocols, without being sensitive to
    assumptions about their environments. LPA is a tool which
    analyzes protocols in context. LPA uses two programs,
    cooperating with each other: CPSA, a well-known system for
    protocol analysis, and Razor, a model-finder based on SMT
    technology. Our analysis follows the enrich-by-need paradigm,
    in which models of protocol execution are generated and
    examined. The choice of which models to generate is
    important, and we motivate and evaluate LPA's strategy of
    building minimal models. ``Minimality'' can be defined with
    respect to either of two preorders, namely the homomorphism
    preorder and the embedding preorder (i.e.~the preorder of
    injective homomorphisms); we discuss the merits of each. Our
    main technical contributions are algorithms for building
    homomorphism-minimal models and for generating a
    set-of-support for the models of a theory, in each case by
    scripting interactions with an SMT solver.}
}
@inproceedings{fse17,
  author    = {Tim Nelson and Natasha Danas and Daniel J. Dougherty and
    Shriram Krishnamurthi},
  title     = {The power of ``why'' and ``why not'': enriching scenario
    exploration with provenance},
  abstract  = {Scenario-finding tools like the Alloy Analyzer are widely
    used in numerous concrete domains like security, network
    analysis, UML analysis, and so on. They can help to verify
    properties and, more generally, aid in exploring a system's
    behavior. While scenario finders are valuable for their
    ability to produce concrete examples, individual scenarios
    only give insight into what is possible, leaving the user to
    make their own conclusions about what might be necessary.
    This paper enriches scenario finding by allowing users to ask
    ``why?'' and ``why not?'' questions about the examples they
    are given. We show how to distinguish parts of an example
    that cannot be consistently removed (or changed) from those
    that merely reflect underconstraint in the specification. In
    the former case we show how to determine which elements of
    the specification and which other components of the example
    together explain the presence of such facts. This paper
    formalizes the act of computing provenance in
    scenario-finding. We present Amalgam, an extension of the
    popular Alloy scenario-finder, which implements these
    foundations and provides interactive exploration of examples.
    We also evaluate Amalgam's algorithmics on a variety of both
    textbook and real-world examples.},
  booktitle = {Proceedings of the 2017 11th Joint Meeting on Foundations
    of Software Engineering, {ESEC/FSE} 2017, Paderborn, Germany,
    September 4-8, 2017},
  pages     = {106--116},
  year      = 2017,
  url       = {http://doi.acm.org/10.1145/3106237.3106272},
  pdf       = {fse17.pdf},
  timestamp = {Wed, 16 Aug 2017 08:10:24 +0200},
  biburl    = {http://dblp.uni-trier.de/rec/bib/conf/sigsoft/NelsonDDK17},
  bibsource = {dblp computer science bibliography, http://dblp.org}
}
@inproceedings{sefm17,
  author    = {Natasha Danas and Tim Nelson and Lane Harrison and
    Shriram Krishnamurthi and Daniel J. Dougherty},
  title     = {User Studies of Principled Model Finder Output},
  abstract  = {Model-finders such as SAT-solvers are attractive for
    producing concrete models, either as sample instances or as
    counterexamples when properties fail. However, the generated
    model is arbitrary. To address this, several research efforts
    have proposed principled forms of output from model-finders.
    These include minimal and maximal models, unsat cores, and
    proof-based provenance of facts. While these methods enjoy
    elegant mathematical foundations, they have not been
    subjected to rigorous evaluation on users to assess their
    utility. This paper presents user studies of these three
    forms of output performed on advanced students. We find that
    most of the output forms fail to be effective, and in some
    cases even actively mislead users. To make such studies
    feasible to run frequently and at scale, we also show how we
    can pose such studies on the crowdsourcing site Mechanical
    Turk.},
  booktitle = {Software Engineering and Formal Methods - 15th
    International Conference, {SEFM} 2017, Trento, Italy,
    September 4-8, 2017, Proceedings},
  pages     = {168--184},
  year      = 2017,
  url       = {https://doi.org/10.1007/978-3-319-66197-1_11},
  doi       = {10.1007/978-3-319-66197-1_11},
  pdf       = {sefm17.pdf},
  timestamp = {Thu, 24 Aug 2017 12:31:20 +0200},
  biburl    = {http://dblp.uni-trier.de/rec/bib/conf/sefm/DanasNHKD17},
  bibsource = {dblp computer science bibliography, http://dblp.org}
}
@inproceedings{edbt17,
  author    = {Olga Poppe and Chuan Lei and Elke A. Rundensteiner and
    Daniel J. Dougherty and Goutham Deva and Nicholas Fajardo and
    James Owens and Thomas Schweich and MaryAnn Van Valkenburg
    and Sarun Paisarnsrisomsuk and Pitchaya Wiratchotisatian and
    George Gettel and Robert Hollinger and Devin Roberts and
    Daniel Tocco},
  title     = {{CAESAR:} Context-Aware Event Stream Analytics for Urban
    Transportation Services},
  abstract  = {We demonstrate the first full-fledged context-aware event
    processing solution, called CAESAR, that supports application
    contexts as first class citizens. CAESAR offers
    human-readable specification of context-aware application
    semantics composed of context derivation and context
    processing. Both classes of queries are only relevant during
    their respective contexts. They are suspended otherwise to
    save resources and to speed up the system responsiveness to
    the current situation. Furthermore, we demonstrate the
    context-driven optimization techniques including context
    window push-down and query workload sharing among overlapping
    context windows. We illustrate the usability and performance
    gain of our CAESAR system by a use case scenario for urban
    transportation services using real data sets.},
  booktitle = {Proceedings of the 20th International Conference on
    Extending Database Technology, {EDBT} 2017, Venice, Italy,
    March 21-24, 2017.},
  pages     = {590--593},
  year      = 2017,
  url       = {https://doi.org/10.5441/002/edbt.2017.77},
  doi       = {10.5441/002/edbt.2017.77},
  pdf       = {edbt17.pdf},
  timestamp = {Sun, 21 May 2017 00:19:15 +0200},
  biburl    = {http://dblp.uni-trier.de/rec/bib/conf/edbt/PoppeLRDDFOSVPW17},
  bibsource = {dblp computer science bibliography, http://dblp.org}
}
@inproceedings{aplas16,
  author    = {Daniel J. Dougherty and Ugo de'Liguoro and Luigi Liquori
    and Claude Stolze},
  title     = {A Realizability Interpretation for Intersection and Union
    Types},
  booktitle = {Programming Languages and Systems - 14th Asian Symposium,
    {APLAS} 2016, Hanoi, Vietnam, November 21-23, 2016,
    Proceedings},
  pages     = {187--205},
  year      = 2016,
  abstract  = {{Proof-functional} logical connectives allow reasoning
    about the structure of logical proofs, in this way giving to
    the latter the status of {first-class} objects. This is in
    contrast to classical {truth-functional} connectives where
    the meaning of a compound formula is dependent only on the
    truth value of its subformulas. In this paper we present a
    typed lambda calculus, enriched with products, coproducts,
    and a related proof-functional logic. This calculus, directly
    derived from a typed calculus previously defined by two of
    the current authors, has been proved isomorphic to the
    well-known Barbanera-Dezani-Ciancaglini-de'Liguoro type
    assignment system. We present a logic L featuring two
    proof-functional connectives, namely strong conjunction and
    strong disjunction. We prove the typed calculus to be
    isomorphic to the logic L and we give a realizability
    semantics using {Mints' realizers} (Mints, 1989) and a
    completeness theorem. A prototype implementation is also
    described.},
  url       = {http://dx.doi.org/10.1007/978-3-319-47958-3_11},
  doi       = {10.1007/978-3-319-47958-3_11},
  timestamp = {Mon, 10 Oct 2016 15:12:54 +0200},
  biburl    = {http://dblp.uni-trier.de/rec/bib/conf/aplas/DoughertydLS16},
  bibsource = {dblp computer science bibliography, http://dblp.org},
  pdf       = {aplas16.pdf}
}
@inproceedings{edbt16,
  author    = {Olga Poppe and Chuan Lei and Elke A. Rundensteiner and
    Daniel J. Dougherty},
  title     = {Context-Aware Event Stream Analytics},
  booktitle = {Proceedings of the 19th International Conference on
    Extending Database Technology, {EDBT} 2016, Bordeaux, France,
    March 15-16, 2016, Bordeaux, France, March 15-16, 2016.},
  pages     = {413--424},
  year      = {2016},
  abstract  = {Complex event processing is a popular technology for
    continuously monitoring high-volume event streams from health
    care to traffic management to detect complex compositions of
    events. These event compositions signify critical application
    contexts from hygiene violations to traffic accidents.
    Certain event queries are only appropriate in particular
    contexts. Yet state-of-the-art streaming engines tend to
    execute all event queries continuously regardless of the
    current application context. This wastes tremendous
    processing resources and thus leads to delayed reactions to
    critical situations. We have developed the first
    context-aware event processing solution, called CAESAR,
    which features the following key innovations. (1) The CAESAR
    model supports application contexts as first class citizens
    and associates appropriate event queries with them. (2) The
    CAESAR optimizer employs context-aware optimization
    strategies including context window push-down strategy and
    query workload sharing among overlapping contexts. (3) The
    CAESAR infrastructure allows for lightweight event query
    suspension and activation driven by context windows. Our
    experimental study utilizing both the Linear Road stream
    benchmark as well as real-world data sets demonstrates that
    the context-aware event stream analytics consistently
    outperforms the state-of-the-art strategies by factor of 8 on
    average.},
  url       = {http://dx.doi.org/10.5441/002/edbt.2016.38},
  doi       = {10.5441/002/edbt.2016.38},
  timestamp = {Tue, 22 Mar 2016 18:14:55 +0100},
  biburl    = {http://dblp.uni-trier.de/rec/bib/conf/edbt/PoppeLRD16},
  bibsource = {dblp computer science bibliography, http://dblp.org}
}
@inproceedings{cade15,
  author    = {Salman Saghafi and Ryan Danas and Daniel J. Dougherty},
  title     = {Exploring Theories with a Model-Finding Assistant},
  booktitle = {Automated Deduction - {CADE-25} - 25th International
    Conference on Automated Deduction, Berlin, Germany, August
    1-7, 2015, Proceedings},
  editor    = {Amy P. Felty and Aart Middeldorp},
  series    = {Lecture Notes in Computer Science},
  volume    = {9195},
  publisher = {Springer},
  pages     = {434--449},
  year      = {2015},
  doi       = {10.1007/978-3-319-21401-6_30},
  timestamp = {Fri, 31 Jul 2015 13:18:27 +0200},
  biburl    = {http://dblp.uni-trier.de/rec/bib/conf/cade/SaghafiDD15},
  bibsource = {dblp computer science bibliography, http://dblp.org},
  abstract  = {We present an approach to understanding first-order
    theories by exploring their models. A typical use case is the
    analysis of artifacts such as policies, protocols,
    configurations, and software designs. For the analyses we
    offer, users are not required to frame formal properties or
    construct derivations. Rather, they can explore examples of
    their designs, confirming the expected instances and perhaps
    recognizing bugs inherent in surprising instances. Key
    foundational ideas include: the information preorder on
    models given by homomorphism, an inductively-defined
    refinement of the Herbrand base of a theory, and a notion of
    provenance for elements and facts in models. The
    implementation makes use of SMT-solving and an algorithm for
    minimization with respect to the information preorder on
    models. Our approach is embodied in a tool, Razor, that is
    complete for finite satisfiability and provides a
    read-eval-print loop used to navigate the set of finite
    models of a theory and to display provenance.},
  pdf       = {cade15.pdf}
}
@inproceedings{csf14,
  author    = {Daniel J. Dougherty and Joshua D. Guttman},
  title     = {Decidability for Lightweight {Diffie-Hellman} Protocols},
  booktitle = {{IEEE} 27th Computer Security Foundations Symposium, {CSF}
    2014, Vienna, Austria, 19-22 July, 2014},
  pages     = {217--231},
  year      = {2014},
  biburl    = {http://dblp.uni-trier.de/rec/bib/conf/csfw/DoughertyG14},
  bibsource = {dblp computer science bibliography, http://dblp.org},
  abstract  = {Many protocols use Diffie-Hellman key agreement, combined
    with certified long-term values or digital signatures for
    authentication. These protocols aim at security goals such as
    key secrecy, forward secrecy, resistance to key compromise
    attacks, and various flavors of authentication. However,
    these protocols are challenging to analyze, both in
    computational and symbolic models. An obstacle in the
    symbolic model is the undecidability of unification in many
    theories in the signature of rings. In this paper, we develop
    an algebraic version of the symbolic approach, working
    directly within finite fields, the natural structures for the
    protocols. The adversary, in giving an attack on a protocol
    goal in a finite field, may rely on any identity in that
    field. He defeats the protocol if there are attacks in
    infinitely many finite fields. We prove that, even for this
    strong adversary, security goals for a wide class of
    protocols are decidable.},
  pdf       = {csf14.pdf}
}
@inproceedings{paar14,
  author    = {Salman Saghafi and Daniel J. Dougherty},
  title     = {Razor: Provenance and Exploration in Model-Finding},
  booktitle = {4th Workshop on Practical Aspects of Automated Reasoning
    {(PAAR)}},
  year      = 2014,
  abstract  = {Razor is a model-finder for first-order theories presented
    in geometric form; geometric logic is a variant of
    first-order logic that focuses on ``observable'' properties.
    An important guiding principle of Razor is that it be
    accessible to users who are not necessarily expert in formal
    methods; application areas include software design, analysis
    of security protocols and policies, and configuration
    management. A core functionality of the tool is that it
    supports exploration of the space of models of a given input
    theory, as well as presentation of provenance information
    about the elements and facts of a model. The crucial
    mathematical tool is the ordering relation on models
    determined by homomorphism, and Razor prefers models that are
    minimal with respect to this homomorphism-ordering.},
  pdf       = {paar14.pdf}
}
@inproceedings{fcs14,
  author    = {John D. Ramsdell and Daniel J. Dougherty and
    Joshua D. Guttman and Paul D. Rowe},
  title     = {A Hybrid Analysis for Security Protocols with State},
  booktitle = {Joint Workshop on Foundations of Computer Security and
    Formal and Computational Cryptography {(FCS/FCC)}},
  year      = 2014,
  abstract  = {Cryptographic protocols rely on message-passing to
    coordinate activity among principals. Each principal
    maintains local state in individual local sessions only as
    needed to complete that session. However, in some protocols a
    principal also uses state to coordinate its different local
    sessions. Sometimes the non-local, mutable state is used as a
    means, for example with smart cards or Trusted Platform
    Modules. Sometimes it is the purpose of running the protocol,
    for example in commercial transactions. Many richly developed
    tools and techniques, based on well-understood foundations,
    are available for design and analysis of pure message-passing
    protocols. But the presence of cross-session state poses
    difficulties for these techniques. In this paper we provide a
    framework for modeling stateful protocols. We define a hybrid
    analysis method. It leverages theorem-proving---in this
    instance, the PVS prover---for reasoning about computations
    over state. It combines that with an ``enrich-by-need''
    approach---embodied by CPSA---that focuses on the
    message-passing part. As a case study we give a full analysis
    of the Envelope Protocol, due to Mark Ryan.},
  pdf       = {fcs14.pdf}
}
@inproceedings{hotsdn13,
  author    = {Tim Nelson and Arjun Guha and Daniel J. Dougherty and
    Kathi Fisler and Shriram Krishnamurthi},
  title     = {A Balance of Power: Expressive, Analyzable Controller
    Programming},
  booktitle = {{ACM} {SIGCOMM} Workshop on Hot Topics in Software Defined
    Networking},
  year      = 2013,
  abstract  = {Configuration languages for traditional network hardware
    are often fairly limited and hence easy to analyze.
    Programmable controllers for software-defined networks are
    far more flexible, but this flexibility results in more
    opportunities for mis-configuration and greatly complicates
    analyses. We propose a new network-programming paradigm that
    strikes a balance between expressive power and analysis,
    providing a highly analyzable core language while allowing
    the re-use of pre-existing code written in more complex
    production languages. As the first step we have created
    FlowLog, a declarative language for programming SDN
    controllers. We show that FlowLog is expressive enough to
    build some real controller programs. It is also a
    finite-state language, and thus amenable to many types of
    analysis, such as model-checking. In this paper we present
    FlowLog, show examples of controller programs, and discuss
    analyzing them.},
  pdf       = {hotsdn13.pdf}
}
@inproceedings{nelson_ICSE13,
  author    = {Tim Nelson and Salman Saghafi and Daniel J. Dougherty and
    Kathi Fisler and Shriram Krishnamurthi},
  title     = {Aluminum: Principled Scenario Exploration through
    Minimality},
  booktitle = {35th International Conference on Software Engineering
    (ICSE)},
  year      = 2013,
  pages     = {232--241},
  abstract  = {Scenario-finding tools such as Alloy are widely used to
    understand the consequences of specifications, with
    applications to software modeling, security analysis, and
    verification. This paper focuses on the exploration of
    scenarios: which scenarios are presented first, and how to
    traverse them in a well-defined way. We present Aluminum, a
    modification of Alloy that presents only minimal scenarios:
    those that contain no more than is necessary. Aluminum lets
    users explore the scenario space by adding to scenarios and
    backtracking. It also provides the ability to find what can
    consistently be used to extend each scenario. We describe the
    semantic basis of Aluminum in terms of minimal models of
    first-order logic formulas. We show how this theory can be
    implemented atop existing SAT-solvers and quantify both the
    benefits of minimality and its small computational overhead.
    Finally, we offer some qualitative observations about
    scenario exploration in Aluminum.},
  pdf       = {icse13-aluminum.pdf}
}
@inproceedings{DG12,
  author    = {Daniel J. Dougherty and Joshua D. Guttman},
  title     = {An Algebra for Symbolic {Diffie-Hellman} Protocol
    Analysis},
  booktitle = {Proceedings of the 7th International Symposium on
    Trustworthy Global Computing},
  year      = {2012},
  abstract  = {We study the algebra underlying symbolic protocol analysis
    for protocols using Diffie-Hellman operations. Diffie-Hellman
    operations act on a cyclic group of prime order, together
    with an exponentiation operator. The exponents form a finite
    field: this rich algebraic structure has resisted previous
    symbolic approaches. We define an algebra that validates
    precisely the equations that hold almost always as the order
    of the cyclic group varies. We realize this algebra as the
    set of normal forms of a particular rewriting theory. The
    normal forms allow us to define our crucial notion of
    indicator, a vector of integers that summarizes how many
    times each secret exponent appears in a message. We prove
    that the adversary can never construct a message with a new
    indicator in our adversary model. Using this invariant, we
    prove the main security goals achieved by UM, a protocol
    using Diffie-Hellman for implicit authentication.},
  pdf       = {tgc12.pdf}
}
@article{DBLP:journals/corr/abs-1202-2168,
  author        = {Daniel J. Dougherty and Joshua D. Guttman},
  title         = {Symbolic Protocol Analysis for {Diffie-Hellman}},
  journal       = {CoRR},
  volume        = {abs/1202.2168},
  year          = {2012},
  archiveprefix = {arXiv},
  eprint        = {1202.2168},
  ee            = {http://arxiv.org/abs/1202.2168},
  bibsource     = {DBLP, http://dblp.uni-trier.de},
  abstract      = {We extend symbolic protocol analysis to apply to
    protocols using Diffie-Hellman operations. Diffie-Hellman
    operations act on a cyclic group of prime order, together
    with an exponentiation operator. The exponents form a finite
    field. This rich algebraic structure has resisted previous
    symbolic approaches. We work in an algebra defined by the
    normal forms of a rewriting theory (modulo associativity and
    commutativity). These normal forms allow us to define our
    crucial notion of indicator, a vector of integers that
    summarizes how many times each secret exponent appears in a
    message. We prove that the adversary can never construct a
    message with a new indicator in our adversary model. Using
    this invariant, we prove the main security goals achieved by
    several different protocols that use Diffie-Hellman operators
    in subtle ways. We also give a model-theoretic justification
    of our rewriting theory: the theory proves all equations that
    are uniformly true as the order of the cyclic group varies.},
  pdf           = {iadh-arxiv.pdf}
}
@inproceedings{nelson_abz,
  author    = {Timothy Nelson and Daniel J. Dougherty and Kathi Fisler
    and Shriram Krishnamurthi},
  title     = {Toward a More Complete {Alloy}},
  booktitle = {Abstract State Machines, Alloy, B, VDM, and Z - Third
    International Conference, ABZ 2012, Pisa, Italy, June 18-21,
    2012. Proceedings},
  publisher = {Springer},
  series    = {Lecture Notes in Computer Science},
  volume    = 7316,
  year      = 2012,
  pages     = {136--149},
  ee        = {http://dx.doi.org/10.1007/978-3-642-30885-7_10},
  pdf       = {abz12-more-complete-alloy.pdf},
  abstract  = {Many model-finding tools, such as Alloy, charge users with
    providing bounds on the sizes of models. It would be
    preferable to automatically compute sufficient upper-bounds
    whenever possible. The Bernays-Sch\"onfinkel-Ramsey fragment
    of first-order logic can relieve users of this burden in some
    cases: its sentences are satisfiable iff they are satisfied
    in a finite model, whose size is computable from the input
    problem. Researchers have observed, however, that the class
    of sentences for which such a theorem holds is richer in a
    many-sorted framework---which Alloy inhabits---than in the
    one-sorted case. This paper studies this phenomenon in the
    general setting of order-sorted logic supporting overloading
    and empty sorts. We establish a syntactic condition
    generalizing the Bernays-Sch\"onfinkel-Ramsey form that
    ensures the Finite Model Property. We give a linear-time
    algorithm for deciding this condition and a polynomial-time
    algorithm for computing the bound on model sizes. As a
    consequence, model-finding is a complete decision procedure
    for sentences in this class. Our work has been incorporated
    into Margrave, a tool for policy analysis, and applies in
    real-world situations.}
}
@inproceedings{edbt12,
  author    = {Mo Liu and Medhabi Ray and Dazhi Zhang and Elke
    A. Rundensteiner and Daniel J. Dougherty and Chetan Gupta and
    Song Wang and Ismail Ari},
  title     = {Realtime healthcare services via nested complex event
    processing technology},
  booktitle = {15th International Conference on Extending Database
    Technology, EDBT '12, Berlin, Germany, March 27-30, 2012,
    Proceedings},
  year      = 2012,
  pages     = {622--625},
  ee        = {http://doi.acm.org/10.1145/2247596.2247681},
  abstract  = {Complex Event Processing (CEP) over event streams has
    become increasingly important for real-time applications
    ranging from healthcare to supply chain management. In such
    applications, arbitrarily complex sequence patterns as well
    as non existence of such complex situations must be detected
    in real time. To assure real-time responsiveness for
    detection of such complex pattern over high volume high-speed
    streams, efficient processing techniques must be designed.
    Unfortunately the efficient processing of complex sequence
    queries with negations remains a largely open problem to
    date. To tackle this shortcoming, we designed optimized
    strategies for handling nested CEP query. In this
    demonstration, we propose to showcase these techniques for
    processing and optimizing nested pattern queries on streams.
    In particular our demonstration showcases a platform for
    specifying complex nested queries, and selecting one of the
    alternative optimized techniques including sub-expression
    sharing and intermediate result caching to process them. We
    demonstrate the efficiency of our optimized strategies by
    graphically comparing the execution time of the optimized
    solution against that of the default processing strategy of
    nested CEP queries. We also demonstrate the usage of the
    proposed technology in several healthcare services.}
}
@inproceedings{DBLP:conf/icde/LiuRDGWAM11,
  author    = {Mo Liu and Elke A. Rundensteiner and Daniel J. Dougherty
    and Chetan Gupta and Song Wang and Ismail Ari and Abhay
    Mehta},
  title     = {High-performance nested {CEP} query processing over event
    streams},
  booktitle = {Proceedings of the 27th International Conference on Data
    Engineering, ICDE 2011, April 11-16, 2011, Hannover,
    Germany},
  year      = {2011},
  pages     = {123--134},
  ee        = {http://dx.doi.org/10.1109/ICDE.2011.5767839},
  bibsource = {DBLP, http://dblp.uni-trier.de},
  abstract  = {Complex event processing (CEP) over event streams has
    become increasingly important for real-time applications
    ranging from health care, supply chain management to business
    intelligence. These monitoring applications submit complex
    queries to track sequences of events that match a given
    pattern. As these systems mature the need for increasingly
    complex nested sequence query support arises, while the
    state-of-art CEP systems mostly support the execution of flat
    sequence queries only. To assure real-time responsiveness and
    scalability for pattern detection even on huge volume
    high-speed streams, efficient processing techniques must be
    designed. In this paper, we first analyze the prevailing
    nested pattern query processing strategy and identify several
    serious shortcomings. Not only are substantial subsequences
    first constructed just to be subsequently discarded, but also
    opportunities for shared execution of nested subexpressions
    are overlooked. As foundation, we introduce NEEL, a CEP query
    language for expressing nested CEP pattern queries composed
    of sequence, negation, AND and OR operators. To overcome
    deficiencies, we design rewriting rules for pushing negation
    into inner subexpressions. Next, we devise a normalization
    procedure that employs these rules for flattening a nested
    complex event expression. To conserve CPU and memory
    consumption, we propose several strategies for efficient
    shared processing of groups of normalized NEEL
    subexpressions. These strategies include prefix caching,
    suffix clustering and customized "bit-marking" execution
    strategies. We design an optimizer to partition the set of
    all CEP subexpressions in a NEEL normal form into groups,
    each of which can then be mapped to one of our shared
    execution operators. Lastly, we evaluate our technologies by
    conducting a performance study to assess the CPU processing
    time using real-world stock trades data. Our results confirm
    that our NEEL execution in many cases performs 100 fold
    faster than the traditional iterative nested execution
    strategy for real stock market query workloads.},
  pdf       = {icde11.pdf}
}
@inproceedings{DBLP:conf/icde/RayLRDGWMA11,
  author    = {Medhabi Ray and Mo Liu and Elke A. Rundensteiner and
    Daniel J. Dougherty and Chetan Gupta and Song Wang and Abhay
    Mehta and Ismail Ari},
  title     = {Optimizing complex sequence pattern extraction using
    caching},
  booktitle = {Workshops Proceedings of the 27th International Conference
    on Data Engineering, ICDE 2011, April 11-16, 2011, Hannover,
    Germany},
  editor    = {Serge Abiteboul and Klemens B{\"o}hm and Christoph Koch
    and Kian-Lee Tan},
  year      = {2011},
  pages     = {243--248},
  isbn      = {978-1-4244-9194-0},
  ee        = {http://dx.doi.org/10.1109/ICDEW.2011.5767641},
  bibsource = {DBLP, http://dblp.uni-trier.de},
  pdf       = {icde-wkshp-11.pdf},
  abstract  = {Complex Event Processing (CEP) has become increasingly
    important for tracking and monitoring complex event anomalies
    and trends in event streams emitted from business processes
    such as supply chain management to online stores in
    e-commerce. These monitoring applications submit complex
    event queries to track sequences of events that match a given
    pattern. The state-of-the-art CEP systems mostly focus on the
    execution of flat sequence queries, we instead support the
    execution of nested CEP queries specified by our NEsted Event
    Language NEEL. However, the iterative execution of nested CEP
    expressions often results in the repeated recomputation of
    the same or similar results for nested subexpressions as the
    window slides over the event stream. In this work we thus
    propose to optimize NEEL execution performance by caching
    intermediate results. In particular we design two methods of
    applying selective caching of intermediate results namely
    Object Caching and the Interval-Driven Semantic Caching.
    Techniques for incrementally loading, purging and exploiting
    the cache content are described. Our experimental study using
    real-world stock trades evaluates the performance of our
    proposed caching strategies for different query types.}
}
@inproceedings{nelson_margrave-tool,
author = {Timothy Nelson and Christopher Barratt and Daniel
J. Dougherty and Kathi Fisler and Shriram
Krishnamurthi},
booktitle = {Proceedings of the 24th USENIX Large Installation
System Administration Conference (LISA 2010)},
date-added = {2010-12-01 12:21:57 -0500},
date-modified = {2010-12-01 12:36:28 -0500},
title = {The {Margrave} Tool for Firewall Analysis},
year = {2010},
url = {lisa10.pdf},
abstract = {Writing and maintaining firewall configurations can be
challenging, even for experienced system
administrators. Tools that uncover the consequences of
configurations and edits to them can help sysadmins
prevent subtle yet serious errors. Our tool,
Margrave, offers powerful features for firewall
analysis, including enumerating consequences of
configuration edits, detecting overlaps and conflicts
among rules, tracing firewall behavior to specific
rules, and verification against security goals.
Margrave differs from other firewall-analysis tools in
supporting queries at multiple levels (rules, filters,
firewalls, and networks of firewalls), comparing
separate firewalls in a single query, supporting
reflexive ACLs, and presenting exhaustive sets of
concrete scenarios that embody queries. Margrave
supports real-world firewall-configuration languages,
decomposing them into multiple policies that capture
different aspects of firewall functionality. We
present evaluation on networking-forum posts and on an
in-use enterprise firewall-configuration.}
}
@inproceedings{c_lpar10,
author = {Daniel J. Dougherty and Luigi Liquori},
title = {Logic and computation in a lambda calculus with
intersection and union types},
booktitle = {Proc. 16th International Conference on Logic for
Programming Artificial Intelligence and Reasoning
(LPAR)},
year = 2010,
abstract = {We present an explicitly typed lambda calculus ``a la
Church'' based on the union and intersection types
discipline; this system is the counterpart of the
standard type assignment calculus ``a la Curry.'' Our
typed calculus enjoys Subject Reduction and
confluence, and typed terms are strongly normalizing
when the universal type is omitted. Moreover both
type checking and type reconstruction are decidable.
In contrast to other typed calculi, a system with
union types will fail to be ``coherent'' in the sense
of Tannen, Coquand, Gunter, and Scedrov: different
proofs of the same typing judgment will not
necessarily have the same meaning. In response, we
introduce a decidable notion of equality on
type-assignment derivations inspired by the equational
theory of bicartesian-closed categories.
\\ \\
Some supplementary material can be found here:
\url{lpar10-appendix.pdf}},
url = {lpar10.pdf}
}
@techreport{nelson_finite-model,
  author        = {Nelson, Timothy and Dougherty, Daniel J. and Fisler,
                   Kathi and Krishnamurthi, Shriram},
  title         = {On the Finite Model Property in Order-Sorted Logic},
  institution   = {Worcester Polytechnic Institute},
  year          = {2010},
  date-added    = {2010-10-20 11:52:34 -0400},
  date-modified = {2010-10-20 22:09:06 -0400},
  url           = {finite-model-tr.pdf},
  abstract      = {The Schoenfinkel-Bernays-Ramsey class is a fragment
                   of first-order logic with the Finite Model Property: a
                   sentence in this class is satisfiable if and only if
                   it is satisfied in a finite model. Since an upper
                   bound on the size of such a model is computable from
                   the sentence, the satisfiability problem for this
                   family is decidable. Sentences in this form arise
                   naturally in a variety of application areas, and
                   several popular reasoning tools explicitly target this
                   class. Others have observed that the class of
                   sentences for which such a finite model theorem holds
                   is richer in a many-sorted framework than in the
                   one-sorted case. This paper makes a systematic study
                   of this phenomenon in the general setting of
                   order-sorted logic supporting overloading and empty
                   sorts. We establish a syntactic condition
                   generalizing the Schoenfinkel-Bernays-Ramsey form that
                   ensures the Finite Model Property. We give a
                   linear-time algorithm for deciding this condition and
                   a polynomial-time algorithm for computing the bound on
                   model sizes. As a consequence, model-finding is a
                   complete decision procedure for sentences in this
                   class. Our algorithms have been incorporated into
                   Margrave, a tool for analysis of access-control and
                   firewall policies, and are available in a standalone
                   application suitable for analyzing input to the Alloy
                   model finder.},
}
@inproceedings{DBLP:conf/sigsoft/FislerKD10,
author = {Kathi Fisler and Shriram Krishnamurthi and Daniel
J. Dougherty},
title = {Embracing policy engineering},
booktitle = {Proceedings of the Workshop on Future of Software
Engineering Research, FoSER 2010, at the 18th ACM
SIGSOFT International Symposium on Foundations of
Software Engineering, 2010, Santa Fe, NM, USA,
November 7-11, 2010},
year = {2010},
pages = {109--110},
doi = {10.1145/1882362.1882385},
bibsource = {DBLP, http://dblp.uni-trier.de},
url = {foser10.pdf},
abstract = {Declarative policies play a central role in many
modern software systems. Engineering policies and
their interactions with programs raises many
interesting open questions.}
}
@inproceedings{DBLP:conf/dmsn/LiuRRDGWAM10,
author = {Mo Liu and Medhabi Ray and Elke A. Rundensteiner and
Daniel J. Dougherty and Chetan Gupta and Song Wang and
Ismail Ari and Abhay Mehta},
title = {Processing nested complex sequence pattern queries
over event streams},
booktitle = {Proceedings of the 7th Workshop on Data Management for
Sensor Networks, in conjunction with VLDB, DMSN 2010,
Singapore, September 13, 2010},
year = {2010},
pages = {14--19},
doi = {10.1145/1858158.1858164},
bibsource = {DBLP, http://dblp.uni-trier.de},
url = {dmsn10.pdf},
abstract = {Complex event processing (CEP) has become increasingly
important for tracking and monitoring applications
ranging from health care, supply chain management to
surveillance. These monitoring applications submit
complex event queries to track sequences of events
that match a given pattern. As these systems mature
the need for increasingly complex nested sequence
queries arises, while the state-of-the-art CEP systems
mostly focus on the execution of flat sequence queries
only. In this paper, we now introduce an iterative
execution strategy for nested CEP queries composed of
sequence, negation, AND and OR operators. Lastly we
have introduced the promising direction of applying
selective caching of intermediate results to optimize
the execution. Our experimental study using real-world
stock trades evaluates the performance of our proposed
iterative execution strategy for different query
types.}
}
@inproceedings{DBLP:journals/corr/abs-1001-4427,
author = {Tony Bourdier and Horatiu Cirstea and Daniel
J. Dougherty and H{\'e}l{\`e}ne Kirchner},
title = {Extensional and Intensional Strategies},
booktitle = {Proceedings Ninth International Workshop on Reduction
Strategies in Rewriting and Programming},
editor = {Maribel Fern{\'a}ndez},
year = 2009,
pages = {1--19},
series = {EPTCS},
volume = 15,
doi = {10.4204/EPTCS.15.1},
abstract = {This paper is a contribution to the theoretical
foundations of strategies. We first present a general
definition of abstract strategies which is extensional
in the sense that a strategy is defined explicitly as
a set of derivations of an abstract reduction
system. We then move to a more intensional definition
supporting the abstract view but more operational in
the sense that it describes a means for determining
such a set. We characterize the class of extensional
strategies that can be defined intensionally. We also
give some hints towards a logical characterization of
intensional strategies and propose a few challenging
perspectives.},
url = {wrs09.pdf}
}
@inproceedings{DBLP:conf/bncod/RaghavanZRD09,
author = {Venkatesh Raghavan and Yali Zhu and Elke
A. Rundensteiner and Daniel J. Dougherty},
title = {Multi-Join Continuous Query Optimization: Covering the
Spectrum of Linear, Acyclic, and Cyclic Queries},
booktitle = {Dataspace: The Final Frontier, 26th British National
Conference on Databases, BNCOD 26, Birmingham, UK,
July 7-9, 2009. Proceedings},
year = 2009,
series = {Lecture Notes in Computer Science},
volume = {5588},
pages = {91--106},
doi = {10.1007/978-3-642-02843-4\_11},
bibsource = {DBLP, http://dblp.uni-trier.de},
url = {bncod09.pdf},
abstract = {Traditional optimization algorithms that guarantee
optimal plans have exponential time complexity and are
thus not viable in streaming contexts. Continuous
query optimizers commonly adopt heuristic techniques
such as Adaptive Greedy to attain polynomial-time
execution. However, these techniques are known to
produce optimal plans only for linear and star shaped
join queries. Motivated by the prevalence of acyclic,
cyclic and even complete query shapes in stream
applications, we conduct an extensive experimental
study of the behavior of the state-of-the-art
algorithms. This study has revealed that
heuristic-based techniques tend to generate
sub-standard plans even for simple acyclic join
queries. For general acyclic join queries we extend
the classical IK approach to the streaming context to
define an algorithm TreeOpt that is guaranteed to find
an optimal plan in polynomial time. For the case of
cyclic queries, for which finding optimal plans is
known to be NP-complete, we present an algorithm FAB
which improves other heuristic-based techniques by (i)
increasing the likelihood of finding an optimal plan
and (ii) improving the effectiveness of finding a
near-optimal plan when an optimal plan cannot be found
in polynomial time. To handle the entire spectrum of
query shapes from acyclic to cyclic we propose a
Q-Aware approach that selects the optimization
algorithm used for generating the join order, based on
the shape of the query.}
}
@inproceedings{w_rule09,
author = {Daniel J.\ Dougherty},
title = {An Improved Algorithm for Generating Database
Transactions from Relational Algebra Specifications},
booktitle = {Proc. 10th International Workshop on Rule-Based
Programming (RULE)},
year = 2009,
abstract = {Alloy is a lightweight modeling formalism based on
relational algebra. In prior work with Fisler,
Giannakopoulos, Krishnamurthi, and Yoo, we have
presented a tool, Alchemy, that compiles Alloy
specifications into implementations that execute
against persistent databases. The foundation of
Alchemy is an algorithm for rewriting relational
algebra formulas into code for database transactions.
In this paper we report on recent progress in
improving the robustness and efficiency of this
transformation.},
url = {rule09.pdf}
}
@inproceedings{c_fm09,
author = {Theophilos Giannakopoulos and Daniel J.\ Dougherty and
Kathi Fisler and Shriram Krishnamurthi},
title = {Towards an Operational Semantics for {A}lloy},
booktitle = {Proc. 16th International Symposium on Formal Methods},
year = 2009,
pages = {483--498},
abstract = {The Alloy modeling language has a mathematically
rigorous denotational semantics based on relational
algebra. Alloy specifications often represent
operations on a state, suggesting a transition-system
semantics. Because Alloy does not intrinsically
provide a notion of state, however, this
interpretation is only implicit in the
relational-algebra semantics underlying the Alloy
Analyzer. In this paper we demonstrate the subtlety
of representing state in Alloy specifications. We
formalize a natural notion of transition semantics for
state-based specifications and show examples of
specifications in this class for which analysis based
on relational algebra can induce false confidence in
designs. We characterize the class of facts that
guarantees that Alloy's analysis is sound for
state-transition systems, and offer a sufficient
syntactic condition for membership in this class. We
offer some practical evaluation of the utility of this
syntactic discipline and show how it provides a
foundation for program synthesis from Alloy.},
url = {fm09.pdf}
}
@proceedings{e_secret08,
  title  = {Proceedings of the Third International Workshop on
            Security and Rewriting Techniques (SecReT 2008)},
  editor = {Daniel J. Dougherty and Santiago Escobar},
  series = {Electronic Notes in Theoretical Computer Science},
  volume = {234},
  pages  = {1--2},
  year   = {2009},
  issn   = {1571-0661},
}
@inproceedings{c_fse08,
author = {Shriram Krishnamurthi and Daniel J. Dougherty and
Kathi Fisler and Daniel Yoo},
title = {Alchemy: Transmuting Base {A}lloy Specifications into
Implementations},
booktitle = {ACM SIGSOFT International Symposium on the Foundations
of Software Engineering},
year = 2008,
pages = {158--169},
abstract = { Alloy specifications are used to define lightweight
models of systems. We present Alchemy, which compiles
Alloy specifications into implementations that execute
against persistent databases. Alchemy translates a
subset of Alloy predicates into imperative update
operations, and it converts facts into database
integrity constraints that it maintains automatically
in the face of these imperative actions. In addition
to presenting the semantics and an algorithm for this
compilation, we present the tool and outline its
application to a non-trivial specification. We also
discuss lessons learned about the relationship between
Alloy specifications and imperative implementations.},
url = {fse08.pdf}
}
@article{a_fest07,
author = {Daniel J. Dougherty and Silvia Ghilezan and Pierre
Lescanne},
title = {Characterizing strong normalization in the
{C}urien-{H}erbelin symmetric lambda calculus:
extending the {C}oppo-{D}ezani heritage},
journal = {Theor. Comput. Sci.},
volume = 398,
number = {1-3},
year = 2008,
issn = {0304-3975},
pages = {114--128},
publisher = {Elsevier Science Publishers Ltd.},
address = {Essex, UK},
abstract = {We develop an intersection type system for the
{$\lambda\mu\tilde{\mu}$} calculus of Curien and Herbelin.
This calculus provides a symmetric computational
interpretation of classical sequent style logic and
gives a simple account of call-by-name and
call-by-value. The present system improves on earlier
type disciplines for the calculus: in addition to
characterizing the expressions that are strongly
normalizing under free (unrestricted) reduction, the
system enjoys the Subject Reduction and the Subject
Expansion properties.},
url = {CDR-festschrift.pdf}
}
@inproceedings{c_dfk_esorics07,
author = {Daniel J. Dougherty and Kathi Fisler and Shriram
Krishnamurthi},
title = {Obligations and their Interaction with Programs},
booktitle = {12th European Symposium On Research In Computer
Security (ESORICS)},
series = {Lecture Notes in Computer Science},
volume = 4734,
year = 2007,
pages = {375--389},
abstract = {Obligations are pervasive in modern systems, often
linked to access control decisions. We present a very
general model of obligations as objects with state,
and discuss its interaction with a program's
execution. We describe several analyses that the
model enables, both static (for verification) and
dynamic (for monitoring). This includes a systematic
approach to approximating obligations for enforcement.
We also discuss some extensions that would enable
practical policy notations. Finally, we evaluate the
robustness of our model against standard definitions
from jurisprudence.},
url = {dfk-esorics07.pdf}
}
@inproceedings{c_dkks_esorics07,
author = {Daniel J. Dougherty and Claude Kirchner and H{\'e}l{\`e}ne
Kirchner and Anderson Santana de Oliveira},
title = {Modular Access Control via Strategic Rewriting},
booktitle = {12th European Symposium On Research In Computer
Security (ESORICS)},
series = {Lecture Notes in Computer Science},
volume = 4734,
year = 2007,
pages = {578--593},
abstract = {Security policies, in particular access control, are
fundamental elements of computer security. We address
the problem of authoring and analyzing policies in a
modular way using techniques developed in the field of
term rewriting, focusing especially on the use of
rewriting strategies. Term rewriting supports a
formalization of access control with a clear
declarative semantics based on equational logic and an
operational semantics guided by strategies.
Well-established term rewriting techniques allow us to
check properties of policies such as the absence of
conflicts and the property of always returning a
decision. A rich language for expressing rewriting
strategies is used to define a theory of modular
construction of policies in which we can better
understand the preservation of properties of policies
under composition. The robustness of the approach is
illustrated on the composition operators of XACML.},
url = {dkks-esorics07.pdf}
}
@techreport{tr_07,
author = {Daniel J. Dougherty},
title = {Core {XACML} and Term Rewriting},
institution = {Worcester Polytechnic Institute},
year = 2007,
number = {WPI-CS-TR-07-07},
abstract = {We define a notion of ``core'' XACML policies and show how
these can be represented as ground
associative-commutative term rewriting systems with
strategies.},
url = {xacml-and-trs.pdf}
}
@inproceedings{c_wst07,
author = {Daniel J. Dougherty and Silvia Ghilezan and Pierre
Lescanne},
title = {A General Technique for Analyzing Termination in
Symmetric Proof Calculi},
booktitle = {9th International Workshop on Termination (WST)},
year = 2007,
abstract = {Proof-term calculi expressing a computational
interpretation of classical logic serve as tools for
extracting the constructive content of classical
proofs and at the same time can be seen as pure
programming languages with explicit representation of
control. The inherent symmetry in the underlying
logic presents technical challenges to the study of
the reduction properties of these systems. We explore
the use of intersection types for these calculi, note
some of the challenges inherent in applying
intersection types in a symmetric calculus, and show
how to overcome these difficulties. The approach is
applicable to a variety of systems under active
investigation in the current literature; we apply our
techniques in a specific case study: characterizing
termination in the symmetric lambda-calculus of
Barbanera and Berardi.},
url = {wst07.pdf}
}
@inproceedings{c_ijcar_06,
author = {Daniel J. Dougherty and Kathi Fisler and
Shriram Krishnamurthi},
title = {Specifying and Reasoning about Dynamic
Access-Control Policies},
booktitle = {3rd International Joint Conference on Automated
Reasoning (IJCAR)},
series = {Lecture Notes in Computer Science},
volume = 4130,
year = 2006,
pages = {632--646},
abstract = {Access-control policies have grown from simple
matrices to non-trivial specifications written
in sophisticated languages. The increasing
complexity of these policies demands
correspondingly strong automated reasoning
techniques for understanding and debugging
them. The need for these techniques is even
more pressing given the rich and dynamic nature
of the environments in which these policies
evaluate. We define a framework to represent
the behavior of access-control policies in a
dynamic environment. We then specify several
interesting, decidable analyses using
first-order temporal logic. Our work
illustrates the subtle interplay between
logical and state-based methods, particularly
in the presence of three-valued policies. We
also define a notion of policy equivalence that
is especially useful for modular reasoning. },
url = {ijcar06.pdf}
}
@article{a_mscs_06,
  author   = {Daniel J. Dougherty and Pierre Lescanne and
              Luigi Liquori},
  title    = {Addressed Term Rewriting Systems: Application
              to a Typed Object Calculus},
  journal  = {Mathematical Structures in Computer Science},
  volume   = {16},
  pages    = {667--709},
  year     = {2006},
  abstract = {We present a formalism called \emph{Addressed
              Term Rewriting Systems,} which can be used to
              model implementations of theorem proving,
              symbolic computation, and programming
              languages, especially aspects of sharing,
              recursive computations and cyclic data
              structures. Addressed Term Rewriting Systems
              are therefore well suited for describing
              object-based languages, and as an example we
              present a language
              incorporating both functional and object-based
              features. As a case study in how reasoning
              about languages is supported in the ATRS
              formalism a type system is defined
              and a type soundness result is proved.},
  url      = {lambda-obj-mscs.pdf},
}
@article{a_sigmod06,
author = {Murali Mani and Song Wang and Daniel
J. Dougherty and Elke A. Rundensteiner},
title = {Join Minimization in {XML}-to-{SQL} Translation:
An Algebraic Approach},
journal = {Bulletin of ACM SIGMOD},
volume = {35},
number = {1},
year = {2006},
abstract = {Consider an XML view defined over a relational
database, and a user query specified over this
view. This user XML query is typically
processed using the following steps: (a) our
translator maps the XML query to one or more
SQL queries, (b) the relational engine
translates an SQL query to a relational algebra
plan, (c) the relational engine executes the
algebra plan and returns SQL results, and (d)
our translator translates the SQL results back
to XML. However, a straightforward approach
produces a relational algebra plan after step
(b) that is inefficient and has redundant
joins. In this paper, we report on our
preliminary observations with respect to how
joins in such a relational algebra plan can be
minimized, while maintaining bag semantics. Our
approach works on the relational algebra plan
and optimizes it using novel rewrite rules that
consider pairs of joins in the plan and
determine whether one of them is redundant and
hence can be removed. Our study shows that
algebraic techniques achieve effective join
minimization, and such techniques are useful
and can be integrated into mainstream SQL
engines. },
url = {sigmod06.pdf}
}
@article{a_relations_tcs_06,
author = {Daniel J. Dougherty and Claudio Guti{\'e}rrez},
title = {Normal Forms and Reduction for Theories of
Binary Relations},
journal = {Theoretical Computer Science},
year = 2006,
abstract = {We consider the representable equational theory
of binary relations, in a language expressing
composition, converse, and lattice
operations. By working directly with a
presentation of relation expressions as graphs
we are able to define a notion of reduction
which is confluent and strongly normalizing and
induces a notion of computable normal form for
terms. This notion of reduction thus leads to a
computational interpretation of the
representable theory. },
volume = 360,
number = {1--3},
pages = {228--246},
doi = {10.1016/j.tcs.2006.03.023},
url = {relations-tcs.pdf}
}
@inproceedings{c_rta00,
author = {Daniel J.\ Dougherty and Claudio Guti{\'e}rrez},
title = {Normal Forms and Reduction for Theories of Binary
Relations},
pages = {95--109},
year = 2000,
booktitle = {Proc.\ Eleventh Intl.\ Conf.\ on Rewriting Techniques
and Applications (RTA)},
volume = 1833,
series = {Lecture Notes in Computer Science}
}
@inproceedings{c_lpar_05,
author = {Daniel J. Dougherty and Silvia Ghilezan and
Silvia Likavec and Pierre Lescanne},
title = {Strong normalization of the dual classical
sequent calculus},
booktitle = {12th International Conference on Logic for
Programming, Artificial Intelligence and
Reasoning (LPAR)},
year = 2005,
abstract = {We investigate some syntactic properties of
Wadler's dual calculus, a term calculus which
corresponds to classical sequent logic in the
same way that Parigot's {$\lambda\mu$} calculus
corresponds to classical natural deduction. Our
main result is strong normalization theorem for
reduction in the dual calculus; we also prove
some confluence results for the typed and
untyped versions of the system.},
url = {lpar05.pdf}
}
@article{a_entcs_05a,
author = {Daniel J. Dougherty and Pierre Lescanne and
Luigi Liquori and Frederic Lang},
title = {Addressed Term Rewriting Systems: Syntax,
Semantics, and Pragmatics: Extended Abstract},
journal = {Electr. Notes Theor. Comput. Sci.},
volume = 127,
number = 5,
year = 2005,
pages = {57--82},
abstract = { We present a formalism called \emph{Addressed Term
Rewriting Systems,} which can be used to define
the operational semantics of programming
languages, especially those involving sharing,
recursive computations and cyclic data
structures. Addressed Term Rewriting Systems
are therefore well suited for describing
object-based languages, as for instance the
family of languages called {$\lambda$}-Obja,
involving both functional and object-based
features.},
doi = {10.1016/j.entcs.2004.12.042},
url = {entcs05-termgraph.pdf}
}
@article{a_entcs_05b,
author = {Daniel J. Dougherty and Silvia Ghilezan and
Pierre Lescanne},
title = {Intersection and Union Types in the
{$\lambda\mu\tilde{\mu}$} calculus},
journal = {Electr. Notes Theor. Comput. Sci.},
volume = 136,
year = 2005,
pages = {153--172},
abstract = {The original {$\lambda\mu\tilde{\mu}$} calculus of Curien
and Herbelin has a system of simple types,
based on sequent calculus, embodying a
Curry-Howard correspondence with classical
logic. We introduce and discuss three type
assignment systems that are extensions of
{$\lambda\mu\tilde{\mu}$} with intersection and union
types. The intrinsic symmetry in the
{$\lambda\mu\tilde{\mu}$} calculus leads to an essential
use of both intersection and union types.},
doi = {10.1016/j.entcs.2005.06.010},
url = {entcs05-ITRS.pdf}
}
@article{a_ipl04,
author = {Daniel J. Dougherty and Stanley M. Selkow},
title = {The complexity of the certification of
properties of {Stable Marriage}},
journal = {Inf. Process. Lett.},
volume = {92},
number = {6},
year = {2004},
pages = {275--277},
doi = {10.1016/j.ipl.2004.09.006},
abstract = { We give some lower bounds on the certificate
complexity of some problems concerning stable
marriage, answering a question of Gusfield and
Irving.},
url = {ipl.pdf}
}
@inproceedings{c_ppdp04,
author = {Daniel J. Dougherty and Silvia Ghilezan and
Pierre Lescanne},
title = {Characterizing strong normalization in a
language with control operators},
booktitle = {6th ACM-SIGPLAN International Conference on
Principles and Practice of Declarative
Programming (PPDP)},
year = 2004,
abstract = {We investigate some fundamental properties of
the reduction relation in the untyped term
calculus derived from Curien and Herbelin's
lambda-mu-mu~. The original lambda-mu-mu~ has a
system of simple types, based on sequent
calculus, embodying a Curry-Howard
correspondence with classical logic; the
significance of the untyped calculus of raw
terms is that it is a Turing-complete language
for computation with explicit representation of
control as well as code. We define a type
assignment system for the raw terms satisfying:
a term is typable if and only if it is strongly
normalizing. The
intrinsic symmetry in the calculus leads to an
essential use of both intersection and union
types; in contrast to other union-types systems
in the literature, our system enjoys the
Subject Reduction property.},
url = {ppdp04.pdf}
}
@inproceedings{w_hor04,
author = {Daniel J. Dougherty and Carlos Martinez},
title = {Unification and matching modulo type
isomorphism},
booktitle = {2nd International Workshop on Higher-Order
Rewriting},
year = 2004,
url = {hor04.pdf},
abstract = {We present some initial results in an
investigation of higher-order unification and
matching in the presence of type isomorphism}
}
@article{a_iandc04,
  author   = {Stephane Lengrand and Pierre Lescanne and
              Daniel J.\ Dougherty and Mariangiola
              Dezani-Ciancaglini and Steffen van Bakel},
  title    = {Intersection types for explicit substitutions },
  journal  = {Information and Computation},
  volume   = {189},
  number   = {1},
  pages    = {17--42},
  year     = {2004},
  abstract = {We present a new system of intersection types
              for a composition-free calculus of explicit
              substitutions with a rule for garbage
              collection, and show that it characterizes
              those terms which are strongly
              normalizing. This system extends previous work
              on the natural generalization of the classical
              intersection types system, which characterized
              head normalization and weak normalization, but
              was not complete for strong normalization. An
              important role is played by the notion of
              available variable in a term, which is a
              generalization of the classical notion of free
              variable. },
  url      = {iandc04.pdf},
}
@article{a_mscs03,
  author   = {Daniel J.\ Dougherty and Pierre Lescanne},
  title    = {Reductions, intersection types, and explicit
              substitutions},
  journal  = {Mathematical Structures in Computer Science},
  volume   = {13},
  number   = {1},
  pages    = {55--85},
  year     = {2003},
  abstract = { This paper is part of a general programme of
              treating explicit substitutions as the primary
              lambda-calculi from the point of view of
              foundations as well as applications. We work in
              a composition-free calculus of explicit
              substitutions and an augmented calculus
              obtained by adding explicit garbage-collection,
              and explore the relationship between
              intersection-types and reduction. We show that
              the terms which normalize by leftmost reduction
              and the terms which normalize by head reduction
              can each be characterized as the terms typable
              in a certain system. The relationship between
              typability and strong normalization is subtly
              different from the classical case: we show that
              typable terms are strongly normalizing but give
              a counterexample to the converse. Our notions
              of leftmost- and head-reduction are
              non-deterministic, and our normalization
              theorems apply to any computations obeying
              these strategies. In this way we refine and
              strengthen the classical normalization
              theorems. The proofs require some new
              techniques in the presence of reductions
              involving explicit substitutions. Indeed, in
              our proofs we do not rely on results from
              classical lambda-calculus, which in our view is
              subordinate to the calculus of explicit
              substitution. },
  url      = {mscs03.pdf},
}
@inproceedings{c_tcs02,
author = {Daniel J.\ Dougherty and Pierre Lescanne and
St\'ephane Lengrand},
title = {An improved system of intersection types for
explicit substitutions},
booktitle = {Proc. Conf. Foundations of Information
Technology in the Era of Network and Mobile
Computing},
pages = {511--524},
year = 2002,
organization = {2nd IFIP International Conference on
Theoretical Computer Science},
abstract = { We characterize those terms which are strongly
normalizing in a composition-free calculus of
explicit substitutions by defining a suitable
type system using intersection types. The key
idea is the notion of available variable in a
term, which is a generalization of the
classical notion of free variable. }
}
@inproceedings{c_tlca01,
author = {Daniel J.\ Dougherty and Pierre Lescanne},
title = {Reductions, intersection types, and explicit
substitutions (extended abstract)},
pages = {121--135},
year = 2001,
booktitle = {Proc.\ 5th Int. Conf. on Typed Lambda Calculus
and Applications (TLCA)},
editor = {Samson Abramsky},
volume = 2044,
series = {Lecture Notes in Computer Science},
address = {Krakow, Poland},
url = {tlca01.pdf}
}
@inproceedings{c_rta02,
author = {Daniel J.\ Dougherty and Tomasz Wierzbicki},
title = {A Decidable Variant of Higher Order Matching},
booktitle = {Proc.\ Thirteenth Intl.\ Conf.\ on Rewriting
Techniques and Applications (RTA)},
pages = {340--351},
year = 2002,
volume = 2378,
series = {Lecture Notes in Computer Science},
abstract = {A lambda term is k-duplicating if every
occurrence of a lambda abstractor binds at most
$k$ variable occurrences. We prove that the
problem of higher order matching where
solutions are required to be k-duplicating (but
with no constraints on the problem instance
itself) is decidable. We also show that the
problem of higher order matching in the affine
lambda calculus (where both the problem
instance and the solutions are constrained to
be 1-duplicating) is in NP, generalizing
de~Groote's result for the linear lambda
calculus. },
url = {rta02.pdf}
}
@inproceedings{w_westapp01,
author = {F. Lang and P. Lescanne and L. Liquori and
Daniel J.\ Dougherty and K. Rose},
title = {Obj+a, a generic object-calculus based on
addressed term rewriting systems with modules},
booktitle = {WESTAPP 01: The Fourth International Workshop
on Explicit Substitutions: Theory and
Applications to Programs and Proofs},
year = 2001
}
@article{a_iandc00,
  author   = {Daniel J.\ Dougherty and Ramesh Subrahmanyam},
  title    = {Equality between Functionals in the Presence of
              Coproducts},
  journal  = {Information and Computation},
  volume   = 157,
  number   = {1--2},
  pages    = {52--83},
  year     = 2000,
  abstract = {We consider the lambda-calculus obtained from
              the simply-typed calculus by adding products,
              coproducts, and a terminal type. We prove the
              following theorem: The equations provable in
              this calculus are precisely those true in any
              set-theoretic model with an infinite base
              type. },
  url      = {iandc00.pdf}
}
@inproceedings{c_lics95,
  author       = {Daniel J.\ Dougherty and Ramesh Subrahmanyam},
  title        = {Equality between Functionals in the Presence of
                  Coproducts},
  booktitle    = {Proc.\ Tenth Annual {IEEE} Symposium on Logic
                  in Computer Science},
  pages        = {282--291},
  year         = 1995,
  address      = {San Diego, California},
  organization = {IEEE Computer Society Press}
}
@techreport{t_ldllr99,
  author      = {Fr{\'e}d{\'e}ric Lang and Daniel J. Dougherty and
                 Pierre Lescanne and Luigi Liquori and
                 Kristoffer Rose},
  title       = {Addressed term rewriting systems},
  institution = {{\'E}cole Normale Sup{\'e}rieure de Lyon},
  number      = {RR 1999-30},
  year        = 1999,
  abstract    = {We propose Addressed Term Rewriting Systems
                 (ATRS) as a solution to the still-standing
                 problem of finding a simple yet formally useful
                 framework that can account for computation with
                 sharing, cycles, and side effects. ATRS are
                 characterised by the combination of three
                 features: they work with terms where structural
                 induction is useful, they use a minimal
                 ``back-pointer'' representation of cyclic data,
                 and they ensure a bounded complexity of rewrite
                 steps by eliminating implicit pointer
                 redirection. In the paper we develop this and
                 show how it is a very useful compromise between
                 less abstract term graph rewriting and the more
                 abstract equational rewriting. },
  url         = {ldlr99.pdf}
}
@techreport{t_llldr99,
  author      = {F. Lang and P. Lescanne and L. Liquori and
                 Daniel J.\ Dougherty and K. Rose},
  title       = {A {Generic} {Object-Calculus} {Based} on
                 {Addressed} {Term} {Rewriting} {Systems}},
  institution = {Laboratoire de l'Informatique et du
                 Parall{\'e}lisme, ENS Lyon},
  number      = {RR1999-54},
  year        = 1999,
  url         = {llldr99.pdf}
}
@article{a_tcs98,
  author   = {Friedrich Otto and Paliath Narendran and Daniel
              J.\ Dougherty},
  title    = {Equational unification, word unification,
              and $2$nd-order equational unification},
  journal  = {Theoretical Computer Science},
  volume   = 198,
  number   = {1--2},
  pages    = {1--47},
  day      = 30,
  year     = 1998,
  abstract = { For finite convergent term-rewriting systems
              the equational unification problem is shown to
              be recursively independent of the equational
              matching problem, the word matching problem,
              and the (simultaneous) 2nd-order equational
              matching problem. We also present some new
              decidability results for simultaneous
              equational unification and 2nd-order equational
              matching. },
  url      = {tcs98.pdf}
}
@inproceedings{c_rta95,
  author    = {Friedrich Otto and Paliath Narendran and Daniel
               J. Dougherty},
  title     = {Some independence results for equational
               unification},
  booktitle = {Proc.\ Sixth Intl.\ Conf.\ on Rewriting Techniques
               and Applications (RTA 1995)},
  editor    = {Jieh Hsiang},
  series    = {Lecture Notes in Computer Science},
  volume    = 914,
  pages     = {367--381},
  year      = 1995
}
@article{a_tcs95,
  author     = {Daniel J.\ Dougherty and Patricia Johann},
  title      = {A combinatory logic approach to higher-order
                {$E$}-unification},
  journal    = {Theoretical Computer Science},
  volume     = {139},
  number     = {1--2},
  pages      = {207--242},
  day        = {06},
  year       = {1995},
  issn       = {0304-3975},
  pubcountry = {Netherlands},
  abstract   = {Let E be a first-order equational theory. A
                translation of typed higher-order E-unification
                problems into a typed combinatory logic
                framework is presented and justified. The case
                in which E admits presentation as a convergent
                term rewriting system is treated in detail: in
                this situation, a modification of ordinary
                narrowing is shown to be a complete method for
                enumerating higher-order E-unifiers. In fact,
                we treat a more general problem, in which the
                types of terms contain type variables. },
  url        = {tcs95.pdf}
}
@inproceedings{c_cade92,
  author    = {Daniel J.\ Dougherty and Patricia Johann},
  title     = {A Combinatory Logic Approach to Higher-Order
               {{$E$}-Unification}},
  booktitle = {Proc.\ 11th International Conference on
               Automated Deduction ({CADE}-11)},
  editor    = {Deepak Kapur},
  series    = {Lecture Notes in Artificial Intelligence},
  volume    = 607,
  pages     = {79--93},
  isbn      = {3-540-55602-8},
  address   = {Saratoga Springs, NY},
  year      = 1992
}
@inproceedings{c_rta93,
  author    = {Daniel J.\ Dougherty},
  title     = {Some lambda calculi with categorical sums and
               products},
  booktitle = {Proc.\ 5th International Conference on
               Rewriting Techniques and Applications ({RTA})},
  editor    = {Claude Kirchner},
  series    = {Lecture Notes in Computer Science},
  volume    = 690,
  pages     = {137--151},
  isbn      = {3-540-56868-9},
  publisher = {Springer-Verlag},
  address   = {Berlin},
  year      = 1993,
  abstract  = { We consider the simply typed
               $\lambda$-calculus with primitive recursion
               operators and types corresponding to
               categorical products and coproducts. The
               standard equations corresponding to
               extensionality and to surjectivity of pairing
               and its dual are oriented as expansion
               rules. Strong normalization and ground
               (base-type) confluence is proved for the full
               calculus; full confluence is proved for the
               calculus omitting the rule for strong sums. In
               the latter case, fixed-point constructors may
               be added while retaining confluence. },
  url       = {rta93.pdf}
}
@article{a_ndjfl93,
  author   = {Daniel J.\ Dougherty},
  title    = {Closed Categories and Categorial Grammar},
  journal  = {Notre Dame Journal of Formal Logic},
  volume   = 34,
  number   = 1,
  pages    = {36--49},
  year     = 1993,
  abstract = {Inspired by Lambek's work on categorial
              grammar, we examine the proposal that the
              theory of biclosed monoidal categories can
              serve as a foundation for a formal theory of
              natural language. The emphasis throughout is on
              the derivation of the axioms for these
              categories from linguistic intuitions. When
              Montague's principle that there is a
              homomorphism between syntax and semantics is
              refined to the principle that meaning is a
              functor between a syntax-category and a
              semantics-category, the fundamental properties
              of biclosed categories induce a rudimentary
              computationally oriented theory of language. },
  url      = {ndjfl93.pdf}
}
@article{a_tcs93,
  author   = {Daniel J.\ Dougherty},
  title    = {Higher-order unification via combinators},
  journal  = {Theoretical Computer Science},
  volume   = 114,
  number   = 2,
  pages    = {273--298},
  day      = 21,
  year     = 1993,
  abstract = {We present an algorithm for unification in the
              simply typed lambda calculus which enumerates
              complete sets of unifiers using a finitely
              branching search space. In fact, the types of
              terms may contain type-variables, so that a
              solution may involve type-substitution as well
              as term-substitution. The problem is first
              translated into the problem of unification with
              respect to extensional equality in combinatory
              logic, and the algorithm is defined in terms of
              transformations on systems of combinatory
              terms. These transformations are based on a new
              method (itself based on systems) for deciding
              extensional equality between typed combinatory
              logic terms. },
  url      = {tcs93.pdf}
}
@article{a_jsc92,
  author   = {Daniel J.\ Dougherty and Patricia Johann},
  title    = {An improved general ${E}$-unification method},
  journal  = {Journal of Symbolic Computation},
  volume   = 14,
  number   = 4,
  pages    = {303--320},
  year     = 1992,
  issn     = {0747-7171},
  abstract = {A generalization of Paramodulation is defined
              and shown to lead to a complete E-unification
              method for arbitrary equational theories E. The
              method is defined in terms of transformations
              on systems, building upon and refining results
              of Gallier and Snyder. },
  url      = {jsc92.pdf}
}
@inproceedings{c_cade90,
  author    = {Daniel J.\ Dougherty and Patricia Johann},
  title     = {An Improved General {$E$}-unification Method},
  booktitle = {Proc.\ 10th International Conference on
               Automated Deduction, Kaiserslautern (Germany)},
  editor    = {M. Stickel},
  series    = {Lecture Notes in Computer Science},
  volume    = {449},
  pages     = {261--275},
  year      = {1990}
}
@article{a_iandc92,
  author   = {Daniel J.\ Dougherty},
  title    = {Adding Algebraic Rewriting to the Untyped
              Lambda Calculus},
  journal  = {Information and Computation},
  volume   = 101,
  number   = 2,
  pages    = {251--267},
  year     = 1992,
  abstract = {We investigate the system obtained by adding an
              algebraic rewriting system $R$ to an untyped
              lambda calculus in which terms are formed using
              the function symbols from $R$ as constants. On
              certain classes of terms, called here \emph{stable},
              we prove that the resulting calculus is
              confluent if $R$ is confluent, and terminating if
              $R$ is terminating. The termination result has
              the corresponding theorems for several typed
              calculi as corollaries. The proof of the
              confluence result suggests a general method for
              proving confluence of typed beta-reduction plus
              rewriting; we sketch the application to the
              polymorphic lambda calculus.},
  url      = {iandc92.pdf}
}
@inproceedings{c_rta91,
  author    = {Daniel J.\ Dougherty},
  title     = {Adding Algebraic Rewriting to the Untyped
               Lambda Calculus (Extended Abstract)},
  booktitle = {Proc.\ of the Fourth International Conference
               on Rewriting Techniques and Applications (Como,
               Italy)},
  editor    = {Ron Book},
  series    = {Lecture Notes in Computer Science},
  volume    = 488,
  pages     = {37--48},
  publisher = {Springer-Verlag},
  year      = 1991
}
@article{a_topapp91,
  author  = {Daniel J. Dougherty},
  title   = {A strong {Baire} category theorem for products},
  journal = {Topology and Its Applications},
  volume  = 39,
  pages   = {105--112},
  year    = 1991
}
@article{a_jcta87,
  author  = {Daniel J. Dougherty},
  title   = {Decomposition of infinite matrices},
  journal = {Journal of Combinatorial Theory (Series A)},
  volume  = 45,
  pages   = {277--289},
  year    = 1987
}
@article{a_jmaa87,
  author  = {Daniel J. Dougherty and R. M. Shortt and
             R. N. Ball},
  title   = {Decomposition theorems for measures},
  journal = {Journal of Mathematical Analysis and
             Applications},
  volume  = 128,
  pages   = {561--580},
  year    = 1987
}
@article{a_ndjfl86,
  author  = {Daniel J. Dougherty},
  title   = {Gentzen systems, resolution, and literal trees},
  journal = {Notre Dame Journal of Formal Logic},
  volume  = 27,
  pages   = {483--503},
  year    = 1986
}
This file was generated by bibtex2html 1.96.