@inproceedings {783,
title = {A Hierarchy of Scheduler Classes for Stochastic Automata},
booktitle = {Foundations of Software Science and Computation Structures - 21st International Conference, {FOSSACS} 2018, Held as Part of {ETAPS} 2018, Thessaloniki, Greece, April 14-20, 2018, Proceedings},
series = {Lecture Notes in Computer Science},
volume = {10803},
year = {2018},
pages = {384--402},
publisher = {Springer},
abstract = {Stochastic automata are a formal compositional model for concurrent stochastic timed systems, with general distributions and nondeterministic choices. Measures of interest are defined over schedulers that resolve the nondeterminism. In this paper we investigate the power of various theoretically and practically motivated classes of schedulers, considering the classic complete-information view and a restriction to non-prophetic schedulers. We prove a hierarchy of scheduler classes w.r.t. unbounded probabilistic reachability. We find that, unlike Markovian formalisms, stochastic automata distinguish most classes even in this basic setting. Verification and strategy synthesis methods thus face a tradeoff between powerful and efficient classes. Using lightweight scheduler sampling, we explore this tradeoff and demonstrate the concept of a useful approximative verification technique for stochastic automata.},
isbn = {978-3-319-89365-5},
doi = {10.1007/978-3-319-89366-2_21},
author = {Pedro R. D{\textquoteright}Argenio and Marcus Gerhold and Arnd Hartmanns and Sean Sedwards},
editor = {Christel Baier and Ugo Dal Lago}
}
@inproceedings {784,
title = {A Statistical Model Checker for Nondeterminism and Rare Events},
booktitle = {Tools and Algorithms for the Construction and Analysis of Systems - 24th International Conference, {TACAS} 2018, Held as Part of {ETAPS} 2018, Thessaloniki, Greece, April 14-20, 2018, Proceedings, Part {II}},
series = {Lecture Notes in Computer Science},
volume = {10806},
year = {2018},
pages = {340--358},
publisher = {Springer},
abstract = {Statistical model checking avoids the state space explosion problem in verification and naturally supports complex non-Markovian formalisms. Yet as a simulation-based approach, its runtime becomes excessive in the presence of rare events, and it cannot soundly analyse nondeterministic models. In this tool paper, we present modes: a statistical model checker that combines fully automated importance splitting to efficiently estimate the probabilities of rare events with smart lightweight scheduler sampling to approximate optimal schedulers in nondeterministic models. As part of the Modest Toolset, it supports a variety of input formalisms natively and via the Jani exchange format. A modular software architecture allows its various features to be flexibly combined. We highlight its capabilities with an experimental evaluation across multi-core and distributed setups on three exemplary case studies.},
isbn = {978-3-319-89962-6},
doi = {10.1007/978-3-319-89963-3_20},
author = {Carlos E. Budde and Pedro R. D{\textquoteright}Argenio and Arnd Hartmanns and Sean Sedwards},
editor = {Dirk Beyer and Marieke Huisman}
}
@inproceedings {753,
title = {Better Automated Importance Splitting for Transient Rare Events},
booktitle = {Dependable Software Engineering. Theories, Tools, and Applications - Third International Symposium, {SETTA} 2017, Changsha, China, October 23-25, 2017, Proceedings},
series = {Lecture Notes in Computer Science},
volume = {10606},
year = {2017},
pages = {42--58},
publisher = {Springer},
abstract = {Statistical model checking uses simulation to overcome the state space explosion problem in formal verification. Yet its runtime explodes when faced with rare events, unless a rare event simulation method like importance splitting is used. The effectiveness of importance splitting hinges on nontrivial model-specific inputs: an importance function with matching splitting thresholds. This prevents its use by non-experts for general classes of models. In this paper, we propose new method combinations with the goal of fully automating the selection of all parameters for importance splitting. We focus on transient (reachability) properties, which particularly challenged previous techniques, and present an exhaustive practical evaluation of the new approaches on case studies from the literature. We find that using Restart simulations with a compositionally constructed importance function and thresholds determined via a new expected success method most reliably succeeds and performs very well. Our implementation within the Modest Toolset supports various classes of formal stochastic models and is publicly available.},
isbn = {978-3-319-69482-5},
doi = {10.1007/978-3-319-69483-2_3},
author = {Carlos E. Budde and Pedro R. D{\textquoteright}Argenio and Arnd Hartmanns},
editor = {Kim G. Larsen and Oleg Sokolsky and Ji Wang}
}
@inproceedings {716,
title = {Compositional Construction of Importance Functions in Fully Automated Importance Splitting},
booktitle = {10th EAI International Conference on Performance Evaluation Methodologies and Tools {VALUETOOLS 2016}},
year = {2017},
publisher = {ACM},
abstract = {Importance splitting is a technique to accelerate discrete event simulation when the value to estimate depends on the occurrence of rare events. It requires a guiding importance function typically defined in an ad hoc fashion by an expert in the field, who could choose an inadequate function. In this article we present a compositional and automatic technique to derive the importance function from the model description, and analyze different composition heuristics. This technique is linear in the number of modules, in contrast to the exponential nature of our previous proposal. This approach was compared to crude simulation and to importance splitting using typical ad hoc importance functions. A prototypical tool was developed and tested on several models, showing the feasibility and efficiency of the technique.},
keywords = {discrete-event simulation, modeling and simulation, rare-event simulation},
isbn = {978-1-63190-141-6},
doi = {10.4108/eai.25-10-2016.2266501},
author = {Carlos E. Budde and Pedro R. D{\textquoteright}Argenio and Ra{\'u}l E. Monti},
editor = {Antonio Puliafito and Kishor S. Trivedi and Bruno Tuffin and Marco Scarpa and Fumio Machida and Javier Alonso}
}
@inproceedings {781,
title = {Efficient simulation-based verification of probabilistic timed automata},
booktitle = {2017 Winter Simulation Conference, {WSC} 2017, Las Vegas, NV, USA, December 3-6, 2017},
year = {2017},
pages = {1419--1430},
publisher = {{IEEE}},
abstract = {Probabilistic timed automata are a formal model for real-time systems with discrete probabilistic and nondeterministic choices. To overcome the state space explosion problem of exhaustive verification, a symbolic simulation-based approach that soundly treats nondeterminism to approximate maximum and minimum reachability probabilities has recently become available. Its use of difference-bound matrices to handle continuous real time however leads to poor performance: most operations are cubic or even exponential in the number of clock variables. In this paper, we propose a novel region-based approach and data structure that reduce the complexity of all operations to being linear. It relies on a particular mapping between symbolic regions and concrete representative valuations. Using an implementation within the MODEST TOOLSET, we show that the new approach is not only easier to implement, but indeed significantly outperforms all current alternatives on standard benchmark models.},
isbn = {978-1-5386-3428-8},
doi = {10.1109/WSC.2017.8247885},
author = {Arnd Hartmanns and Sean Sedwards and Pedro R. D{\textquoteright}Argenio}
}
@inproceedings {726,
title = {{JANI}: Quantitative Model and Tool Interaction},
booktitle = {Tools and Algorithms for the Construction and Analysis of Systems: 23rd International Conference, TACAS 2017},
year = {2017},
pages = {151--168},
publisher = {Springer Berlin Heidelberg},
address = {Berlin, Heidelberg},
abstract = {The formal analysis of critical systems is supported by a vast space of modelling formalisms and tools. The variety of incompatible formats and tools however poses a significant challenge to practical adoption as well as continued research. In this paper, we propose the Jani model format and tool interaction protocol. The format is a metamodel based on networks of communicating automata and has been designed for ease of implementation without sacrificing readability. The purpose of the protocol is to provide a stable and uniform interface between tools such as model checkers, transformers, and user interfaces. Jani uses the Json data format, inheriting its ease of use and inherent extensibility. Jani initially targets, but is not limited to, quantitative model checking. Several existing tools now support the verification of Jani models, and automatic converters from a diverse set of higher-level modelling languages have been implemented. The ultimate purpose of Jani is to simplify tool development, encourage research cooperation, and pave the way towards a future competition in quantitative model checking.},
isbn = {978-3-662-54580-5},
doi = {10.1007/978-3-662-54580-5_9},
author = {Carlos E. Budde and Christian Dehnert and Ernst Moritz Hahn and Arnd Hartmanns and Sebastian Junges and Andrea Turrini}
}
@incollection {752,
title = {The Road from Stochastic Automata to the Simulation of Rare Events},
booktitle = {ModelEd, TestEd, TrustEd. Essays Dedicated to Ed Brinksma on the Occasion of His 60th Birthday},
series = {Lecture Notes in Computer Science},
volume = {10500},
year = {2017},
pages = {276--294},
publisher = {Springer},
abstract = {We report in the advances on stochastic automata and its use on rare event simulation. We review and introduce an extension of IOSA, an input/output variant of stochastic automata that under mild constraints can be ensured to contain non-determinism only in a spurious manner. That is, the model can be regarded as fully probabilistic and hence amenable for simulation. We also report on our latest work on fully automatizing the technique of rare event simulation. Using the structure of the model given in terms a network of IOSAs allows us to automatically derive the importance function, which is crucial for the importance splitting technique of rare event simulation. We conclude with experimental results that show how promising our technique is.},
isbn = {978-3-319-68269-3},
doi = {10.1007/978-3-319-68270-9_14},
author = {Pedro R. D{\textquoteright}Argenio and Carlos E. Budde and Mat{\'\i}as David Lee and Ra{\'u}l E. Monti and Leonardo Rodr{\'\i}guez and Nicol{\'a}s Wolovick},
editor = {Joost-Pieter Katoen and Rom Langerak and Arend Rensink}
}
@inproceedings {714,
title = {Is Your Software on Dope? - Formal Analysis of Surreptitiously {\textquotedblleft}enhanced{\textquotedblright} Programs},
booktitle = {Programming Languages and Systems - 26th European Symposium on Programming, {ESOP} 2017, Held as Part of the European Joint Conferences on Theory and Practice of Software, {ETAPS} 2017, Uppsala, Sweden, April 22-29, 2017, Proceedings},
series = {Lecture Notes in Computer Science},
volume = {10201},
year = {2017},
pages = {83--110},
publisher = {Springer},
abstract = {Usually, it is the software manufacturer who employs verification or testing to ensure that the software embedded in a device meets its main objectives. However, these days we are confronted with the situation that economical or technological reasons might make a manufacturer become interested in the software slightly deviating from its main objective for dubious reasons. Examples include lock-in strategies and the NOx
emission scandals in automotive industry. This phenomenon is what we call software doping. It is turning more widespread as software is embedded in ever more devices of daily use.
The primary contribution of this article is to provide a hierarchy of simple but solid formal definitions that enable to distinguish whether a program is clean or doped. Moreover, we show that these characterisations provide an immediate framework for analysis by using already existing verification techniques. We exemplify this by applying self-composition on sequential programs and model checking of HyperLTL formulas on reactive models.},
isbn = {978-3-662-54433-4},
doi = {10.1007/978-3-662-54434-1_4},
author = {Pedro R. D{\textquoteright}Argenio and Gilles Barthe and Sebastian Biewer and Bernd Finkbeiner and Holger Hermanns},
editor = {Hongseok Yang}
}
@inproceedings {666,
title = {Consideraciones Sobre el Voto Electr{\'o}nico},
booktitle = {10{\textordmasculine} Simposio de Inform{\'a}tica en el Estado, SIE 2016, 45 JAIIO},
year = {2016},
pages = {297--307},
publisher = {SADIO},
abstract = {El voto electr{\'o}nico es un tema que se ha instalado tanto en las agendas de gobierno (nacional, provincial y municipal) como en la opini{\'o}n p{\'u}blica. En este trabajo discutimos algunas posibles definiciones de voto electr{\'o}nico; analizamos algunas debilidades, tanto pr{\'a}cticas, seg{\'u}n revelan algunas experiencias en el mundo, como te{\'o}ricas; y postulamos una serie de requerimientos que a nuestro criterio deber{\'\i}a cumplir cualquier sistema de este tipo.},
issn = {2451-7534},
url = {http://45jaiio.sadio.org.ar/sites/default/files/SIE-27.PDF},
author = {Miguel Montes and Daniel Penazzi and Nicol{\'a}s Wolovick}
}
@inproceedings {668,
title = {Facets of Software Doping},
booktitle = {Leveraging Applications of Formal Methods, Verification and Validation: Discussion, Dissemination, Applications - 7th International Symposium, (ISoLA 2016), Part II},
series = {Lecture Notes in Computer Science},
volume = {9953},
year = {2016},
pages = {601--607},
publisher = {Springer},
abstract = {This paper provides an informal discussion of the formal aspects of software doping.},
isbn = {978-3-319-47168-6},
doi = {10.1007/978-3-319-47169-3_46},
author = {Gilles Barthe and Pedro R. D{\textquoteright}Argenio and Bernd Finkbeiner and Holger Hermanns},
editor = {Tiziana Margaria and Bernhard Steffen}
}
@article {655,
title = {A general {SOS} theory for the specification of probabilistic transition systems},
journal = {Information and Computation},
volume = {249},
year = {2016},
pages = {76--109},
abstract = {This article focuses on the formalization of the structured operational semantics approach for languages with primitives that introduce probabilistic and non-deterministic behavior. We define a general theoretic framework and present the ntμfθ/ntμxθ rule format that guarantees that bisimulation equivalence (in the probabilistic setting) is a congruence for any operator defined in this format. We show that the bisimulation is fully abstract w.r.t. the ntμfθ/ntμxθ format and (possibilistic) trace equivalence in the sense that bisimulation is the coarsest congruence included in trace equivalence for any operator definable within the ntμfθ/ntμxθ format (in other words, bisimulation is the smallest congruence relation guaranteed by the format). We also provide a conservative extension theorem and show that languages that include primitives for exponentially distributed time behavior (such as IMC and Markov automata based language) fit naturally within our framework.},
doi = {10.1016/j.ic.2016.03.009},
author = {Pedro R. D{\textquoteright}Argenio and Daniel Gebler and Mat{\'\i}as David Lee}
}
@inproceedings {654,
title = {Input/Output Stochastic Automata - Compositionality and Determinism},
booktitle = {Formal Modeling and Analysis of Timed Systems - 14th International Conference, FORMATS 2016, Quebec, QC, Canada, August 24-26, 2016, Proceedings},
series = {Lecture Notes in Computer Science},
volume = {9884},
year = {2016},
pages = {53--68},
publisher = {Springer},
abstract = {Stochastic automata provide a way to symbolically model systems in which the occurrence time of events may respond to any continuous random variable. We introduce here an input/output variant of stochastic automata that, once the model is closed {\textemdash}i.e., all synchronizations are resolved{\textemdash}, the resulting automaton does not contain non-deterministic choices. This is important since fully probabilistic models are amenable to simulation in the general case and to much more efficient analysis if restricted to Markov models. We present here a theoretical introduction to input/output stochastic automata (IOSA) for which we (i) provide a concrete semantics in terms of non-deterministic labeled Markov processes (NLMP), (ii) prove that bisimulation is a congruence for parallel composition both in NLMP and IOSA, (iii) show that parallel composition commutes in the symbolic and concrete level, and (iv) provide a proof that a closed IOSA is indeed deterministic.},
isbn = {978-3-319-44877-0},
doi = {10.1007/978-3-319-44878-7_4},
author = {Pedro R. D{\textquoteright}Argenio and Mat{\'\i}as David Lee and Ra{\'u}l E. Monti},
editor = {Martin Fr{\"a}nzle and Nicolas Markey}
}
@inproceedings {632,
title = {Statistical Approximation of Optimal Schedulers for Probabilistic Timed Automata},
booktitle = {Integrated Formal Methods: 12th International Conference, IFM 2016, Reykjavik, Iceland, June 1-5, 2016, Proceedings},
series = {Lecture Notes in Computer Science},
volume = {9681},
year = {2016},
pages = {99--114},
publisher = {Springer},
abstract = {The verification of probabilistic timed automata involves finding schedulers that optimise their nondeterministic choices with respect to the probability of a property. In practice, approaches based on model checking fail due to state-space explosion, while simulation-based techniques like statistical model checking are not applicable due to the nondeterminism. We present a new lightweight on-the-fly algorithm to find near-optimal schedulers for probabilistic timed automata. We make use of the classical region and zone abstractions from timed automata model checking, coupled with a recently developed smart sampling technique for statistical verification of Markov decision processes. Our algorithm provides estimates for both maximum and minimum probabilities. We compare our new approach with alternative techniques, first using tractable examples from the literature, then motivate its scalability using case studies that are intractable to numerical model checking and challenging for existing statistical techniques.},
isbn = {978-3-319-33693-0},
doi = {10.1007/978-3-319-33693-0_7},
author = {Pedro R. D{\textquoteright}Argenio and Arnd Hartmanns and Axel Legay and Sean Sedwards},
editor = {Erika {\'A}brah{\'a}m and Marieke Huisman}
}
@inproceedings {552,
title = {Rare Event Simulation with Fully Automated Importance Splitting},
booktitle = {Computer Performance Engineering - 12th European Workshop, {EPEW} 2015, Madrid, Spain, August 31 - September 1, 2015, Proceedings},
series = {Lecture Notes in Computer Science},
volume = {9272},
year = {2015},
pages = {275--290},
publisher = {Springer},
abstract = {Probabilistic model checking is a powerful tool for analysing probabilistic systems but it can only be efficiently applied to Markov models. Monte Carlo simulation provides an alternative for the generality of stochastic processes, but becomes infeasible if the value to estimate depends on the occurrence of rare events. To combat this problem, intelligent simulation strategies exist to lower the estimation variance and hence reduce the simulation time. Importance splitting is one such technique, but requires a guiding function typically defined in an ad hoc fashion by an expert in the field. We present an automatic derivation of the importance function from the model description. A prototypical tool was developed and tested on several Markov models, compared to analytically and numerically calculated results and to results of typical ad hoc importance functions, showing the feasibility and efficiency of this approach. The technique is easily adapted to general models like GSMPs.},
isbn = {978-3-319-23266-9},
doi = {10.1007/978-3-319-23267-6_18},
author = {Carlos E. Budde and Pedro R. D{\textquoteright}Argenio and Holger Hermanns},
editor = {Marta Beltr{\'a}n and William J. Knottenbelt and Jeremy T. Bradley}
}
@inproceedings {572,
title = {Rooted branching bisimulation as a congruence for probabilistic transition systems},
booktitle = {Proceedings Thirteenth Workshop on Quantitative Aspects of Programming Languages and Systems, London, UK, 11th-12th April 2015},
series = {Electronic Proceedings in Theoretical Computer Science},
volume = {194},
year = {2015},
pages = {79--94},
publisher = {Open Publishing Association},
abstract = {We propose a probabilistic transition system specification format, referred to as probabilistic RBB safe, for which rooted branching bisimulation is a congruence. The congruence theorem is based on the approach of Fokkink for the qualitative case. For this to work, the theory of transition system specifications in the setting of labeled transition systems needs to be extended to deal with probability distributions, both syntactically and semantically. We provide a scheduler-free characterization of probabilistic branching bisimulation as adapted from work of Andova et al. for the alternating model. Counter examples are given to justify the various conditions required by the format.},
doi = {10.4204/EPTCS.194.6},
author = {Mat{\'\i}as David Lee and Erik P. de Vink},
editor = {Nathalie Bertrand and Mirco Tribastone}
}
@article {554,
title = {Smart sampling for lightweight verification of Markov decision processes},
journal = {International Journal on Software Tools for Technology Transfer},
volume = {17},
year = {2015},
pages = {469--484},
abstract = {Markov decision processes (MDP) are useful to model optimisation problems in concurrent systems. To verify MDPs with efficient Monte Carlo techniques requires that their nondeterminism be resolved by a scheduler. Recent work has introduced the elements of lightweight techniques to sample directly from scheduler space, but finding optimal schedulers by simple sampling may be inefficient. Here we describe {\textquotedblleft}smart{\textquotedblright} sampling algorithms that can make substantial improvements in performance.},
doi = {10.1007/s10009-015-0383-0},
author = {Pedro R. D{\textquoteright}Argenio and Axel Legay and Sean Sedwards and Louis-Marie Traonouez}
}
@inproceedings {553,
title = {{SOS} rule formats for convex and abstract probabilistic bisimulations},
booktitle = {Proceedings of the Combined 22th International Workshop on Expressiveness in Concurrency and 12th Workshop on Structural Operational Semantics, {EXPRESS/SOS} 2015, Madrid, Spain, 31st August 2015},
series = {Electronic Proceedings in Theoretical Computer Science},
volume = {190},
year = {2015},
pages = {31--45},
abstract = {Probabilistic transition system specifications (PTSSs) in the ntμfθ/ntμxθ format provide structural operational semantics for Segala-type systems that exhibit both probabilistic and nondeterministic behavior and guarantee that bisimilarity is a congruence for all operator defined in such format. Starting from the ntμfθ/ntμxθ format, we obtain restricted formats that guarantee that three coarser bisimulation equivalences are congruences. We focus on (i) Segala{\textquoteright}s variant of bisimulation that considers combined transitions, which we call here "convex bisimulation"; (ii) the bisimulation equivalence resulting from considering Park \& Milner{\textquoteright}s bisimulation on the usual stripped probabilistic transition system (translated into a labelled transition system), which we call here "probability obliterated bisimulation"; and (iii) a "probability abstracted bisimulation", which, like bisimulation, preserves the structure of the distributions but instead, it ignores the probability values. In addition, we compare these bisimulation equivalences and provide a logic characterization for each of them.},
doi = {10.4204/EPTCS.190.3},
author = {Pedro R. D{\textquoteright}Argenio and Mat{\'\i}as David Lee and Daniel Gebler},
editor = {Silvia Crafa and Daniel Gebler}
}
@article {574,
title = {Special Issue: Selected Papers of the {\textquotedblleft}24th International Conference on Concurrency Theory{\textquotedblright} {CONCUR} 2013},
journal = {Logical Methods in Computer Science},
year = {2015},
abstract = {This volume contains a selection of the top papers presented at the 24th Conference on Concurrency Theory (CONCUR 2013) held in Buenos Aires, Argentina, during August 27-30, 2013.},
issn = {1860-5974},
doi = {10.2168/LMCS-CONCUR:2013},
url = {http://lmcs-online.org/ojs/specialIssues.php?id=61},
author = {Pedro R. D{\textquoteright}Argenio and Hern{\'a}n Melgratti and Davide Sangiorgi}
}
@inproceedings {413,
title = {Axiomatizing Bisimulation Equivalences and Metrics from Probabilistic SOS Rules},
booktitle = {Foundations of Software Science and Computation Structures - 17th International Conference, FOSSACS 2014, Held as Part of the European Joint Conferences on Theory and Practice of Software, ETAPS 2014, Grenoble, France, April 5-13, 2014, Proceedings},
series = {Lecture Notes in Computer Science},
volume = {8412},
year = {2014},
pages = {289--303},
publisher = {Springer},
abstract = {Probabilistic transition system specifications (PTSS) provide structural operational semantics for reactive probabilistic labeled transition systems. Bisimulation equivalences and bisimulation metrics are fundamental notions to describe behavioral relations and distances of states, respectively. We provide a method to generate from a PTSS a sound and ground-complete equational axiomatization for strong and convex bisimilarity. The construction is based on the method of Aceto, Bloom and Vaandrager developed for non-deterministic transition system specifications. The novelty in our approach is to employ many-sorted algebras to axiomatize separately non-deterministic choice, probabilistic choice and their interaction. Furthermore, we generalize this method to axiomatize the strong and convex metric bisimulation distance of PTSS.},
isbn = {978-3-642-54829-1},
doi = {10.1007/978-3-642-54830-7_19},
author = {Pedro R. D{\textquoteright}Argenio and Daniel Gebler and Mat{\'\i}as David Lee},
editor = {Anca Muscholl}
}
@article {425,
title = {Distributed probabilistic input/output automata: Expressiveness, (un)decidability and algorithms},
journal = {Theoretical Computer Science},
volume = {538},
year = {2014},
pages = {84--102},
abstract = {Probabilistic model checking computes the probability values of a given property quantifying over all possible schedulers. It turns out that maximum and minimum probabilities calculated in such a way are over-estimations on models of distributed systems in which components are loosely coupled and share little information with each other (and hence arbitrary schedulers may result too powerful). Therefore, we introduced definitions that characterise which are the schedulers that properly capture the idea of distributed behaviour in probabilistic and nondeterministic systems modelled as a set of interacting components. In this paper, we provide an overview of the work we have done in the last years which includes: (1) the definitions of distributed and strongly distributed schedulers, providing motivation and intuition; (2) expressiveness results, comparing them to restricted versions such as deterministic variants or finite-memory variants; (3) undecidability results{\textemdash}in particular the model checking problem is not decidable in general when restricting to distributed schedulers; (4) a counterexample-guided refinement technique that, using standard probabilistic model checking, allows to increase precision in the actual bounds in the distributed setting; and (5) a revision of the partial order reduction technique for probabilistic model checking. We conclude the paper with an extensive review of related work dealing with similar approaches to ours.},
keywords = {Distributed systems, Interleaving, Markov decision processes, Nondeterminism, Partial observation, Probabilistic systems},
issn = {0304-3975},
doi = {10.1016/j.tcs.2013.07.017},
url = {http://www.sciencedirect.com/science/article/pii/S0304397513005203},
author = {Sergio Giro and Pedro R. D{\textquoteright}Argenio and Luis Mar{\'\i}a {Ferrer Fioriti}}
}
@incollection {513,
title = {A Theory for the Semantics of Stochastic and Non-deterministic Continuous Systems},
booktitle = {Stochastic Model Checking. Rigorous Dependability Analysis Using Model Checking Techniques for Stochastic Systems},
series = {Lecture Notes in Computer Science},
volume = {8453},
year = {2014},
pages = {67--86},
publisher = {Springer Berlin Heidelberg},
abstract = {The description of complex systems involving physical or biological components usually requires to model complex continuous behavior induced by variables such as time, distance, speed, temperature, alkalinity of a solution, etc. Often, such variables can be quantified probabilistically to better understand the behavior of the complex systems. For example, the arrival time of events may be considered a Poisson process or the weight of an individual may be assumed to be distributed according to a log-normal distribution. However, it is also common that the uncertainty on how these variables behave makes us prefer to leave out the choice of a particular probability and rather model it as a purely non-deterministic decision, as it is the case when a system is intended to be deployed in a variety of very different computer or network architectures. Therefore, the semantics of these systems needs to be represented by a variant of probabilistic automata that involves continuous domains on the state space and the transition relation.
In this paper, we provide a survey on the theory of such kind of models. We present the theory of the so-called labeled Markov processes (LMP) and its extension with internal non-determinism (NLMP). We show that in these complex domains, the bisimulation relation can be understood in different manners. We show the relation between the different bisimulations and try to understand their expressiveness through examples. We also study variants of Hennessy-Milner logic that provides logical characterizations of some of these bisimulations.},
isbn = {978-3-662-45488-6},
doi = {10.1007/978-3-662-45489-3_3},
author = {Carlos E. Budde and Pedro R. D{\textquoteright}Argenio and Pedro S{\'a}nchez Terraf and Nicol{\'a}s Wolovick},
editor = {Anne Remke and Pedro R. D{\textquoteright}Argenio}
}
@proceedings {288,
title = {CONCUR 2013 - Concurrency Theory - 24th International Conference, CONCUR 2013, Buenos Aires, Argentina, August 27-30, 2013. Proceedings},
series = {Lecture Notes in Computer Science},
volume = {8052},
year = {2013},
publisher = {Springer},
isbn = {978-3-642-40183-1},
doi = {10.1007/978-3-642-40184-8},
editor = {Pedro R. D{\textquoteright}Argenio and Hern{\'a}n Melgratti}
}
@inproceedings {333,
title = {Distributed analysis of diagnosability in concurrent systems},
booktitle = {Proceedings of the 24th International Workshop on Principles of Diagnosis (DX{\textquoteright}13)},
year = {2013},
address = {Jerusalem, Israel},
abstract = {Complex systems often exhibit unexpected faults that are difficult to handle. Such systems are desirable to be diagnosable, i.e. faults can be automatically detected as they occur (or shortly afterwards), enabling the system to handle the fault or recover. A system is diagnosable if it is possible to detect every fault, in a finite time after they occurred, by only observing the available information from the system. Complex systems are usually built from simpler components running concurrently. We study how to infer the diagnosability property of a complex system (distributed and with multiple faults) from a parallelized analysis of the diagnosability of each of its components synchronizing with fault free versions of the others. In this paper we make the following contributions: (1) we address the diagnosability problem of concurrent systems with arbitrary faults occurring freely in each component. (2) We distribute the diagnosability analysis and illustrate our approach with examples. Moreover, (3) we present a prototype tool that implements our techniques showing promising results.},
author = {Hern{\'a}n Ponce de Le{\'o}n and Gonzalo Bonigo and Laura Brand{\'a}n Briones},
editor = {Meir Kalech and Alexander Feldman and Gregory Provan}
}
@article {293,
title = {Enhanced transport through desorption-mediated diffusion},
journal = {Physical Review E},
volume = {87},
year = {2013},
month = jan,
pages = {012115},
abstract = {We present a master equation approach to the study of the bulk-mediated surface diffusion mechanism in a three-dimensional
confined domain. The proposed scheme allowed us to evaluate analytically a number of magnitudes that were used to characterize the efficiency of the bulk-mediated surface transport mechanism, for instance, the mean escape time from the domain, and the mean number of distinct visited sites on the confined domain boundary.},
keywords = {desorption-mediated diffusion, Random walk},
doi = {10.1103/PhysRevE.87.012115},
author = {F{\'e}lix Rojo and Carlos Ernesto Budde and Carlos E. Budde and Horacio S. Wio}
}
@inproceedings {224,
title = {A Petri Net Based Analysis of Deadlocks for Active Objects and Futures},
booktitle = {Formal Aspects of Component Software, 9th International Symposium, FACS 2012, Mountain View, CA, USA, September 12-14, 2012. Revised Selected Papers},
series = {Lecture Notes in Computer Science},
volume = {7684},
year = {2013},
pages = {110--127},
publisher = {Springer},
organization = {Springer},
abstract = {We give two different notions of deadlock for systems based on active objects and futures. One is based on blocked objects and conforms with the classical definition of deadlock by Coffman, Jr. et al. The other one is an extended notion of deadlock based on blocked processes which is more general than the classical one. We introduce a technique to prove deadlock freedom of systems of active objects. To check deadlock freedom an abstract version of the program is translated into Petri nets. Extended deadlocks, and then also classical deadlock, can be detected via checking reachability of a distinct marking. Absence of deadlocks in the Petri net constitutes deadlock freedom of the concrete system.},
isbn = {978-3-642-35860-9},
doi = {10.1007/978-3-642-35861-6_7},
author = {Frank S. de Boer and Mario Bravetti and Immo Grabe and Mat{\'\i}as David Lee and Martin Steffen and Gianluigi Zavattaro},
editor = {Corina S. Pasareanu and Gwen Sala{\"u}n}
}
@proceedings {289,
title = {Quantitative Evaluation of Systems - 10th International Conference, QEST 2013, Buenos Aires, Argentina, August 27-30, 2013. Proceedings},
series = {Lecture Notes in Computer Science},
volume = {8054},
year = {2013},
publisher = {Springer},
isbn = {978-3-642-40195-4},
doi = {10.1007/978-3-642-40196-1},
editor = {Kaustubh R. Joshi and Markus Siegle and Mari{\"e}lle Stoelinga and Pedro R. D{\textquoteright}Argenio}
}
@inproceedings {304,
title = {Security Analysis in Probabilistic Distributed Protocols via Bounded Reachability},
booktitle = {Trustworthy Global Computing - 7th International Symposium, TGC 2012, Newcastle upon Tyne, UK, September 7-8, 2012, Revised Selected Papers},
series = {Lecture Notes in Computer Science},
volume = {8191},
year = {2013},
pages = {182--197},
publisher = {Springer},
organization = {Springer},
abstract = {We present a framework to analyze security properties in distributed protocols. The framework is constructed on top of the so called (strongly) distributed schedulers where secrecy is also considered. Secrecy is presented as an equivalence class on actions to those components that do not have access to such secrets; however these actions can be distinguished by those with appropriate clearance. We also present an algorithm to solve bounded reachability analysis on this kind of models. The algorithm appropriately encodes the nondeterministic model by interpreting the decisions of the schedulers as parameters. The problem is then reduced to a polynomial optimization problem.},
isbn = {978-3-642-41156-4},
doi = {10.1007/978-3-642-41157-1_12},
author = {Silvia S. Pelozo and Pedro R. D{\textquoteright}Argenio},
editor = {Catuscia Palamidessi and Mark Dermot Ryan}
}
@phdthesis {260,
title = {Sem{\'a}ntica de procesos para sistemas interactivos y sistemas probabil{\'\i}sticos},
volume = {Doctor},
year = {2013},
pages = {132},
school = {Universidad Nacional de C{\'o}rdoba},
address = {C{\'o}rdoba},
abstract = {En esta tesis estudiamos sem{\'a}nticas para sistemas interactivos, i.e. sistemas donde la ejecuci{\'o}n de una acci{\'o}n est{\'a} controlada por el mismo sistema o por un agente externo. El enfoque utilizado se basa en tipos de observaci{\'o}n y nociones de observabilidad. Utilizando una variante de sistemas interactivos se estudia la propiedad de seguridad denominada no-interferencia. Si un sistema satisface esta propiedad, entonces {\'e}ste realiza una manipulaci{\'o}n segura de la informaci{\'o}n.
Adem{\'a}s estudiamos el problema de definir lenguajes para especificar sistemas de transiciones probabilistas. El enfoque utilizado se basa en sem{\'a}ntica operacional estructurada (SOS). En este contexto presentamos el formato $nt\mu f\theta$/$nt\mu x\theta$ y estudiamos distintas propiedades
del mismo.},
keywords = {acciones de entrada y salida, bisimulaci{\'o}n, congruencia., formatos de regla, no interferencia, sem{\'a}ntica de procesos, sem{\'a}ntica operacional estructurada, sistemas de transiciones},
author = {Mat{\'\i}as David Lee}
}
@inproceedings {291,
title = {A theory for the semantics of continuous systems with stochastic and structural non-determinism},
booktitle = {YR-CONCUR},
year = {2013},
month = aug,
address = {Buenos Aires, Argentina},
abstract = {We report an approach to modelling the semantics of complex systems, comprising non-deterministic
and stochastic behaviour inside continuous domains. The theory is based on the mathematical field of measure theory, and extends labelled Markov processes (LMP) with internal non-determinism. We show how the bisimulation relation can be understood in different manners, and mention the known boundaries between the different resulting definitions. We also review a variant of Hennessy-Milner logic that provides logical characterizations of some of these bisimulations.},
keywords = {Bisimulation, Hennessy-Milner Logic, Measure Theory, Nondeterministic labelled Markov Processes},
author = {Carlos E. Budde}
}
@article {196,
title = {{Bisimilarity is not Borel}},
journal = {ArXiv e-prints},
year = {2012},
keywords = {03B70, 03E15, 28A05, Computer Science - Logic in Computer Science, F.1.2, F.4.1, Mathematics - Logic},
author = {Pedro S{\'a}nchez Terraf}
}
@article {39,
title = {Bisimulations for non-deterministic labelled Markov processes},
journal = {Mathematical Structures in Computer Science},
volume = {22},
year = {2012},
month = feb,
pages = {43--68},
abstract = {We extend the theory of labelled Markov processes to include internal non-determinism, which is a fundamental concept for the further development of a process theory with abstraction on non-deterministic continuous probabilistic systems. We define non-deterministic labelled Markov processes (NLMP) and provide three definitions of bisimulations: a bisimulation following a traditional characterisation; a state-based bisimulation tailored to our {\textquoteleft}measurable{\textquoteright} non-determinism; and an event-based bisimulation. We show the relations between them, including the fact that the largest state bisimulation is also an event bisimulation. We also introduce a variation of the Hennessy{\textendash}Milner logic that characterises event bisimulation and is sound with respect to the other bisimulations for an arbitrary NLMP. This logic, however, is infinitary as it contains a denumerable $\vee$. We then introduce a finitary sublogic that characterises all bisimulations for an image finite NLMP whose underlying measure space is also analytic. Hence, in this setting, all the notions of bisimulation we consider turn out to be equal. Finally, we show that all these bisimulation notions are different in the general case. The counterexamples that separate them turn out to be non-probabilistic NLMPs.},
issn = {0960-1295},
doi = {10.1017/S0960129511000454},
author = {Pedro R. D{\textquoteright}Argenio and Pedro S{\'a}nchez Terraf and Nicol{\'a}s Wolovick}
}
@phdthesis {175,
title = {Continuous probability and nondeterminism in labeled transition systems},
volume = {PhD},
year = {2012},
school = {Universidad Nacional de C{\'o}rdoba},
type = {PhD},
address = {C{\'o}rdoba},
abstract = {We define a model for interacting systems involving continuous probabilistic and nondeterministic choices over continuous state spaces. Our model is a generalization of labeled Markov process, a well established formalism that capture interacting probabilistic continuous systems with strong basis in Measure Theory, but lacking internal nondeterminism. We define the extension to continuous internal nondeterminism in such a way that it allows for quantification of nondeterminism through schedulers, and it also allows for a sound definition of an existential modal operator. The model is used to capture the semantics of a probabilistic timed stochastic process algebra, a stochastic hybrid automata, and a probabilistic and nondeterministic programming language; they are continuous systems involving a nontrivial mix of probabilities and nondeterminism. We also compare our model with other known continuous probabilistic and nondeterministic labeled transition systems and the embedding is defined if applicable. Behavioral equivalence is captured by one notion of point-wise strong bisimulation, and two notions of event-wise strong bisimulation. We show that the three notions are different. We define a two-level infinitary modal logic that characterize the coarser behavioral event-wise bisimulation. Finally we show that a model and a randomized history-dependent scheduler resolving the nondeterminism renders a probabilistic trace semantics.},
author = {Nicol{\'a}s Wolovick}
}
@mastersthesis {292,
title = {No determinismo completamente medible en procesos probabil{\'\i}sticos continuos},
volume = {MSc},
year = {2012},
month = apr,
pages = {86},
school = {Universidad Nacional de C{\'o}rdoba},
type = {MSc},
address = {C{\'o}rdoba},
abstract = {Este trabajo se encuadra dentro del {\'a}mbito computacional en la sem{\'a}ntica de procesos. El concepto usual de {\textquotedblleft}Sistema de Transici{\'o}n Etiquetado{\textquotedblright} (LTS{\textemdash}Labelled transition system) s{\'o}lo permite modelar sistemas cuyo espacio de estados tiene dimensi{\'o}n finita o, a lo sumo, numerable. Mediante herramientas matem{\'a}ticas topol{\'o}gicas y de teor{\'\i}a de
la medida, el grupo de Jos{\'e}e Desharnais extiende la expresividad del modelo a espacios medibles de dimensi{\'o}n potencialmente infinita no numerable. A estos nuevos sistemas se los denomina {\textquotedblleft}procesos de Markov etiquetados{\textquotedblright} (LMP{\textemdash}Labelled Markov processes).
Los LMPs no consideran situaciones de no determinismo interno, que son comunes tanto en situaciones de laboratorio como en las aplicaciones a casos espec{\'\i}ficos de la vida real. Para suplir esta falencia surgen los {\textquotedblleft}procesos de Markov etiquetados no deterministas{\textquotedblright} (NLMPs{\textemdash}Non-deterministic labelled Markov processes). El estudio de estos procesos por
parte del grupo de D{\textquoteright}Argenio revela diferencias entre las diversas nociones de bisimulaci{\'o}n del modelo, tanto en casos determin{\'\i}sticos como en el escenario m{\'a}s general. Aqu{\'\i} se busca acercar dichas nociones.
El presente trabajo extiende la teor{\'\i}a de los procesos de Markov etiquetados no deterministas, otorg{\'a}ndole a estos sistemas una estructura medible sobre su espacio de
etiquetas. Las bisimulaciones, la l{\'o}gica asociada y todos los resultados principales para los NLMPs se adaptan a la teor{\'\i}a con estructura en las etiquetas. Se prueba adem{\'a}s que
este nuevo modelo es un caso particular del anterior, en el cual dejan de ser v{\'a}lidos ciertos sistemas ideados por el grupo de D{\textquoteright}Argenio, que verifican (en procesos sin estructura en
las etiquetas) la existencia de diferencias entre las relaciones de bisimulaci{\'o}n.},
keywords = {Labelled Markov processes, LMP, measurable non-determinism, NLMP, structured NLMP},
author = {Carlos E. Budde}
}
@inproceedings {123,
title = {Probabilistic Transition System Specification: Congruence and Full Abstraction of Bisimulation},
booktitle = {Foundations of Software Science and Computational Structures - 15th International Conference, FOSSACS 2012, Held as Part of ETAPS 2012, Tallinn, Estonia, March 24 - April 1, 2012. Proceedings},
series = {Lecture Notes in Computer Science},
volume = {7213},
year = {2012},
pages = {452--466},
publisher = {Springer},
organization = {Springer},
abstract = {We present a format for the specification of probabilistic transition systems that guarantees that bisimulation equivalence is a congruence for any operator defined in this format. In this sense, the format is somehow comparable to the ntyft/ntyxt format in a non-probabilistic setting. We also study the modular construction of probabilistic transition systems specifications and prove that some standard conservative extension theorems also hold in our setting. Finally, we show that the trace congruence for image-finite processes induced by our format is precisely bisimulation on probabilistic systems.},
isbn = {978-3-642-28728-2},
doi = {10.1007/978-3-642-28729-9_30},
author = {Pedro R. D{\textquoteright}Argenio and Mat{\'\i}as David Lee},
editor = {Lars Birkedal}
}
@article {128,
title = {q-state Potts model metastability study using optimized GPU-based Monte Carlo algorithms},
journal = {Computer Physics Communications},
volume = {183},
number = {8},
year = {2012},
pages = {1578--1587},
abstract = {We implemented a GPU-based parallel code to perform Monte Carlo simulations of the two-dimensional q-state Potts model. The algorithm is based on a checkerboard update scheme and assigns independent random number generators to each thread. The implementation allows to simulate systems up to $\sim 10^9$ spins with an average time per spin flip of 0.147 ns on the fastest GPU card tested, representing a speedup up to 155{\texttimes}, compared with an optimized serial code running on a high-end CPU.
The possibility of performing high speed simulations at large enough system sizes allowed us to provide a positive numerical evidence about the existence of metastability on very large systems based on Binder's criterion, namely, on the existence or not of specific heat singularities at spinodal temperatures different of the transition one.},
doi = {10.1016/j.cpc.2012.02.026},
author = {Ezequiel E. Ferrero and Juan Pablo De Francesco and Nicol{\'a}s Wolovick and Sergio A. Cannas}
}
@article {124,
title = {Reconciling real and stochastic time: the need for probabilistic refinement},
journal = {Formal Aspects of Computing},
volume = {24},
number = {4-6},
year = {2012},
pages = {497--518},
abstract = {We conservatively extend an ACP-style discrete-time process theory with discrete stochastic delays. The semantics of the timed delays relies on time additivity and time determinism, which are properties that enable us to merge subsequent timed delays and to impose their synchronous expiration. Stochastic delays, however, interact with respect to a so-called race condition that determines the set of delays that expire first, which is guided by an (implicit) probabilistic choice. The race condition precludes the property of time additivity as the merger of stochastic delays alters this probabilistic behavior. To this end, we resolve the race condition using conditionally-distributed unit delays. We give a sound and ground-complete axiomatization of the process theory comprising the standard set of ACP-style operators. In this generalized setting, the alternative composition is no longer associative, so we have to resort to special normal forms that explicitly resolve the underlying race condition. Our treatment succeeds in the initial challenge to conservatively extend standard time with stochastic time. However, the {\textquoteleft}dissection{\textquoteright} of the stochastic delays to conditionally-distributed unit delays comes at a price, as we can no longer relate the resolved race condition to the original stochastic delays. We seek a solution in the field of probabilistic refinements that enable the interchange of probabilistic and nondeterministic choices.},
doi = {10.1007/s00165-012-0230-y},
author = {Jasen Markovski and Pedro R. D{\textquoteright}Argenio and Jos C. M. Baeten and Erik P. de Vink}
}
@inproceedings {185,
title = {Taint Analysis of Security Code in the KLEE Symbolic Execution Engine},
booktitle = {Information and Communications Security - 14th International Conference, ICICS 2012, Hong Kong, China, October 29-31, 2012. Proceedings},
year = {2012},
pages = {264--275},
abstract = {We analyse the security of code by extending the KLEE symbolic execution engine with a tainting mechanism that tracks information flows of data. We consider both simple flows from direct assignment operations, and (more subtle) indirect flows inferred from the control flow. Our mechanism prevents overtainting by using a region-based static analysis provided by LLVM, the compiler infrastructure machine on which KLEE runs. We rigorously define taint propagation in a formal LLVM intermediate representation semantics, and show the correctness of our method. We illustrate the mechanism with several examples, showing how we use tainting to prove confidentiality and integrity properties.},
isbn = {978-3-642-34128-1},
doi = {10.1007/978-3-642-34129-8_23},
author = {Ricardo Corin and Felipe Andr{\'e}s Manzano},
editor = {Tat Wing Chim and Tsz Hon Yuen}
}
@inproceedings {122,
title = {Tree rules in probabilistic transition system specifications with negative and quantitative premises},
booktitle = {Proceedings Combined 19th International Workshop on Expressiveness in Concurrency and 9th Workshop on Structured Operational Semantics},
series = {EPTCS},
volume = {89},
year = {2012},
pages = {115--130},
abstract = {Probabilistic transition system specifications (PTSSs) in the ntmufnu/ntmuxnu format provide structural operational semantics for Segala-type systems that exhibit both probabilistic and nondeterministic behavior and guarantee that bisimilarity is a congruence. Similar to the nondeterministic case of rule format tyft/tyxt, we show that the well-foundedness requirement is unnecessary in the probabilistic setting. To achieve this, we first define an extended version of the ntmufnu/ntmuxnu format in which quantitative premises and conclusions include nested convex combinations of distributions. This format also guarantees that bisimilarity is a congruence. Then, for a given (possibly non-well-founded) PTSS in the new format, we construct an equivalent well-founded transition system consisting of only rules of the simpler (well-founded) probabilistic ntree format. Furthermore, we develop a proof-theoretic notion for these PTSSs that coincides with the existing stratification-based meaning in case the PTSS is stratifiable. This continues the line of research lifting structural operational semantic results from the nondeterministic setting to systems with both probabilistic and nondeterministic behavior.},
doi = {10.4204/EPTCS.89.9},
author = {Mat{\'\i}as David Lee and Daniel Gebler and Pedro R. D{\textquoteright}Argenio},
editor = {Bas Luttik and Michel A. Reniers}
}
@article {183,
title = {Verified Cryptographic Implementations for TLS},
journal = {ACM Trans. Inf. Syst. Secur.},
volume = {15},
year = {2012},
pages = {3},
abstract = {We narrow the gap between concrete implementations of cryptographic protocols and their verified models. We develop and verify a small functional implementation of the Transport Layer Security protocol (TLS 1.0). We make use of the same executable code for interoperability testing against mainstream implementations for automated symbolic cryptographic verification and automated computational cryptographic verification. We rely on a combination of recent tools and also develop a new tool for extracting computational models from executable code. We obtain strong security guarantees for TLS as used in typical deployments.},
doi = {10.1145/2133375.2133378},
author = {Karthikeyan Bhargavan and C{\'e}dric Fournet and Ricardo Corin and Eugen Zalinescu}
}
@article {126,
title = {Boolean factor Congruences and Property (*)},
journal = {International Journal of Algebra and Computation},
volume = {21},
number = {6},
year = {2011},
pages = {931--950},
abstract = {A variety V has Boolean factor congruences (BFC) if the set of factor congruences of any algebra in V is a distributive sublattice of its congruence lattice; this property holds in rings with unit and in every variety which has a semilattice operation. BFC has a prominent role in the study of uniqueness of direct product representations of algebras, since it is a strengthening of the refinement property.
We provide an explicit Mal{\textquoteright}cev condition for BFC. With the aid of this condition, it is shown that BFC is equivalent to a variant of the definability property (*), an open problem in Willard{\textquoteright}s work [9].},
doi = {10.1142/S021819671100656X},
author = {Pedro S{\'a}nchez Terraf}
}
@conference {186,
title = {Efficient Symbolic Execution for Analysing Cryptographic Protocol Implementations},
booktitle = {Engineering Secure Software and Systems - Third International Symposium, ESSoS 2011, Madrid, Spain, February 9-10, 2011. Proceedings},
year = {2011},
pages = {58--72},
abstract = {The analysis of code that uses cryptographic primitives is unfeasible with current state-of-the-art symbolic execution tools. We develop an extension that overcomes this limitation by treating certain concrete functions, like cryptographic primitives, as symbolic functions whose execution analysis is entirely avoided; their behaviour is in turn modelled formally via rewriting rules. We define concrete and symbolic semantics within a (subset) of the low-level virtual machine LLVM. We then show our approach sound by proving operational correspondence between the two semantics. We present a prototype to illustrate our approach and discuss next milestones towards the symbolic analysis of fully concurrent cryptographic protocol implementations.},
isbn = {978-3-642-19124-4},
doi = {10.1007/978-3-642-19125-1_5},
author = {Ricardo Corin and Felipe Andr{\'e}s Manzano},
editor = {{\'U}lfar Erlingsson and Roel Wieringa and Nicola Zannone}
}
@phdthesis {173,
title = {Implementaci{\'o}n eficiente de construcciones de alto nivel para la programaci{\'o}n concurrente},
volume = {PhD},
year = {2011},
school = {Universidad Nacional de C{\'o}rdoba},
type = {PhD},
address = {C{\'o}rdoba},
abstract = {En el campo cient{\'\i}fico y tecnol{\'o}gico actual es de amplia aceptaci{\'o}n que el desarrollo de programas concurrentes es una tarea dif{\'\i}cil y adem{\'a}s necesaria. Aunque el inter{\'e}s en esta actividad no es nuevo, ha tomado particular relevancia en los {\'u}ltimos tiempos debido al desarrollo y popularidad de las arquitecturas paralelas.
Con el fin de aprovechar estas arquitecturas se han propuesto varias construcciones de alto nivel para los lenguajes de programaci{\'o}n, que funcionan como abstracciones sobre el hardware o el sistema operativo subyacente. En este sentido, las construcciones cl{\'a}sicas denominadas regiones cr{\'\i}ticas condicionales y su posterior evoluci{\'o}n en monitores de se{\~n}alamiento autom{\'a}tico, proveen abstracciones sobre el hardware o el sistema operativo subyacente que posibilitan el desarrollo de programas con variables compartidas y mecanismos de sincronizaci{\'o}n entre distintos procesos. Las mismas son herramientas interesante para desarrollar programas concurrentes de manera simple y de forma correcta. Lamentablemente estas propuestas no han sido incluidas como parte del dise{\~n}o de los lenguajes de programaci{\'o}n debido principalmente a problemas de eficiencia en sus implementaciones.
A partir de las nuevas posibilidades abiertas por el desarrollo reciente de las herramientas denominadas en forma gen{\'e}rica demostradores autom{\'a}ticos de teoremas, aplicadas dentro de t{\'e}cnicas de interpretaci{\'o}n abstracta, se presenta una nueva perspectiva en la implementaci{\'o}n eficiente de construcciones de alto nivel para la programaci{\'o}n concurrente. El trabajo de tesis doctoral aqu{\'\i} presentado aborda esta problem{\'a}tica mediante la adaptaci{\'o}n de estas metodolog{\'\i}as, no ya para verificar programas, si no para modificarlos haci{\'e}ndolos m{\'a}s eficientes. M{\'a}s espec{\'\i}ficamente, desarrollaremos m{\'e}todos autom{\'a}ticos para mejorar las implementaciones de regiones cr{\'\i}ticas condicionales y monitores con se{\~n}alamiento autom{\'a}tico mediante el uso de aquellas t{\'e}cnicas y herramientas. De tal manera, intentaremos probar la factibilidad de estas construcciones cl{\'a}sicas para la programaci{\'o}n concurrente.},
author = {Dami{\'a}n Barsotti}
}
@conference {DBLP:conf/hybrid/FranzleHHWZ11,
title = {Measurability and safety verification for stochastic hybrid systems},
booktitle = {Proceedings of the 14th ACM International Conference on Hybrid Systems: Computation and Control, HSCC 2011},
year = {2011},
pages = {43--52},
publisher = {ACM},
organization = {ACM},
abstract = {Dealing with the interplay of randomness and continuous time is important for the formal verification of many real systems. Considering both facets is especially important for wireless sensor networks, distributed control applications, and many other systems of growing importance. An important traditional design and verification goal for such systems is to ensure that unsafe states can never be reached. In the stochastic setting, this translates to the question whether the probability to reach unsafe states remains tolerable. In this paper, we consider stochastic hybrid systems where the continuous-time behaviour is given by differential equations, as for usual hybrid systems, but the targets of discrete jumps are chosen by probability distributions. These distributions may be general measures on state sets. Also non-determinism is supported, and the latter is exploited in an abstraction and evaluation method that establishes safe upper bounds on reachability probabilities. To arrive there requires us to solve semantic intricacies as well as practical problems. In particular, we show that measurability of a complete system follows from the measurability of its constituent parts. On the practical side, we enhance tool support to work effectively on such general models. Experimental evidence is provided demonstrating the applicability of our approach on three case studies, tackled using a prototypical implementation.},
isbn = {978-1-4503-0629-4},
doi = {10.1145/1967701.1967710},
author = {Martin Fr{\"a}nzle and Ernst Moritz Hahn and Holger Hermanns and Nicol{\'a}s Wolovick and Lijun Zhang}
}
@article {DBLP:journals/mscs/BartheDR11,
title = {Secure information flow by self-composition},
journal = {Mathematical Structures in Computer Science},
volume = {21},
number = {6},
year = {2011},
pages = {1207--1252},
abstract = {Information flow policies are confidentiality policies that control information leakage through program execution. A common way to enforce secure information flow is through information flow type systems. Although type systems are compositional and usually enjoy decidable type checking or inference, their extensibility is very poor: type systems need to be redefined and proved sound for each new variation of security policy and programming language for which secure information flow verification is desired.
In contrast, program logics offer a general mechanism for enforcing a variety of safety policies, and for this reason are favoured in Proof Carrying Code, which is a promising security architecture for mobile code. However, the encoding of information flow policies in program logics is not straightforward because they refer to a relation between two program executions.
The purpose of this paper is to investigate logical formulations of secure information flow based on the idea of self-composition, which reduces the problem of secure information flow of a program $P$ to a safety property for a program $\hat{P}$ derived from $P$ by composing $P$ with a renaming of itself. Self-composition enables the use of standard techniques for information flow policy verification, such as program logics and model checking, that are suitable in Proof Carrying Code infrastructures.
We illustrate the applicability of self-composition in several settings, including different security policies such as non-interference and controlled forms of declassification, and programming languages including an imperative language with parallel composition, a non-deterministic language and, finally, a language with shared mutable data structures.},
doi = {10.1017/S0960129511000193},
author = {Gilles Barthe and Pedro R. D{\textquoteright}Argenio and Tamara Rezk}
}
@article {DBLP:journals/cleiej/LeeD11,
title = {Semantics for Interactive Sequential Systems and Non-Interference Properties},
journal = {CLEI Electron. J.},
volume = {14},
number = {3},
year = {2011},
abstract = {An interactive system is a system that allows communication with the users. This communication is modeled through input and output actions. Input actions are controllable by a user of the system, while output actions are controllable by the system. Standard semantics for sequential system [1,2], are not suitable in this context because they do not distinguish between the different kinds of actions. Applying a similar approach to the one used in [2] we define semantics for interactive systems. In this setting, a particular semantic is associated with a "notion of observability". These notions of observability are used as parameters of a general definition of non-interference. We show that some previous versions of the non-interference property based on traces semantic, weak bisimulation and refinement, are actually instances of the observability-based non-interference property presented here. Moreover, this allows us to show some results in a general way and to provide a better understanding of the security properties. },
url = {http://www.clei.cl/cleiej/paper.php?id=228},
author = {Mat{\'\i}as David Lee and Pedro R. D{\textquoteright}Argenio}
}
@article {125,
title = {Unprovability of the logical characterization of bisimulation},
journal = {Information and Computation},
volume = {209},
number = {7},
year = {2011},
pages = {1048{\textendash}1056},
abstract = {We quickly review labelled Markov processes (LMP) and provide a counterexample showing that in general measurable spaces, event bisimilarity and state bisimilarity differ in LMP. This shows that the Hennessy{\textendash}Milner logic proposed by Desharnais does not characterize state bisimulation in non-analytic measurable spaces. Furthermore we show that, under current foundations of Mathematics, such logical characterization is unprovable for spaces that are projections of a coanalytic set. Underlying this construction there is a proof that stationary Markov processes over general measurable spaces do not have semi-pullbacks.},
doi = {10.1016/j.ic.2011.02.003},
author = {Pedro S{\'a}nchez Terraf}
}
@conference {182,
title = {Assume-guarantee Reasoning with ioco Testing Relation},
booktitle = {Proceedings of the 22nd IFIP International Conference on Testing Software and Systems: Short Papers},
year = {2010},
pages = {103{\textendash}107},
abstract = {Compositional reasoning is typically based on assume-guarantee reasoning principles, which consider each component separately and take into account assumptions about the context of the component. This paper presents a combination of the assume-guarantee paradigm and ioco, a formal conformance relation for model-based testing that works on input-output transition systems (IOTS). We show that, under certain restrictions, assume-guarantee reasoning can be applied in the ioco context, enabling to check ioco-conformance by testing components{\textquoteright} system separately. We improve on previous results, where specifications are required to be given as components, allowing the specifications to be complete systems. Moreover, we prove that assume-guarantee reasoning can also be applied even when hiding internal communication between components.},
isbn = {978-2-89522-136-4},
url = {http://crim.ca/Publications/2010/documents/plein_texte/ASD_PetA_al_201010_ICTSS_sp.pdf},
author = {Laura Brand{\'a}n Briones}
}
@conference {DBLP:journals/corr/abs-1006-5096,
title = {Automatic Probabilistic Program Verification through Random Variable Abstraction},
booktitle = {Proceedings Eighth Workshop on Quantitative Aspects of Programming Languages, QAPL 2010},
volume = {EPTCS 28},
year = {2010},
pages = {34{\textendash}47},
abstract = {The weakest pre-expectation calculus has been proved to be a mature theory to analyze quantitative properties of probabilistic and nondeterministic programs. We present an automatic method for proving quantitative linear properties on any denumerable state space using iterative backwards fixed point calculation in the general framework of abstract interpretation. In order to accomplish this task we present the technique of random variable abstraction (RVA) and we also postulate a sufficient condition to achieve exact fixed point computation in the abstract domain. The feasibility of our approach is shown with two examples, one obtaining the expected running time of a probabilistic program, and the other the expected gain of a gambling strategy.
Our method works on general guarded probabilistic and nondeterministic transition systems instead of plain pGCL programs, allowing us to easily model a wide range of systems including distributed ones and unstructured programs. We present the operational and weakest precondition semantics for these programs and prove their equivalence.},
doi = {10.4204/EPTCS.28.3},
author = {Dami{\'a}n Barsotti and Nicol{\'a}s Wolovick}
}
@phdthesis {176,
title = {On the automatic verification of Distributed Probabilistic Automata with Partial Information},
volume = {PhD},
year = {2010},
school = {Universidad Nacional de C{\'o}rdoba},
type = {PhD},
address = {C{\'o}rdoba},
abstract = {We study concurrent systems involving probabilities and non-determinism. Specifically, we focus on the automatic verification of distributed systems, in which each component can access only a limited portion of the information in the system.
Although model checking algorithms for Markov decision processes (MDPs) can be applied to distributed systems, such algorithms assume that all components in the system have access to all the information. As a consequence, some correct distributed systems are deemed incorrect when we analyse them using algorithms for MDPs.
In this thesis, we present model checking algorithms for distributed systems involving probabilities and nondeterminism.
A relevant contribution is the result that there exists no algorithm to solve the model checking problem in a completely automated fashion. That is, there exist no algorithm so that, for all distributed systems and properties, the algorithm decides whether the property holds or not.
Despite of this result, we present two algorithms: one of these algorithms is able to detect that some systems are correct, while the other detects incorrect ones.
In addition, we present a new adaptation of the POR technique. Our adaptation profits from the fact that a component in a concurrent system has limited access to the information stored by other components. Our technique provides more effective reductions than those obtained using existing techniques for MDPs.
We conclude the thesis by presenting case studies in which our algorithms yield better results when compared to their counterparts for MDPs.},
author = {Sergio Giro}
}
@article {DBLP:journals/entcs/LeeD10,
title = {Describing Secure Interfaces with Interface Automata},
journal = {Electr. Notes Theor. Comput. Sci.},
volume = {264},
number = {1},
year = {2010},
pages = {107{\textendash}123},
abstract = {Interface automata are a model that allows for the representation of stateful interfaces. In this paper we introduce a variant of interface automata, which we call interface structure for security (ISS), that allows for the modelling of security. We focus on the property of non interference, more precisely in bisimulation-based non interference for reactive systems. We define the notion of compatible interfaces in this setting meaning that they can be composed so that a secure interface can be synthesized from the composition. In fact, we provide an algorithm that determines whether an ISS can be made secure by controlling (more specifically, pruning) some public input actions, and if so, synthesize the secure ISS. In addition, we also provide some sufficient conditions on the components ISS to ensure that their composition is secure (and hence no synthesis process is needed).},
doi = {10.1016/j.entcs.2010.07.008},
author = {Mat{\'\i}as David Lee and Pedro R. D{\textquoteright}Argenio}
}
@conference {DBLP:conf/sccc/LeeD10,
title = {A Refinement Based Notion of Non-interference for Interface Automata: Compositionality, Decidability and Synthesis},
booktitle = {SCCC 2010, Proceedings of the XXIX International Conference of the Chilean Computer Science Society},
year = {2010},
pages = {280{\textendash}289},
publisher = {IEEE Computer Society},
organization = {IEEE Computer Society},
abstract = {Interface automata (IA) introduce a framework to model stateful interfaces. Interface structures for security (ISS) extend IA to cope with security properties. In this article, we argue that bisimulation-based non interference is not quite appropriate to characterize security on ISS. We instead introduce refinement-based variants of non-interference that fit better in this context. Moreover, we show that these new properties are not preserved by composition, but give sufficient conditions to ensure compositionality. We give two algorithms. The first one determines if an ISS satisfies the refinement-based non-interference property. The second one, determines if an ISS can be made secure by controlling some input actions and, if so, synthesizes the secure ISS.},
doi = {10.1109/SCCC.2010.14},
author = {Mat{\'\i}as David Lee and Pedro R. D{\textquoteright}Argenio}
}
@conference {DBLP:conf/spin/CalinCDHZ10,
title = {Time-Bounded Reachability in Distributed Input/Output Interactive Probabilistic Chains},
booktitle = {Model Checking Software - 17th International SPIN Workshop},
volume = {LNCS 6349},
year = {2010},
pages = {193{\textendash}211},
publisher = {Springer},
organization = {Springer},
abstract = {We develop an algorithm to compute timed reachability probabilities for distributed models which are both probabilistic and nondeterministic. To obtain realistic results we consider the recently introduced class of (strongly) distributed schedulers, for which no analysis techniques are known.
Our algorithm is based on reformulating the nondeterministic models as parametric ones, by interpreting scheduler decisions as parameters. We then apply the PARAM tool to extract the reachability probability as a polynomial function, which we optimize using nonlinear programming.},
doi = {10.1007/978-3-642-16164-3_15},
author = {Georgel Calin and Pepijn Crouzen and Pedro R. D{\textquoteright}Argenio and Ernst Moritz Hahn and Lijun Zhang}
}
@conference {188,
title = {Cryptographic Protocol Synthesis and Verification for Multiparty Sessions},
booktitle = {Proceedings of the 22nd IEEE Computer Security Foundations Symposium, CSF 2009, Port Jefferson, New York, USA, July 8-10, 2009},
year = {2009},
pages = {124{\textendash}140},
publisher = {IEEE Computer Society},
organization = {IEEE Computer Society},
abstract = {We present the design and implementation of a compiler that, given high-level multiparty session descriptions, generates custom cryptographic protocols. Our sessions specify pre-arranged patterns of message exchanges and data accesses between distributed participants. They provide each participant with strong security guarantees for all their messages. Our compiler generates code for sending and receiving these messages, with cryptographic operations and checks, in order to enforce these guarantees against any adversary that may control both the network and some session participants. We verify that the generated code is secure by relying on a recent type system for cryptography. Most of the proof is performed by mechanized type checking and does not rely on the correctness of our compiler.We obtain the strongest session security guarantees to date in a model that captures the executable details of protocol code. We illustrate and evaluate our approach on a series of protocols inspired by web services.
},
isbn = {978-0-7695-3712-2},
doi = {10.1109/CSF.2009.26},
author = {Karthikeyan Bhargavan and Ricardo Corin and Pierre-Malo Deni{\'e}lou and C{\'e}dric Fournet and James J. Leifer}
}
@article {DBLP:journals/entcs/GiroD09,
title = {On the Expressive Power of Schedulers in Distributed Probabilistic Systems},
journal = {Electr. Notes Theor. Comput. Sci.},
volume = {253},
number = {3},
year = {2009},
pages = {45{\textendash}71},
abstract = {In this paper, we consider several subclasses of distributed schedulers and we investigate the ability of these subclasses to attain worst-case probabilities.
Based on previous work, we consider the class of distributed schedulers, and we prove that randomization adds no extra power to distributed schedulers when trying to attain the supremum probability of any measurable set, thus showing that the subclass of deterministic schedulers suffices to attain the worst-case probability. Traditional schedulers are a particular case of distributed schedulers. So, since our result holds for any measurable set, our proof generalizes the well-known result that randomization adds no extra power to schedulers when trying to maximize the probability of an {$\omega$}-regular language. However, non-Markovian schedulers are needed to attain supremum probabilities in distributed systems.
We develop another class of schedulers (the strongly distributed schedulers) that restricts the nondeterminism concerning the order in which components execute. We compare this class against previous approaches in the same direction, showing that our definition is an important contribution. For this class, we show that randomized and non-Markovian schedulers are needed to attain worst-case probabilities.
We also discuss the subclass of finite-memory schedulers, showing the intractability of the model checking problem for these schedulers.},
doi = {10.1016/j.entcs.2009.10.005},
author = {Sergio Giro and Pedro R. D{\textquoteright}Argenio}
}
@conference {DBLP:conf/qest/DArgenioWTC09,
title = {Nondeterministic Labeled Markov Processes: Bisimulations and Logical Characterization},
booktitle = {QEST 2009, Sixth International Conference on the Quantitative Evaluation of Systems},
year = {2009},
pages = {11{\textendash}20},
publisher = {IEEE Computer Society},
organization = {IEEE Computer Society},
abstract = {We extend the theory of labeled Markov processes with internal nondeterminism, a fundamental concept for the further development of a process theory with abstraction on nondeterministic continuous probabilistic systems. We define nondeterministic labeled Markov processes (NLMP) and provide both a state based bisimulation and an event based bisimulation. We show the relation between them, including that the largest state bisimulation is also an event bisimulation. We also introduce a variation of the Hennessy-Milner logic that characterizes event bisimulation and that is sound w.r.t. the state based bisimulation for arbitrary NLMP. This logic, however, is infinitary as it contains a denumerable {$\vee$}. We then introduce a finitary sublogic that characterizes both state and event bisimulation for image finite NLMP whose underlying measure space is also analytic. Hence, in this setting, all notions of bisimulation we deal with turn out to be equal.},
isbn = {978-0-7695-3808-2},
doi = {10.1109/QEST.2009.17},
author = {Pedro R. D{\textquoteright}Argenio and Nicol{\'a}s Wolovick and Pedro S{\'a}nchez Terraf and Pablo Celayes}
}
@conference {DBLP:conf/icst/WolovickDQ09,
title = {Optimizing Probabilities of Real-Time Test Case Execution},
booktitle = {ICST 2009, Second International Conference on Software Testing Verification and Validation},
year = {2009},
pages = {446{\textendash}455},
publisher = {IEEE Computer Society},
organization = {IEEE Computer Society},
abstract = {Model-based test derivation for real-time system has been proven to be a hard problem for exhaustive test suites. Therefore, techniques for real-time testing do not aim to exhaustiveness but instead respond to particular coverage criteria. Since it is not feasible to generate complete test suites for real time systems, it is very important that test cases are executed in a way that they can achieve the best possible result. As a consequence, it is imperative to increase the probability of success of a test case execution (by {\textquoteleft}success{\textquoteright} we actually mean {\textquoteleft}the test finds an error{\textquoteright}). This work presents a technique to guide the execution of a test case towards a particular objective with the highest possible probability. The technique takes as a starting point a model described in terms of an input/output stochastic automata, where input actions are fully controlled by the tester and the occurrence time of output action responds to uniform distributions. Derived test cases are sequences of inputs and outputs actions. This work discusses several techniques to obtain the optimum times in which the tester must feed the inputs of the test case in order to achieve maximum probability of success in a test case execution. In particular, we show this optimization problem is equivalent to maximizing the sectional volume of a convex polytope when the probability distributions involved are uniform.},
isbn = {978-0-7695-3601-9},
doi = {10.1109/ICST.2009.59},
author = {Nicol{\'a}s Wolovick and Pedro R. D{\textquoteright}Argenio and Hongyang Qu}
}
@conference {DBLP:conf/concur/GiroDF09,
title = {Partial Order Reduction for Probabilistic Systems: A Revision for Distributed Schedulers},
booktitle = {CONCUR 2009 - Concurrency Theory, 20th International Conference},
volume = {LNCS 5710},
year = {2009},
pages = {338{\textendash}353},
publisher = {Springer},
organization = {Springer},
abstract = {The technique of partial order reduction (POR) for probabilistic model checking prunes the state space of the model so that a maximizing scheduler and a minimizing one persist in the reduced system. This technique extends Peled{\textquoteright}s original restrictions with a new one specially tailored to deal with probabilities. It has been argued that not all schedulers provide appropriate resolutions of nondeterminism and they yield overly safe answers on systems of distributed nature or that partially hide information. In this setting, maximum and minimum probabilities are obtained considering only the subset of so-called distributed or partial information schedulers. In this article we revise the technique of partial order reduction (POR) for LTL properties applied to probabilistic model checking. Our reduction ensures that distributed schedulers are preserved. We focus on two classes of distributed schedulers and show that Peled{\textquoteright}s restrictions are valid whenever schedulers use only local information. We show experimental results in which the elimination of the extra restriction leads to significant improvements.
},
isbn = {978-3-642-04080-1},
doi = {10.1007/978-3-642-04081-8_23},
author = {Sergio Giro and Pedro R. D{\textquoteright}Argenio and Luis Mar{\'\i}a {Ferrer Fioriti}}
}
@proceedings {DBLP:journals/corr/abs-0912-2128,
title = {Proceedings First Workshop on Quantitative Formal Methods: Theory and Applications},
journal = {QFM},
volume = {13},
year = {2009},
doi = {10.4204/EPTCS.13},
url = {http://arxiv.org/abs/0912.2128},
editor = {Suzana Andova and Annabelle McIver and Pedro R. D{\textquoteright}Argenio and Pieter J. L. Cuijpers and Jasen Markovski and Carroll Morgan and Manuel N{\'u}{\~n}ez}
}
@conference {187,
title = {Secure Enforcement for Global Process Specifications},
booktitle = {CONCUR 2009 - Concurrency Theory, 20th International Conference, CONCUR 2009, Bologna, Italy, September 1-4, 2009. Proceedings},
year = {2009},
pages = {511{\textendash}526},
publisher = {Springer},
organization = {Springer},
abstract = {Distributed applications may be specified as parallel compositions of processes that summarize their global interactions and hide local implementation details. These processes define a fixed protocol (also known as a contract, or a session) which may be used for instance to compile or verify code for these applications.
Security is a concern when the runtime environment for these applications is not fully trusted. Then, parts of their implementation may run on remote, corrupted machines, which do not comply with the global process specification. To mitigate this concern, one may write defensive implementations that monitor the application run and perform cryptographic checks. However, hand crafting such implementations is ad hoc and error-prone.
We develop a theory of secure implementations for process specifications. We propose a generic defensive implementation scheme, relying on history-tracking mechanisms, and we identify sufficient conditions on processes, expressed as a new type system, that ensure that our implementation is secure for all integrity properties. We illustrate our approach on a series of examples and special cases, including an existing implementation for sequential multiparty sessions.},
isbn = {978-3-642-04080-1},
doi = {10.1007/978-3-642-04081-8_34},
author = {J{\'e}r{\'e}my Planul and Ricardo Corin and C{\'e}dric Fournet},
editor = {Mario Bravetti and Gianluigi Zavattaro}
}
@conference {DBLP:conf/sbmf/Giro09,
title = {Undecidability Results for Distributed Probabilistic Systems},
booktitle = {Formal Methods: Foundations and Applications, 12th Brazilian Symposium on Formal Methods, SBMF 2009},
volume = {LNCS 5902},
year = {2009},
pages = {220{\textendash}235},
publisher = {Springer},
organization = {Springer},
abstract = {In the verification of concurrent systems involving probabilities, the aim is to find out the maximum/minimum probability that a given event occurs (examples of such events being {\textquotedblleft}the system reaches a failure state{\textquotedblright},{\textquotedblleft}a message is delivered{\textquotedblright}). Such extremal probabilities are obtained by quantifying over all the possible ways in which the processes may be interleaved. Interleaving choices are considered a particular case of nondeterministic behaviour. Such behaviour is dealt with by considering schedulers that resolve the nondeterministic choices. Each scheduler determines a Markov chain for which actual probabilities can be calculated. In the recent literature on distributed systems, particular attention has been paid to the fact that, in order to obtain accurate results, the analysis must rely on partial information schedulers, instead of full-history dependent schedulers used in the setting of Markov decision processes. In this paper, we present undecidability results for distributed schedulers. These schedulers were devised in previous works, and aim to capture the fact that each process has partial information about the actual state of the system. Some of the undecidability results we present are particularly impressive: in the setting of total information the same problems are inexpensive and, indeed, they are used as preprocessing steps in more general model checking algorithms.},
isbn = {978-3-642-10451-0},
doi = {10.1007/978-3-642-10452-7},
author = {Sergio Giro}
}
@article {195,
title = {Varieties with Definable Factor Congruences},
journal = {Trans. Amer. Math. Soc.},
volume = {361},
year = {2009},
pages = {5061{\textendash}5088},
author = {Pedro S{\'a}nchez Terraf and Diego Vaggione}
}
@conference {DBLP:conf/sac/GiroD09,
title = {On the verification of probabilistic I/O automata with unspecified rates},
booktitle = {Proceedings of the 2009 ACM Symposium on Applied Computing (SAC)},
year = {2009},
pages = {582{\textendash}586},
publisher = {ACM},
organization = {ACM},
abstract = {We consider the Probabilistic I/O Automata framework, for which we address the verification of reachability properties in case the rates (also called delay parameters) are unspecified. We show that the problem of finding (or even approximating) the supremum probability that a set of states is reached is undecidable. However, we give an algorithm to obtain a non-trivial over-estimation of this value. We explain why this over-estimation may result useful for many systems. Finally, in order to compare our approach against Markov Decision Processes, we study a simple protocol for anonymous fair service. In this case, the over-estimation computed over the PIOA gives a more realistic result than the exact computation over the MDP.},
isbn = {978-1-60558-166-8},
doi = {10.1145/1529282.1529406},
author = {Sergio Giro and Pedro R. D{\textquoteright}Argenio}
}
@book {172,
title = {C{\'a}lculo de programas},
year = {2008},
publisher = {Universidad Nacional de C{\'o}rdoba},
organization = {Universidad Nacional de C{\'o}rdoba},
address = {C{\'o}rdoba},
abstract = {La programaci{\'o}n es una actividad que ofrece desaf{\'\i}os intelectuales interesantes, en la cual se combinan arm{\'o}nicamente la creatividad y el razonamiento riguroso. Lamentablemente no siempre es ense{\~n}ada de esta manera. Muchos cursos de introducci{\'o}n a la programaci{\'o}n se basan en el {\textquotedblleft}m{\'e}todo{\textquotedblright} de ensayo y error. Las construcciones de los lenguajes de programaci{\'o}n son presentadas s{\'o}lo operacionalmente, se estudia un conjunto de ejemplos y se espera resolver problemas nuevos por analog{\'\i}a, a{\'u}n cuando estos problemas sean radicalmente diferentes de los presentados. La presencia de errores es en estos casos m{\'a}s la regla que la excepci{\'o}n.
Existe, afortunadamente, otra forma de aproximarse a la programaci{\'o}n. Los programas pueden ser desarrollados de manera met{\'o}dica a partir de especificaciones, construyendo a la vez el programa y su demostraci{\'o}n, con esta {\'u}ltima guiando el proceso. Adem{\'a}s de su utilidad pr{\'a}ctica, este ejercicio intelectual es altamente instructivo y vuelve a la programaci{\'o}n una tarea creativa e interesante.
En este punto la l{\'o}gica matem{\'a}tica es una herramienta indispensable como ayuda para la especificaci{\'o}n y desarrollo de programas. Los programas son f{\'o}rmulas l{\'o}gicas tan complejas y voluminosas que fue dif{\'\i}cil reconocerlos como tales. Este libro comienza presentando un estilo de l{\'o}gica orientado a trabajar de manera m{\'a}s adecuada con f{\'o}rmulas de gran tama{\~n}o. A partir de su estudio se mostrar{\'a} c{\'o}mo desarrollar programas funcionales e imperativos utilizando este formalismo de manera unificada.},
isbn = {9789503306420},
author = {Javier O. Blanco and Silvina Smith and Dami{\'a}n Barsotti}
}
@article {127,
title = {Directly Indecomposables in Semidegenerate Varieties of Connected po-Groupoids},
journal = {Order},
volume = {25},
number = {4},
year = {2008},
pages = {377{\textendash}386},
abstract = {We study varieties with a term-definable poset structure, po-groupoids. It is known that connected posets have the strict refinement property (SRP). In S{\'a}nchez Terraf and Vaggione (Trans Am Math Soc, in press) it is proved that semidegenerate varieties with the SRP have definable factor congruences and if the similarity type is finite, directly indecomposables are axiomatizable by a set of first-order sentences. We obtain such a set for semidegenerate varieties of connected po-groupoids and show its quantifier complexity is bounded in general.},
doi = {10.1007/s11083-008-9101-9},
author = {Pedro S{\'a}nchez Terraf}
}
@conference {DBLP:conf/hvc/AndresDR08,
title = {Significant Diagnostic Counterexamples in Probabilistic Model Checking},
booktitle = {Hardware and Software: Verification and Testing, 4th International Haifa Verification Conference, HVC 2008},
volume = {LNCS 5394},
year = {2008},
pages = {129{\textendash}148},
publisher = {Springer},
organization = {Springer},
abstract = {This paper presents a novel technique for counterexample generation in probabilistic model checking of Markov chains and Markov Decision Processes. (Finite) paths in counterexamples are grouped together in witnesses that are likely to provide similar debugging information to the user. We list five properties that witnesses should satisfy in order to be useful as debugging aid: similarity, accuracy, originality, significance, and finiteness. Our witnesses contain paths that behave similarly outside strongly connected components.
Then, we show how to compute these witnesses by reducing the problem of generating counterexamples for general properties over Markov Decision Processes, in several steps, to the easy problem of generating counterexamples for reachability properties over acyclic Markov chains.},
isbn = {978-3-642-01701-8},
doi = {10.1007/978-3-642-01702-5_15},
author = {Miguel E. Andr{\'e}s and Pedro R. D{\textquoteright}Argenio and Peter van Rossum}
}
@conference {171,
title = {Automatic Refinement of Split Binary Semaphore},
booktitle = {Theoretical Aspects of Computing - ICTAC 2007, 4th International Colloquium, Macau, China, September 26-28, 2007, Proceedings},
volume = {4711},
number = {Lecture Notes in Computer Science},
year = {2007},
pages = {64{\textendash}78},
publisher = {Springer},
organization = {Springer},
abstract = {Binary semaphores can be used to implement conditional critical regions by using the split binary semaphore (SBS) technique. Given a specification of a conditional critical regions problem, the SBS technique provides not only the resulting programs but also some invariants which ensure the correctness of the solution. The programs obtained in this way are generally not efficient. However, they can be optimized by strengthening these invariants and using them to eliminate unnecessary tests.
We present a mechanical method to perform these optimizations. The idea is to use the backward propagation technique over a guarded transition system that models the behavior of the programs generated by the SBS. This process needs proving heavy implications and simplifying growing invariants. Our method automatically entrusts these tasks to the Isabelle theorem prover and the CVC Lite validity checker. We have tested our method on a number of classical examples from concurrent programming.},
isbn = {978-3-540-75290-5},
doi = {10.1007/978-3-540-75292-9_5},
author = {Dami{\'a}n Barsotti and Javier O. Blanco},
editor = {Cliff B. Jones and Zhiming Liu and Jim Woodcock}
}
@conference {DBLP:conf/formats/GiroD07,
title = {Quantitative Model Checking Revisited: Neither Decidable Nor Approximable},
booktitle = {Formal Modeling and Analysis of Timed Systems, 5th International Conference, FORMATS 2007},
volume = {LNCS 4763},
year = {2007},
pages = {179{\textendash}194},
publisher = {Springer},
organization = {Springer},
abstract = {Quantitative model checking computes the probability values of a given property quantifying over all possible schedulers. It turns out that maximum and minimum probabilities calculated in such a way are overestimations on models of distributed systems in which components are loosely coupled and share little information with each other (and hence arbitrary schedulers may result too powerful). Therefore, we focus on the quantitative model checking problem restricted to distributed schedulers that are obtained only as a combination of local schedulers (i.e. the schedulers of each component) and show that this problem is undecidable. In fact, we show that there is no algorithm that can compute an approximation to the maximum probability of reaching a state within a given bound when restricted to distributed schedulers.},
isbn = {978-3-540-75453-4},
doi = {10.1007/978-3-540-75454-1_14},
author = {Sergio Giro and Pedro R. D{\textquoteright}Argenio}
}
@article {DBLP:journals/fac/BarsottiNT07,
title = {Verification of clock synchronization algorithms: experiments on a combination of deductive tools},
journal = {Formal Asp. Comput.},
volume = {19},
number = {3},
year = {2007},
pages = {321{\textendash}341},
abstract = {We report on an experiment in combining the theorem prover Isabelle with automatic first-order arithmetic provers to increase automation on the verification of distributed protocols. As a case study for the experiment we verify several averaging clock synchronization algorithms. We present a formalization of Schneider{\textquoteright}s generalized clock synchronization protocol [Sch87] in Isabelle/HOL. Then, we verify that the convergence functions used in two clock synchronization algorithms, namely, the Interactive Convergence Algorithm (ICA) of Lamport and Melliar-Smith [LMS85] and the Fault-tolerant Midpoint algorithm of Lundelius{\textendash}Lynch [LL84], satisfy Schneider{\textquoteright}s general conditions for correctness. The proofs are completely formalized in Isabelle/HOL. We identify parts of the proofs which are not fully automatically proven by Isabelle built-in tactics and show that these proofs can be handled by automatic first-order provers with support for arithmetics.},
doi = {10.1007/s00165-007-0027-6},
author = {Dami{\'a}n Barsotti and Leonor Prensa Nieto and Alwen Tiu}
}
@conference {DBLP:conf/formats/WolovickJ06,
title = {A Characterization of Meaningful Schedulers for Continuous-Time Markov Decision Processes},
booktitle = {Formal Modeling and Analysis of Timed Systems, 4th International Conference, FORMATS 2006},
volume = {LNCS 4202},
year = {2006},
pages = {352{\textendash}367},
publisher = {Springer},
organization = {Springer},
abstract = {Continuous-time Markov decision process are an important variant of labelled transition systems having nondeterminism through labels and stochasticity through exponential fire-time distributions. Nondeterministic choices are resolved using the notion of a scheduler. In this paper we characterize the class of measurable schedulers, which is the most general one, and show how a measurable scheduler induces a unique probability measure on the sigma-algebra of infinite paths. We then give evidence that for particular reachability properties it is sufficient to consider a subset of measurable schedulers. Having analyzed schedulers and their induced probability measures we finally show that each probability measure on the sigma-algebra of infinite paths is indeed induced by a measurable scheduler which proves that this class is complete.},
isbn = {3-540-45026-2},
doi = {10.1007/11867340_25},
author = {Nicol{\'a}s Wolovick and Sven Johr}
}
@article {DBLP:journals/tse/BohnenkampDHK06,
title = {{MODEST}: A Compositional Modeling Formalism for Hard and Softly Timed Systems},
journal = {IEEE Trans. Software Eng.},
volume = {32},
number = {10},
year = {2006},
pages = {812--830},
abstract = {This paper presents Modest (MOdeling and DEscription language for Stochastic Timed systems), a formalism that is intended to support 1) the modular description of reactive systems{\textquoteright} behavior while covering both 2) functional and 3) nonfunctional system aspects such as timing and quality-of-service constraints in a single specification. The language contains features such as simple and structured data types, structuring mechanisms like parallel composition and abstraction, means to control the granularity of assignments, exception handling, and nondeterministic and random branching and timing. Modest can be viewed as an overarching notation for a wide spectrum of models, ranging from labeled transition systems to timed automata (and probabilistic variants thereof), as well as prominent stochastic processes such as (generalized semi-)Markov chains and decision processes. The paper describes the design rationales and details of the syntax and semantics.},
doi = {10.1109/TSE.2006.104},
author = {Henrik C. Bohnenkamp and Pedro R. D{\textquoteright}Argenio and Holger Hermanns and Joost-Pieter Katoen}
}
@article {DBLP:journals/entcs/BaierDG06,
title = {Partial Order Reduction for Probabilistic Branching Time},
journal = {Electr. Notes Theor. Comput. Sci.},
volume = {153},
number = {2},
year = {2006},
pages = {97--116},
abstract = {In the past, partial order reduction has been used successfully to combat the state explosion problem in the context of model checking for non-probabilistic systems. For both linear time and branching time specifications, methods have been developed to apply partial order reduction in the context of model checking. Only recently, results were published that give criteria on applying partial order reduction for verifying quantitative linear time properties for probabilistic systems. This paper presents partial order reduction criteria for Markov decision processes and branching time properties, such as formulas of probabilistic computation tree logic. Moreover, we provide a comparison of the results established so far about reduction conditions for Markov decision processes.},
doi = {10.1016/j.entcs.2005.10.034},
author = {Christel Baier and Pedro R. D{\textquoteright}Argenio and Marcus Gr{\"o}{\ss}er}
}
@article {DBLP:journals/iandc/LohreyDH05,
title = {Axiomatising divergence},
journal = {Inf. Comput.},
volume = {203},
number = {2},
year = {2005},
pages = {115--144},
abstract = {When a process is capable of executing an unbounded number of non-observable actions it is said to be divergent. Different capabilities of an observer to identify this phenomena along the execution leads to different divergent sensitive semantics. This paper develops sound and complete axiomatisations for the divergence sensitive spectrum of weak bisimulation equivalence. The axiomatisations separates the axioms concerning recursion and those that capture the essence of diverging behaviour.},
doi = {10.1016/j.ic.2005.05.007},
author = {Markus Lohrey and Pedro R. D{\textquoteright}Argenio and Holger Hermanns}
}
@conference {DBLP:conf/concur/DArgenioG05,
title = {The Coarsest Congruence for Timed Automata with Deadlines Contained in Bisimulation},
booktitle = {{CONCUR} 2005 - Concurrency Theory, 16th International Conference},
series = {Lecture Notes in Computer Science},
volume = {3653},
year = {2005},
pages = {125--140},
publisher = {Springer},
organization = {Springer},
abstract = {Delaying the synchronization of actions may reveal some hidden behavior that would not happen if the synchronization met the specified deadlines. This precise phenomenon makes bisimulation fail to be a congruence for the parallel composition of timed automata with deadlines, a variant of timed automata where time progress is controlled by deadlines imposed on each transition. This problem has been known and unsolved for several years. In this paper we give a characterization of the coarsest congruence that is included in the bisimulation relation. In addition, a symbolic characterization of such relation is provided and shown to be decidable. We also discuss the pitfalls of existing parallel compositions in this setting and argue that our definition is both reasonable and sufficiently expressive as to consider the modeling of both soft and hard real-time constraints.},
isbn = {3-540-28309-9},
doi = {10.1007/11539452_13},
author = {Pedro R. D{\textquoteright}Argenio and Biniam Gebremichael}
}
@article {DBLP:journals/iandc/DArgenioK05,
title = {A theory of stochastic systems part {I}: Stochastic automata},
journal = {Inf. Comput.},
volume = {203},
number = {1},
year = {2005},
pages = {1--38},
abstract = {This paper presents the theoretical underpinning of a model for symbolically representing probabilistic transition systems, an extension of labelled transition systems for the modelling of general (discrete as well as continuous or singular) probability spaces. These transition systems are particularly suited for modelling softly timed systems, real-time systems in which the time constraints are of random nature. For continuous probability spaces these transition systems are infinite by nature. Stochastic automata represent their behaviour in a finite way. This paper presents the model of stochastic automata, their semantics in terms of probabilistic transition systems, and studies several notions of bisimulation. Furthermore, the relationship of stochastic automata to generalised semi-Markov processes is established.},
doi = {10.1016/j.ic.2005.07.001},
author = {Pedro R. D{\textquoteright}Argenio and Joost-Pieter Katoen}
}
@article {DBLP:journals/iandc/DArgenioK05a,
title = {A theory of Stochastic systems. Part {II}: Process algebra},
journal = {Inf. Comput.},
volume = {203},
number = {1},
year = {2005},
pages = {39--74},
abstract = {This paper introduces (pronounce spades), a stochastic process algebra for discrete event systems, that extends traditional process algebra with timed actions whose delay is governed by general (a.o. continuous) probability distributions. The operational semantics is defined in terms of stochastic automata, a model that uses clocks{\textemdash}like in timed automata{\textemdash}to symbolically represent randomly timed systems, cf. the accompanying paper [P.R. D{\textquoteright}Argenio, J.-P. Kateon, A theory of stochastic systems. Part I: Stochastic automata. Inf. Comput. (2005), to appear]. We show that stochastic automata and are equally expressive, and prove that the operational semantics of a term up to α-conversion of clocks, is unique (modulo symbolic bisimulation). (Open) probabilistic and structural bisimulation are proven to be congruences for, and are equipped with an equational theory. The equational theory is shown to be complete for structural bisimulation and allows to derive an expansion law.},
doi = {10.1016/j.ic.2005.07.002},
author = {Pedro R. D{\textquoteright}Argenio and Joost-Pieter Katoen}
}
@conference {DBLP:conf/qest/DArgenioN04,
title = {Partial Order Reduction on Concurrent Probabilistic Programs},
booktitle = {1st International Conference on Quantitative Evaluation of Systems ({QEST} 2004)},
year = {2004},
pages = {240--249},
publisher = {IEEE Computer Society},
organization = {IEEE Computer Society},
abstract = {Partial order reduction has been used to alleviate the state explosion problem in model checkers for nondeterministic systems. The method relies on exploring only a fragment of the full state space of a program that is enough to assess the validity of a property. In this paper, we discuss partial order reduction for probabilistic programs represented as Markov decision processes. The technique we propose preserves probabilistic quantification of reachability properties and is based on partial order reduction techniques for (non probabilistic) branching temporal logic. We also show that techniques for (non probabilistic) linear temporal logics are not correct for probabilistic reachability and that in turn our method is not sufficient for probabilistic CTL. We conjecture that our reduction technique also preserves maximum and minimum probabilities of next-free LTL properties.
},
isbn = {0-7695-2185-1},
doi = {10.1109/QEST.2004.10031},
author = {Pedro R. D{\textquoteright}Argenio and Peter Niebert}
}
@conference {DBLP:conf/csfw/BartheDR04,
title = {Secure Information Flow by Self-Composition},
booktitle = {17th {IEEE} Computer Security Foundations Workshop, ({CSFW-17} 2004)},
year = {2004},
pages = {100--114},
publisher = {IEEE Computer Society},
organization = {IEEE Computer Society},
abstract = {Non-interference is a high-level security property that guarantees the absence of illicit information leakages through executing programs. More precisely, non-interference for a program assumes a separation between secret inputs and public inputs on the one hand, and secret outputs and public outputs on the other hand, and requires that the value of public outputs does not depend on the value of secret inputs. A common means to enforce non-interference is to use an information flow type system. However, such type systems are inherently imprecise, and reject many secure programs, even for simple programming languages. The purpose of this paper is to investigate logical formulations of non-interference that allow a more precise analysis of programs. It appears that such formulations are often sound an complete, and also amenable to interactive or automated verification techniques, such as theorem-proving or model-checking.
We illustrate the applicability of our method in several scenarii, including a simple imperative language, a non-deterministic language, and finally a language with shared mutable data structures.
},
isbn = {0-7695-2169-X},
doi = {10.1109/CSFW.2004.17},
author = {Gilles Barthe and Pedro R. D{\textquoteright}Argenio and Tamara Rezk}
}
@conference {DBLP:conf/voss/BravettiD04,
title = {Tutte le Algebre Insieme: Concepts, Discussions and Relations of Stochastic Process Algebras with General Distributions},
booktitle = {Validation of Stochastic Systems - A Guide to Current Research},
series = {Lecture Notes in Computer Science},
volume = {2925},
year = {2004},
pages = {44--88},
publisher = {Springer},
organization = {Springer},
abstract = {We report on the state of the art in the formal specification and analysis of concurrent systems whose activity duration depends on general probability distributions. First of all the basic notions and results introduced in the literature are explained and, on this basis, a conceptual classification of the different approaches is presented. We observe that most of the approaches agree on the fact that the specification of systems with general distributions has a three level structure: the process algebra level, the level of symbolic semantics and the level of concrete semantics. Based on such observations, a new very expressive model is introduced for representing timed systems with general distributions. We show that many of the approaches in the literature can be mapped into this model establishing therefore a formal framework to compare these approaches.},
isbn = {3-540-22265-0},
doi = {10.1007/978-3-540-24611-4_2},
author = {Mario Bravetti and Pedro R. D{\textquoteright}Argenio}
}
@article {DBLP:journals/jlp/DArgenioB03,
title = {Guest editors{\textquoteright} introduction: Special issue on Probabilistic Techniques for the Design and Analysis of Systems},
journal = {J. Log. Algebr. Program.},
volume = {56},
number = {1-2},
year = {2003},
pages = {1--2},
doi = {10.1016/S1567-8326(02)00064-4},
author = {Pedro R. D{\textquoteright}Argenio and Jos C. M. Baeten}
}