Storm
A Modern Probabilistic Model Checker
Loading...
Searching...
No Matches
ChoiceSelector.cpp
Go to the documentation of this file.
5
8
10
11namespace storm {
12namespace transformer {
13template<typename ValueType, typename RewardModelType>
14std::shared_ptr<storm::models::sparse::NondeterministicModel<ValueType, RewardModelType>> ChoiceSelector<ValueType, RewardModelType>::transform(
15 storm::storage::BitVector const& enabledActions) const {
16 storm::storage::sparse::ModelComponents<ValueType, RewardModelType> newComponents(inputModel.getTransitionMatrix().restrictRows(enabledActions));
17 newComponents.stateLabeling = inputModel.getStateLabeling();
18 for (auto const& rewardModel : inputModel.getRewardModels()) {
19 newComponents.rewardModels.emplace(rewardModel.first, rewardModel.second.restrictActions(enabledActions));
20 }
21 if (inputModel.hasChoiceLabeling()) {
22 newComponents.choiceLabeling = inputModel.getChoiceLabeling().getSubLabeling(enabledActions);
23 }
24 newComponents.stateValuations = inputModel.getOptionalStateValuations();
25 if (inputModel.hasChoiceOrigins()) {
26 newComponents.choiceOrigins = inputModel.getChoiceOrigins()->selectChoices(enabledActions);
27 }
28
29 if (inputModel.isOfType(storm::models::ModelType::MarkovAutomaton)) {
30 auto const& ma = *inputModel.template as<storm::models::sparse::MarkovAutomaton<ValueType, RewardModelType>>();
31 newComponents.markovianStates = ma.getMarkovianStates();
32 newComponents.exitRates = ma.getExitRates();
33 return std::make_shared<storm::models::sparse::MarkovAutomaton<ValueType, RewardModelType>>(std::move(newComponents));
34 } else if (inputModel.getType() == storm::models::ModelType::Pomdp) {
35 newComponents.observabilityClasses = static_cast<storm::models::sparse::Pomdp<ValueType, RewardModelType> const&>(inputModel).getObservations();
36 return std::make_shared<storm::models::sparse::Pomdp<ValueType, RewardModelType>>(std::move(newComponents));
37 } else {
38 STORM_LOG_THROW(inputModel.isOfType(storm::models::ModelType::Mdp), storm::exceptions::UnexpectedException,
39 "Unexpected model type for choice selector.");
40 return std::make_shared<storm::models::sparse::Mdp<ValueType, RewardModelType>>(std::move(newComponents));
41 }
42}
43
44template class ChoiceSelector<double>;
46} // namespace transformer
47} // namespace storm
This class represents a partially observable Markov decision process.
Definition Pomdp.h:15
A bit vector that is internally represented as a vector of 64-bit values.
Definition BitVector.h:18
std::shared_ptr< storm::models::sparse::NondeterministicModel< ValueType, RewardModelType > > transform(storm::storage::BitVector const &enabledActions) const
Constructs an MDP by copying the current MDP and restricting the choices of each state to the ones given by the enabled actions.
#define STORM_LOG_THROW(cond, exception, message)
Definition macros.h:30
LabParser.cpp.
Definition cli.cpp:18
std::optional< storm::storage::sparse::StateValuations > stateValuations
std::unordered_map< std::string, RewardModelType > rewardModels
boost::optional< storm::storage::BitVector > markovianStates
std::optional< std::shared_ptr< storm::storage::sparse::ChoiceOrigins > > choiceOrigins
std::optional< storm::models::sparse::ChoiceLabeling > choiceLabeling
storm::models::sparse::StateLabeling stateLabeling
std::optional< std::vector< uint32_t > > observabilityClasses
boost::optional< std::vector< ValueType > > exitRates