Storm 1.11.1.1 — A Modern Probabilistic Model Checker
Documentation listing for ChoiceSelector.cpp
9namespace storm {
10namespace transformer {
11template<typename ValueType, typename RewardModelType>
12std::shared_ptr<storm::models::sparse::NondeterministicModel<ValueType, RewardModelType>> ChoiceSelector<ValueType, RewardModelType>::transform(
13 storm::storage::BitVector const& enabledActions) const {
14 storm::storage::sparse::ModelComponents<ValueType, RewardModelType> newComponents(inputModel.getTransitionMatrix().restrictRows(enabledActions));
15 newComponents.stateLabeling = inputModel.getStateLabeling();
16 for (auto const& rewardModel : inputModel.getRewardModels()) {
17 newComponents.rewardModels.emplace(rewardModel.first, rewardModel.second.restrictActions(enabledActions));
18 }
19 if (inputModel.hasChoiceLabeling()) {
20 newComponents.choiceLabeling = inputModel.getChoiceLabeling().getSubLabeling(enabledActions);
21 }
22 newComponents.stateValuations = inputModel.getOptionalStateValuations();
23 if (inputModel.hasChoiceOrigins()) {
24 newComponents.choiceOrigins = inputModel.getChoiceOrigins()->selectChoices(enabledActions);
25 }
26
27 if (inputModel.isOfType(storm::models::ModelType::MarkovAutomaton)) {
28 auto const& ma = *inputModel.template as<storm::models::sparse::MarkovAutomaton<ValueType, RewardModelType>>();
29 newComponents.markovianStates = ma.getMarkovianStates();
30 newComponents.exitRates = ma.getExitRates();
31 return std::make_shared<storm::models::sparse::MarkovAutomaton<ValueType, RewardModelType>>(std::move(newComponents));
32 } else if (inputModel.getType() == storm::models::ModelType::Pomdp) {
33 newComponents.observabilityClasses = static_cast<storm::models::sparse::Pomdp<ValueType, RewardModelType> const&>(inputModel).getObservations();
34 return std::make_shared<storm::models::sparse::Pomdp<ValueType, RewardModelType>>(std::move(newComponents));
35 } else {
36 STORM_LOG_THROW(inputModel.isOfType(storm::models::ModelType::Mdp), storm::exceptions::UnexpectedException,
37 "Unexpected model type for choice selector.");
38 return std::make_shared<storm::models::sparse::Mdp<ValueType, RewardModelType>>(std::move(newComponents));
39 }
40}
41
// Explicit instantiation for double-valued models; further instantiations, if
// any, are outside this listing.
template class ChoiceSelector<double>;
44} // namespace transformer
45} // namespace storm
This class represents a partially observable Markov decision process.
Definition Pomdp.h:13
A bit vector that is internally represented as a vector of 64-bit values.
Definition BitVector.h:16
std::shared_ptr< storm::models::sparse::NondeterministicModel< ValueType, RewardModelType > > transform(storm::storage::BitVector const &enabledActions) const
Constructs an MDP by copying the current MDP and restricting the choices of each state to the ones given by the bit vector.
#define STORM_LOG_THROW(cond, exception, message)
Definition macros.h:30
std::optional< storm::storage::sparse::StateValuations > stateValuations
std::unordered_map< std::string, RewardModelType > rewardModels
boost::optional< storm::storage::BitVector > markovianStates
std::optional< std::shared_ptr< storm::storage::sparse::ChoiceOrigins > > choiceOrigins
std::optional< storm::models::sparse::ChoiceLabeling > choiceLabeling
storm::models::sparse::StateLabeling stateLabeling
std::optional< std::vector< uint32_t > > observabilityClasses
boost::optional< std::vector< ValueType > > exitRates