import java.util.function.Function;

/**
- * This agent traverses the nondeterministic environment using a
- * contingency plan. See page 135, AIMA3e.
+ * This agent traverses the nondeterministic environment using a contingency plan. See page 135, AIMA3e.
 *
 * @param <P> The type used to represent percepts
 * @param <S> The type used to represent states
 * @author Andrew Brown
 */
public class NondeterministicSearchAgent<P, S, A> extends SimpleAgent<P, A> {
-    /**
-     * Maps percepts to states.
-     */
+    /** Maps percepts to states. */
    private Function<P, S> ptsFunction;
+    /** Is informed about every computed contingency plan. */
    private Notifier notifier;

    private NondeterministicProblem<S, A> problem;
@@ -37,18 +35,16 @@ public NondeterministicSearchAgent(Function<P, S> ptsFn) {
    }

    public NondeterministicSearchAgent(BiFunction<P, Agent<P, A>, S> ptsFn) {
-        this.ptsFunction = (percept) -> ptsFn.apply(percept, this);
+        this.ptsFunction = percept -> ptsFn.apply(percept, this);
    }

-
    public NondeterministicSearchAgent(BiFunction<P, Agent<P, A>, S> ptsFn, Notifier notifier) {
        this(ptsFn);
        this.notifier = notifier;
    }

    /**
     * Computes a contingency plan for the given problem and prepares plan execution.
-     *
     * @param problem
     *            The search problem for this agent to solve.
     */
@@ -63,51 +59,49 @@ public void makePlan(NondeterministicProblem<S, A> problem) {
        notifier.notify("Contingency plan: " + contingencyPlan);
    }

-    /**
-     * Returns the search problem for this agent.
-     *
-     * @return The search problem for this agent.
-     */
-    public NondeterministicProblem<S, A> getProblem() {
-        return problem;
-    }
-
-    /**
-     * Returns the contingency plan of the agent.
-     *
-     * @return The plan the agent uses to clean the vacuum world or null.
-     */
-    public Plan<S, A> getPlan() {
-        return contingencyPlan;
-    }
-
    /**
     * Selects next action from the contingency plan.
-     *
     * @param percept A percept.
     * @return An action from the contingency plan.
     */
    @Override
-    public Optional<A> act(P percept) {
-        S state = (S) ptsFunction.apply(percept);
+    public final Optional<A> act(P percept) {
+        S state = ptsFunction.apply(percept);
        // at goal or no plan?
        if (problem.testGoal(state) || contingencyPlan == null)
            return Optional.empty();

        currStep++;
-        // end of plan reached?
-        if (currStep == contingencyPlan.size()) {
-            contingencyPlan = null;
-            return Optional.empty();
+        while (true) {
+            // end of plan reached?
+            if (currStep == contingencyPlan.size()) {
+                contingencyPlan = null;
+                return Optional.empty();
+            }
+
+            // next step is action step?
+            if (contingencyPlan.isActionStep(currStep))
+                return Optional.of(contingencyPlan.getAction(currStep));
+
+            // determine next sub-plan and execute it!
+            contingencyPlan = contingencyPlan.getPlan(currStep, state);
+            currStep = 0;
        }
+    }

-        // next step is action step?
-        if (contingencyPlan.isActionStep(currStep))
-            return Optional.of(contingencyPlan.getAction(currStep));
+    /**
+     * Returns the search problem for this agent.
+     * @return The search problem for this agent.
+     */
+    public NondeterministicProblem<S, A> getProblem() {
+        return problem;
+    }

-        // determine next sub-plan and act it!
-        contingencyPlan = contingencyPlan.getPlan(currStep, state);
-        currStep = -1;
-        return act(percept);
+    /**
+     * Returns the contingency plan of the agent.
+     * @return The plan the agent uses to clean the vacuum world or null.
+     */
+    public Plan<S, A> getPlan() {
+        return contingencyPlan;
    }
}
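For orientation, a minimal, hedged sketch of how the refactored agent could be driven. It uses only members visible in this diff (the Function-based constructor from the first hunk header, makePlan, and act) and assumes the surrounding aima-java types are on the classpath; the executeAndPerceive callback and the sketch class itself are illustrative placeholders, not part of the commit.

import java.util.Optional;
import java.util.function.Function;

/** Hedged usage sketch; not part of the commit above. */
class NondeterministicAgentUsageSketch {
    // "executeAndPerceive" is a hypothetical callback supplied by the caller: it applies
    // an action to the environment and returns the resulting percept.
    static <P, S, A> void runAgent(Function<P, S> ptsFn,
                                   NondeterministicProblem<S, A> problem,
                                   Function<A, P> executeAndPerceive,
                                   P initialPercept) {
        NondeterministicSearchAgent<P, S, A> agent = new NondeterministicSearchAgent<>(ptsFn);
        agent.makePlan(problem);                 // compute the contingency plan once

        P percept = initialPercept;
        Optional<A> action = agent.act(percept); // the refactored act() walks the plan iteratively
        while (action.isPresent()) {
            percept = executeAndPerceive.apply(action.get());
            action = agent.act(percept);
        }
        // act() returned empty: the goal test succeeded or the plan was exhausted.
    }
}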