// SPDX-License-Identifier: BSD-3-Clause OR GPL-2.0
/******************************************************************************
 *
 * Module Name: psparse - Parser top level AML parse routines
 *
 * Copyright (C) 2000 - 2020, Intel Corp.
 *
 *****************************************************************************/

/*
 * Parse the AML and build an operation tree as most interpreters,
 * like Perl, do. Parsing is done by hand rather than with a YACC
 * generated parser to tightly constrain stack and dynamic memory
 * usage. At the same time, parsing is kept flexible and the code
 * fairly compact by parsing based on a list of AML opcode
 * templates in aml_op_info[]
 */

#include <acpi/acpi.h>
#include "accommon.h"
#include "acparser.h"
#include "acdispat.h"
#include "amlcode.h"
#include "acinterp.h"
#include "acnamesp.h"

#define _COMPONENT          ACPI_PARSER
ACPI_MODULE_NAME("psparse")
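
/*
 * Illustrative sketch (not part of the original ACPICA sources): the
 * "opcode templates" mentioned above mean that each one- or two-byte AML
 * opcode is looked up in a static table instead of being recognized by a
 * generated grammar. A caller might consult that table through the same
 * acpi_ps_get_opcode_info() interface used later in this file:
 *
 *	const struct acpi_opcode_info *op_info;
 *
 *	op_info = acpi_ps_get_opcode_info(aml_opcode);
 *	if (op_info->class == AML_CLASS_UNKNOWN) {
 *		// aml_opcode has no template; treat it as invalid AML
 *	}
 */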

/*******************************************************************************
 *
 * FUNCTION:    acpi_ps_get_opcode_size
 *
 * PARAMETERS:  opcode          - An AML opcode
 *
 * RETURN:      Size of the opcode, in bytes (1 or 2)
 *
 * DESCRIPTION: Get the size of the current opcode.
 *
 ******************************************************************************/

u32 acpi_ps_get_opcode_size(u32 opcode)
{

	/* Extended (2-byte) opcode if > 255 */

	if (opcode > 0x00FF) {
		return (2);
	}

	/* Otherwise, just a single byte opcode */

	return (1);
}
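
/*
 * Illustrative example (not part of the original ACPICA sources): any
 * opcode value above 0x00FF is one of the 0x5B-prefixed extended opcodes
 * and occupies two bytes in the AML stream, so a caller can advance past
 * the opcode it just peeked with:
 *
 *	parser_state->aml += acpi_ps_get_opcode_size(opcode);
 */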

/*******************************************************************************
 *
 * FUNCTION:    acpi_ps_peek_opcode
 *
 * PARAMETERS:  parser_state        - A parser state object
 *
 * RETURN:      Next AML opcode
 *
 * DESCRIPTION: Get next AML opcode (without incrementing AML pointer)
 *
 ******************************************************************************/

u16 acpi_ps_peek_opcode(struct acpi_parse_state * parser_state)
{
	u8 *aml;
	u16 opcode;

	aml = parser_state->aml;
	opcode = (u16) ACPI_GET8(aml);

	if (opcode == AML_EXTENDED_PREFIX) {

		/* Extended opcode, get the second opcode byte */

		aml++;
		opcode = (u16) ((opcode << 8) | ACPI_GET8(aml));
	}

	return (opcode);
}
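
/*
 * Illustrative example (not part of the original ACPICA sources): if the
 * next two bytes in the AML stream are 0x5B 0x86 (extended prefix plus a
 * second opcode byte), the peek above returns the combined 16-bit opcode
 * 0x5B86. Only the local 'aml' pointer is advanced, so parser_state->aml
 * is left untouched and this remains a true peek:
 *
 *	opcode = acpi_ps_peek_opcode(parser_state);
 *	// parser_state->aml still points at the 0x5B prefix byte here
 */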

/*******************************************************************************
 *
 * FUNCTION:    acpi_ps_complete_this_op
 *
 * PARAMETERS:  walk_state      - Current State
 *              op              - Op to complete
 *
 * RETURN:      Status
 *
 * DESCRIPTION: Perform any cleanup at the completion of an Op.
 *
 ******************************************************************************/

acpi_status
acpi_ps_complete_this_op(struct acpi_walk_state *walk_state,
			 union acpi_parse_object *op)
{
	union acpi_parse_object *prev;
	union acpi_parse_object *next;
	const struct acpi_opcode_info *parent_info;
	union acpi_parse_object *replacement_op = NULL;
	acpi_status status = AE_OK;

	ACPI_FUNCTION_TRACE_PTR(ps_complete_this_op, op);

	/* Check for null Op, can happen if AML code is corrupt */

	if (!op) {
		return_ACPI_STATUS(AE_OK);	/* OK for now */
	}

	acpi_ex_stop_trace_opcode(op, walk_state);

	/* Delete this op and the subtree below it if asked to */

	if (((walk_state->parse_flags & ACPI_PARSE_TREE_MASK) !=
	     ACPI_PARSE_DELETE_TREE)
	    || (walk_state->op_info->class == AML_CLASS_ARGUMENT)) {
		return_ACPI_STATUS(AE_OK);
	}

	/* Make sure that we only delete this subtree */

	if (op->common.parent) {
		prev = op->common.parent->common.value.arg;
		if (!prev) {

			/* Nothing more to do */

			goto cleanup;
		}

		/*
		 * Check if we need to replace the operator and its subtree
		 * with a return value op (placeholder op)
		 */
		parent_info =
		    acpi_ps_get_opcode_info(op->common.parent->common.
					    aml_opcode);

		switch (parent_info->class) {
		case AML_CLASS_CONTROL:

			break;

		case AML_CLASS_CREATE:
			/*
			 * These opcodes contain term_arg operands. The current
			 * op must be replaced by a placeholder return op
			 */
			replacement_op =
			    acpi_ps_alloc_op(AML_INT_RETURN_VALUE_OP,
					     op->common.aml);
			if (!replacement_op) {
				status = AE_NO_MEMORY;
			}
			break;

		case AML_CLASS_NAMED_OBJECT:
			/*
			 * These opcodes contain term_arg operands. The current
			 * op must be replaced by a placeholder return op
			 */
			if ((op->common.parent->common.aml_opcode ==
			     AML_REGION_OP)
			    || (op->common.parent->common.aml_opcode ==
				AML_DATA_REGION_OP)
			    || (op->common.parent->common.aml_opcode ==
				AML_BUFFER_OP)
			    || (op->common.parent->common.aml_opcode ==
				AML_PACKAGE_OP)
			    || (op->common.parent->common.aml_opcode ==
				AML_BANK_FIELD_OP)
			    || (op->common.parent->common.aml_opcode ==
				AML_VARIABLE_PACKAGE_OP)) {
				replacement_op =
				    acpi_ps_alloc_op(AML_INT_RETURN_VALUE_OP,
						     op->common.aml);
				if (!replacement_op) {
					status = AE_NO_MEMORY;
				}
			} else
			    if ((op->common.parent->common.aml_opcode ==
				 AML_NAME_OP)
				&& (walk_state->pass_number <=
				    ACPI_IMODE_LOAD_PASS2)) {
				if ((op->common.aml_opcode == AML_BUFFER_OP)
				    || (op->common.aml_opcode == AML_PACKAGE_OP)
				    || (op->common.aml_opcode ==
					AML_VARIABLE_PACKAGE_OP)) {
					replacement_op =
					    acpi_ps_alloc_op(op->common.
							     aml_opcode,
							     op->common.aml);
					if (!replacement_op) {
						status = AE_NO_MEMORY;
					} else {
						replacement_op->named.data =
						    op->named.data;
						replacement_op->named.length =
						    op->named.length;
					}
				}
			}
			break;

		default:

			replacement_op =
			    acpi_ps_alloc_op(AML_INT_RETURN_VALUE_OP,
					     op->common.aml);
			if (!replacement_op) {
				status = AE_NO_MEMORY;
			}
		}

		/* We must unlink this op from the parent tree */

		if (prev == op) {

			/* This op is the first in the list */

			if (replacement_op) {
				replacement_op->common.parent =
				    op->common.parent;
				replacement_op->common.value.arg = NULL;
				replacement_op->common.node = op->common.node;
				op->common.parent->common.value.arg =
				    replacement_op;
				replacement_op->common.next = op->common.next;
			} else {
				op->common.parent->common.value.arg =
				    op->common.next;
			}
		}

		/* Search the parent list */

		else
			while (prev) {

				/* Traverse all siblings in the parent's argument list */

				next = prev->common.next;
				if (next == op) {
					if (replacement_op) {
						replacement_op->common.parent =
						    op->common.parent;
						replacement_op->common.value.
						    arg = NULL;
						replacement_op->common.node =
						    op->common.node;
						prev->common.next =
						    replacement_op;
						replacement_op->common.next =
						    op->common.next;
						next = NULL;
					} else {
						prev->common.next =
						    op->common.next;
						next = NULL;
					}
				}

				prev = next;
			}
	}

cleanup:

	/* Now we can actually delete the subtree rooted at Op */

	acpi_ps_delete_parse_tree(op);
	return_ACPI_STATUS(status);
}
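
/*
 * Illustrative sketch (not part of the original ACPICA sources): when a
 * completed subtree is deleted but its parent still expects a value at that
 * argument position (for example a term_arg of a create_field-class opcode,
 * or a Buffer/Package under a Name() during the load passes), the code above
 * splices a placeholder into the parent's argument list before freeing the
 * subtree, roughly:
 *
 *	Parent                          Parent
 *	  |                               |
 *	  Op -> sibling ...     ==>       AML_INT_RETURN_VALUE_OP -> sibling ...
 *	  |
 *	  (subtree being deleted)
 *
 * The placeholder preserves the argument list and sibling chain so that a
 * return value can still be deposited at that position later.
 */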

/*******************************************************************************
 *
 * FUNCTION:    acpi_ps_next_parse_state
 *
 * PARAMETERS:  walk_state          - Current state
 *              op                  - Current parse op
 *              callback_status     - Status from previous operation
 *
 * RETURN:      Status
 *
 * DESCRIPTION: Update the parser state based upon the return exception from
 *              the parser callback.
 *
 ******************************************************************************/

acpi_status
acpi_ps_next_parse_state(struct acpi_walk_state *walk_state,
			 union acpi_parse_object *op,
			 acpi_status callback_status)
{
	struct acpi_parse_state *parser_state = &walk_state->parser_state;
	acpi_status status = AE_CTRL_PENDING;

	ACPI_FUNCTION_TRACE_PTR(ps_next_parse_state, op);

	switch (callback_status) {
	case AE_CTRL_TERMINATE:
		/*
		 * A control method was terminated via a RETURN statement.
		 * The walk of this method is complete.
		 */
		parser_state->aml = parser_state->aml_end;
		status = AE_CTRL_TERMINATE;
		break;

	case AE_CTRL_BREAK:

		parser_state->aml = walk_state->aml_last_while;
		walk_state->control_state->common.value = FALSE;
		status = AE_CTRL_BREAK;
		break;

	case AE_CTRL_CONTINUE:

		parser_state->aml = walk_state->aml_last_while;
		status = AE_CTRL_CONTINUE;
		break;

	case AE_CTRL_PENDING:

		parser_state->aml = walk_state->aml_last_while;
		break;

#if 0
	case AE_CTRL_SKIP:

		parser_state->aml = parser_state->scope->parse_scope.pkg_end;
		status = AE_OK;
		break;
#endif

	case AE_CTRL_TRUE:
		/*
		 * Predicate of an IF was true, and we are at the matching ELSE.
		 * Just close out this package
		 */
		parser_state->aml = acpi_ps_get_next_package_end(parser_state);
		status = AE_CTRL_PENDING;
		break;

	case AE_CTRL_FALSE:
		/*
		 * Either an IF/WHILE Predicate was false or we encountered a BREAK
		 * opcode. In both cases, we do not execute the rest of the
		 * package; we simply close out the parent (finishing the walk of
		 * this branch of the tree) and continue execution at the parent
		 * level.
		 */
		parser_state->aml = parser_state->scope->parse_scope.pkg_end;

		/* In the case of a BREAK, just force a predicate (if any) to FALSE */

		walk_state->control_state->common.value = FALSE;
		status = AE_CTRL_END;
		break;

	case AE_CTRL_TRANSFER:

		/* A method call (invocation) -- transfer control */

		status = AE_CTRL_TRANSFER;
		walk_state->prev_op = op;
		walk_state->method_call_op = op;
		walk_state->method_call_node =
		    (op->common.value.arg)->common.node;

		/* Will return value (if any) be used by the caller? */

		walk_state->return_used =
		    acpi_ds_is_result_used(op, walk_state);
		break;

	default:

		status = callback_status;
		if ((callback_status & AE_CODE_MASK) == AE_CODE_CONTROL) {
			status = AE_OK;
		}
		break;
	}

	return_ACPI_STATUS(status);
}
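
/*
 * Illustrative sketch (not part of the original ACPICA sources): the
 * AE_CTRL_* codes handed to this routine are how the dispatcher callbacks
 * steer the byte-stream parser. For instance, when an If/While predicate
 * evaluates to false the callback returns AE_CTRL_FALSE, and the switch
 * above skips the remainder of the construct by moving parser_state->aml
 * to the end of the current package:
 *
 *	// hypothetical caller, after a walk callback has run:
 *	callback_status = AE_CTRL_FALSE;
 *	status = acpi_ps_next_parse_state(walk_state, op, callback_status);
 *	// parser_state->aml now points at the end of the enclosing package
 */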

/*******************************************************************************
 *
 * FUNCTION:    acpi_ps_parse_aml
 *
 * PARAMETERS:  walk_state          - Current state
 *
 * RETURN:      Status
 *
 * DESCRIPTION: Parse raw AML and return a tree of ops
 *
 ******************************************************************************/

acpi_status acpi_ps_parse_aml(struct acpi_walk_state *walk_state)
{
	acpi_status status;
	struct acpi_thread_state *thread;
	struct acpi_thread_state *prev_walk_list = acpi_gbl_current_walk_list;
	struct acpi_walk_state *previous_walk_state;

	ACPI_FUNCTION_TRACE(ps_parse_aml);

	ACPI_DEBUG_PRINT((ACPI_DB_PARSE,
			  "Entered with WalkState=%p Aml=%p size=%X\n",
			  walk_state, walk_state->parser_state.aml,
			  walk_state->parser_state.aml_size));

	if (!walk_state->parser_state.aml) {
		return_ACPI_STATUS(AE_BAD_ADDRESS);
	}

	/* Create and initialize a new thread state */

	thread = acpi_ut_create_thread_state();
	if (!thread) {
		if (walk_state->method_desc) {

			/* Executing a control method - additional cleanup */

			acpi_ds_terminate_control_method(walk_state->
							 method_desc,
							 walk_state);
		}

		acpi_ds_delete_walk_state(walk_state);
		return_ACPI_STATUS(AE_NO_MEMORY);
	}

	walk_state->thread = thread;

	/*
	 * If executing a method, the starting sync_level is this method's
	 * sync_level
	 */
	if (walk_state->method_desc) {
		walk_state->thread->current_sync_level =
		    walk_state->method_desc->method.sync_level;
	}

	acpi_ds_push_walk_state(walk_state, thread);

	/*
	 * This global allows the AML debugger to get a handle to the currently
	 * executing control method.
	 */
	acpi_gbl_current_walk_list = thread;

	/*
	 * Execute the walk loop as long as there is a valid Walk State. This
	 * handles nested control method invocations without recursion.
	 */
	ACPI_DEBUG_PRINT((ACPI_DB_PARSE, "State=%p\n", walk_state));

	status = AE_OK;
	while (walk_state) {
		if (ACPI_SUCCESS(status)) {
			/*
			 * The parse_loop executes AML until the method terminates
			 * or calls another method.
			 */
			status = acpi_ps_parse_loop(walk_state);
		}

		ACPI_DEBUG_PRINT((ACPI_DB_PARSE,
				  "Completed one call to walk loop, %s State=%p\n",
				  acpi_format_exception(status), walk_state));

		if (walk_state->method_pathname && walk_state->method_is_nested) {

			/* Optional object evaluation log */

			ACPI_DEBUG_PRINT_RAW((ACPI_DB_EVALUATION,
					      "%-26s:  %*s%s\n",
					      "   Exit nested method",
					      (walk_state->
					       method_nesting_depth + 1) * 3,
					      " ",
					      &walk_state->method_pathname[1]));

			ACPI_FREE(walk_state->method_pathname);
			walk_state->method_is_nested = FALSE;
		}
		if (status == AE_CTRL_TRANSFER) {
			/*
			 * A method call was detected.
			 * Transfer control to the called control method
			 */
			status =
			    acpi_ds_call_control_method(thread, walk_state,
							NULL);
			if (ACPI_FAILURE(status)) {
				status =
				    acpi_ds_method_error(status, walk_state);
			}

			/*
			 * If the transfer to the new method call worked, a new
			 * walk state was created -- get it
			 */
			walk_state = acpi_ds_get_current_walk_state(thread);
			continue;
		} else if (status == AE_CTRL_TERMINATE) {
			status = AE_OK;
		} else if ((status != AE_OK) && (walk_state->method_desc)) {

			/* Either the method parse or actual execution failed */

			acpi_ex_exit_interpreter();
			if (status == AE_ABORT_METHOD) {
				acpi_ns_print_node_pathname(walk_state->
							    method_node,
							    "Aborting method");
				acpi_os_printf("\n");
			} else {
				ACPI_ERROR_METHOD("Aborting method",
						  walk_state->method_node, NULL,
						  status);
			}
			acpi_ex_enter_interpreter();

			/* Check for possible multi-thread reentrancy problem */

			if ((status == AE_ALREADY_EXISTS) &&
			    (!(walk_state->method_desc->method.info_flags &
			       ACPI_METHOD_SERIALIZED))) {
				/*
				 * Method is not serialized and tried to create an object
				 * twice. The probable cause is that the method cannot
				 * handle reentrancy. Mark as "pending serialized" now, and
				 * then mark "serialized" when the last thread exits.
				 */
				walk_state->method_desc->method.info_flags |=
				    ACPI_METHOD_SERIALIZED_PENDING;
			}
		}
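
		/*
		 * Illustrative note (not part of the original ACPICA sources):
		 * the "pending serialized" marking above is deliberately
		 * deferred. Other threads may still be executing inside this
		 * method, so the dispatcher is left to promote the flag to
		 * ACPI_METHOD_SERIALIZED once the last thread exits; later
		 * invocations then take an implicit mutex and the
		 * AE_ALREADY_EXISTS collision seen here cannot recur.
		 */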

		/* We are done with this walk, move on to the parent if any */

		walk_state = acpi_ds_pop_walk_state(thread);

		/* Reset the current scope to the beginning of scope stack */

		acpi_ds_scope_stack_clear(walk_state);

		/*
		 * If we just returned from the execution of a control method or if we
		 * encountered an error during the method parse phase, there's lots of
		 * cleanup to do
		 */
		if (((walk_state->parse_flags & ACPI_PARSE_MODE_MASK) ==
		     ACPI_PARSE_EXECUTE &&
		     !(walk_state->parse_flags & ACPI_PARSE_MODULE_LEVEL)) ||
		    (ACPI_FAILURE(status))) {
			acpi_ds_terminate_control_method(walk_state->
							 method_desc,
							 walk_state);
		}

		/* Delete this walk state and all linked control states */

		acpi_ps_cleanup_scope(&walk_state->parser_state);
		previous_walk_state = walk_state;

		ACPI_DEBUG_PRINT((ACPI_DB_PARSE,
				  "ReturnValue=%p, ImplicitValue=%p State=%p\n",
				  walk_state->return_desc,
				  walk_state->implicit_return_obj, walk_state));

		/* Check if we have restarted a preempted walk */

		walk_state = acpi_ds_get_current_walk_state(thread);
		if (walk_state) {
			if (ACPI_SUCCESS(status)) {
				/*
				 * There is another walk state, restart it.
				 * If the method return value is not used by the parent,
				 * The object is deleted
				 */
				if (!previous_walk_state->return_desc) {
					/*
					 * In slack mode execution, if there is no return value
					 * we should implicitly return zero (0) as a default value.
					 */
					if (acpi_gbl_enable_interpreter_slack &&
					    !previous_walk_state->
					    implicit_return_obj) {
						previous_walk_state->
						    implicit_return_obj =
						    acpi_ut_create_integer_object
						    ((u64) 0);
						if (!previous_walk_state->
						    implicit_return_obj) {
							return_ACPI_STATUS
							    (AE_NO_MEMORY);
						}
					}

					/* Restart the calling control method */

					status =
					    acpi_ds_restart_control_method
					    (walk_state,
					     previous_walk_state->
					     implicit_return_obj);
				} else {
					/*
					 * We have a valid return value, delete any implicit
					 * return value
					 */
					acpi_ds_clear_implicit_return
					    (previous_walk_state);

					status =
					    acpi_ds_restart_control_method
					    (walk_state,
					     previous_walk_state->return_desc);
				}
				if (ACPI_SUCCESS(status)) {
					walk_state->walk_type |=
					    ACPI_WALK_METHOD_RESTART;
				}
			} else {
				/* On error, delete any return object or implicit return */

				acpi_ut_remove_reference(previous_walk_state->
							 return_desc);
				acpi_ds_clear_implicit_return
				    (previous_walk_state);
			}
		}

		/*
		 * Just completed a 1st-level method, save the final internal return
		 * value (if any)
		 */
		else if (previous_walk_state->caller_return_desc) {
			if (previous_walk_state->implicit_return_obj) {
				*(previous_walk_state->caller_return_desc) =
				    previous_walk_state->implicit_return_obj;
			} else {
				/* NULL if no return value */

				*(previous_walk_state->caller_return_desc) =
				    previous_walk_state->return_desc;
			}
		} else {
			if (previous_walk_state->return_desc) {

				/* Caller doesn't want it, must delete it */

				acpi_ut_remove_reference(previous_walk_state->
							 return_desc);
			}
			if (previous_walk_state->implicit_return_obj) {

				/* Caller doesn't want it, must delete it */

				acpi_ut_remove_reference(previous_walk_state->
							 implicit_return_obj);
			}
		}

		acpi_ds_delete_walk_state(previous_walk_state);
	}
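
	/*
	 * Illustrative example (not part of the original ACPICA sources): with
	 * acpi_gbl_enable_interpreter_slack set, a method written without an
	 * explicit Return(), e.g.
	 *
	 *	Method (_Q01) { Store (1, Local0) }
	 *
	 * is still restarted in its caller with an implicit Integer 0; that is
	 * what the acpi_ut_create_integer_object((u64) 0) call earlier in this
	 * loop supplies when the completed method produced no return value.
	 */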

	/* Normal exit */

	acpi_ex_release_all_mutexes(thread);
	acpi_ut_delete_generic_state(ACPI_CAST_PTR
				     (union acpi_generic_state, thread));
	acpi_gbl_current_walk_list = prev_walk_list;
	return_ACPI_STATUS(status);
}
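
/*
 * Illustrative sketch (not part of the original ACPICA sources): the loop
 * above replaces recursion with an explicit stack of walk states kept on
 * the thread state, so a nested method call roughly plays out as:
 *
 *	status = acpi_ps_parse_loop(walk_state);        // hits a method call
 *	// status == AE_CTRL_TRANSFER:
 *	acpi_ds_call_control_method(thread, walk_state, NULL);  // push callee
 *	walk_state = acpi_ds_get_current_walk_state(thread);    // parse callee
 *	...
 *	walk_state = acpi_ds_pop_walk_state(thread);            // callee done
 *	acpi_ds_restart_control_method(walk_state,
 *				       previous_walk_state->return_desc);
 *
 * Arbitrarily deep method nesting therefore costs heap-allocated walk
 * states rather than native stack frames.
 */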