drivers/acpi/acpica/psparse.c

// SPDX-License-Identifier: BSD-3-Clause OR GPL-2.0
/******************************************************************************
 *
 * Module Name: psparse - Parser top level AML parse routines
 *
 * Copyright (C) 2000 - 2020, Intel Corp.
 *
 *****************************************************************************/

/*
 * Parse the AML and build an operation tree as most interpreters,
 * like Perl, do. Parsing is done by hand rather than with a YACC
 * generated parser to tightly constrain stack and dynamic memory
 * usage. At the same time, parsing is kept flexible and the code
 * fairly compact by parsing based on a list of AML opcode
 * templates in aml_op_info[]
 */
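
/*
 * The template table referred to above is acpi_gbl_aml_op_info[] (see
 * psopcode.c); acpi_ps_get_opcode_info() performs the lookup for the raw
 * opcode value returned by acpi_ps_peek_opcode() below.
 */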

#include <acpi/acpi.h>
#include "accommon.h"
#include "acparser.h"
#include "acdispat.h"
#include "amlcode.h"
#include "acinterp.h"
#include "acnamesp.h"

#define _COMPONENT          ACPI_PARSER
ACPI_MODULE_NAME("psparse")

/*******************************************************************************
 *
 * FUNCTION:    acpi_ps_get_opcode_size
 *
 * PARAMETERS:  opcode          - An AML opcode
 *
 * RETURN:      Size of the opcode, in bytes (1 or 2)
 *
 * DESCRIPTION: Get the size of the current opcode.
 *
 ******************************************************************************/
u32 acpi_ps_get_opcode_size(u32 opcode)
{

	/* Extended (2-byte) opcode if > 255 */

	if (opcode > 0x00FF) {
		return (2);
	}

	/* Otherwise, just a single byte opcode */

	return (1);
}
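
/*
 * Example (opcode encodings per the ACPI specification): a one-byte opcode
 * such as StoreOp (0x70) yields a size of 1, while extended opcodes carry
 * the 0x5B ExtOpPrefix and yield a size of 2, e.g. OpRegionOp (0x5B80):
 *
 *     acpi_ps_get_opcode_size(0x70)       -> 1
 *     acpi_ps_get_opcode_size(0x5B80)     -> 2
 */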

/*******************************************************************************
 *
 * FUNCTION:    acpi_ps_peek_opcode
 *
 * PARAMETERS:  parser_state        - A parser state object
 *
 * RETURN:      Next AML opcode
 *
 * DESCRIPTION: Get next AML opcode (without incrementing AML pointer)
 *
 ******************************************************************************/

u16 acpi_ps_peek_opcode(struct acpi_parse_state *parser_state)
{
	u8 *aml;
	u16 opcode;

	aml = parser_state->aml;
	opcode = (u16) ACPI_GET8(aml);

	if (opcode == AML_EXTENDED_PREFIX) {

		/* Extended opcode, get the second opcode byte */

		aml++;
		opcode = (u16) ((opcode << 8) | ACPI_GET8(aml));
	}

	return (opcode);
}
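
/*
 * Example of the extended-opcode handling above: for the AML byte sequence
 * 0x5B 0x80 (an OperationRegion definition), the first byte matches
 * AML_EXTENDED_PREFIX, so the returned opcode is (0x5B << 8) | 0x80 = 0x5B80.
 * Note that parser_state->aml is not advanced here; the caller consumes the
 * opcode bytes separately.
 */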

/*******************************************************************************
 *
 * FUNCTION:    acpi_ps_complete_this_op
 *
 * PARAMETERS:  walk_state      - Current State
 *              op              - Op to complete
 *
 * RETURN:      Status
 *
 * DESCRIPTION: Perform any cleanup at the completion of an Op.
 *
 ******************************************************************************/

acpi_status
acpi_ps_complete_this_op(struct acpi_walk_state *walk_state,
			 union acpi_parse_object *op)
{
	union acpi_parse_object *prev;
	union acpi_parse_object *next;
	const struct acpi_opcode_info *parent_info;
	union acpi_parse_object *replacement_op = NULL;
	acpi_status status = AE_OK;

	ACPI_FUNCTION_TRACE_PTR(ps_complete_this_op, op);

	/* Check for null Op, can happen if AML code is corrupt */

	if (!op) {
		return_ACPI_STATUS(AE_OK);	/* OK for now */
	}

	acpi_ex_stop_trace_opcode(op, walk_state);

	/* Delete this op and the subtree below it if asked to */

	if (((walk_state->parse_flags & ACPI_PARSE_TREE_MASK) !=
	     ACPI_PARSE_DELETE_TREE)
	    || (walk_state->op_info->class == AML_CLASS_ARGUMENT)) {
		return_ACPI_STATUS(AE_OK);
	}

	/* Make sure that we only delete this subtree */

	if (op->common.parent) {
		prev = op->common.parent->common.value.arg;
		if (!prev) {

			/* Nothing more to do */

			goto cleanup;
		}

		/*
		 * Check if we need to replace the operator and its subtree
		 * with a return value op (placeholder op)
		 */
		parent_info =
		    acpi_ps_get_opcode_info(op->common.parent->common.
					    aml_opcode);

		switch (parent_info->class) {
		case AML_CLASS_CONTROL:

			break;

		case AML_CLASS_CREATE:
			/*
			 * These opcodes contain term_arg operands. The current
			 * op must be replaced by a placeholder return op
			 */
			replacement_op =
			    acpi_ps_alloc_op(AML_INT_RETURN_VALUE_OP,
					     op->common.aml);
			if (!replacement_op) {
				status = AE_NO_MEMORY;
			}
			break;

		case AML_CLASS_NAMED_OBJECT:
			/*
			 * These opcodes contain term_arg operands. The current
			 * op must be replaced by a placeholder return op
			 */
			if ((op->common.parent->common.aml_opcode ==
			     AML_REGION_OP)
			    || (op->common.parent->common.aml_opcode ==
				AML_DATA_REGION_OP)
			    || (op->common.parent->common.aml_opcode ==
				AML_BUFFER_OP)
			    || (op->common.parent->common.aml_opcode ==
				AML_PACKAGE_OP)
			    || (op->common.parent->common.aml_opcode ==
				AML_BANK_FIELD_OP)
			    || (op->common.parent->common.aml_opcode ==
				AML_VARIABLE_PACKAGE_OP)) {
				replacement_op =
				    acpi_ps_alloc_op(AML_INT_RETURN_VALUE_OP,
						     op->common.aml);
				if (!replacement_op) {
					status = AE_NO_MEMORY;
				}
			} else
			    if ((op->common.parent->common.aml_opcode ==
				 AML_NAME_OP)
				&& (walk_state->pass_number <=
				    ACPI_IMODE_LOAD_PASS2)) {
				if ((op->common.aml_opcode == AML_BUFFER_OP)
				    || (op->common.aml_opcode == AML_PACKAGE_OP)
				    || (op->common.aml_opcode ==
					AML_VARIABLE_PACKAGE_OP)) {
					replacement_op =
					    acpi_ps_alloc_op(op->common.
							     aml_opcode,
							     op->common.aml);
					if (!replacement_op) {
						status = AE_NO_MEMORY;
					} else {
						replacement_op->named.data =
						    op->named.data;
						replacement_op->named.length =
						    op->named.length;
					}
				}
			}
			break;

		default:

			replacement_op =
			    acpi_ps_alloc_op(AML_INT_RETURN_VALUE_OP,
					     op->common.aml);
			if (!replacement_op) {
				status = AE_NO_MEMORY;
			}
		}

		/* We must unlink this op from the parent tree */

		if (prev == op) {

			/* This op is the first in the list */

			if (replacement_op) {
				replacement_op->common.parent =
				    op->common.parent;
				replacement_op->common.value.arg = NULL;
				replacement_op->common.node = op->common.node;
				op->common.parent->common.value.arg =
				    replacement_op;
				replacement_op->common.next = op->common.next;
			} else {
				op->common.parent->common.value.arg =
				    op->common.next;
			}
		}

		/* Search the parent list */

		else
			while (prev) {

				/* Traverse all siblings in the parent's argument list */

				next = prev->common.next;
				if (next == op) {
					if (replacement_op) {
						replacement_op->common.parent =
						    op->common.parent;
						replacement_op->common.value.
						    arg = NULL;
						replacement_op->common.node =
						    op->common.node;
						prev->common.next =
						    replacement_op;
						replacement_op->common.next =
						    op->common.next;
						next = NULL;
					} else {
						prev->common.next =
						    op->common.next;
						next = NULL;
					}
				}
				prev = next;
			}
	}

cleanup:

	/* Now we can actually delete the subtree rooted at Op */

	acpi_ps_delete_parse_tree(op);
	return_ACPI_STATUS(status);
}
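
/*
 * Note on the placeholder replacement performed above: when the parse tree
 * is deleted as it is executed (ACPI_PARSE_DELETE_TREE), an op that supplies
 * a value to its parent cannot simply be unlinked. For example, in
 * "Name (BUF0, Buffer (0x10) {...})" the Buffer op is an argument of the
 * Name op, so it is replaced either by an AML_INT_RETURN_VALUE_OP
 * placeholder or, during the load passes, by a copy of the Buffer/Package
 * op itself, keeping the parent's argument list intact after the subtree
 * is freed.
 */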

/*******************************************************************************
 *
 * FUNCTION:    acpi_ps_next_parse_state
 *
 * PARAMETERS:  walk_state          - Current state
 *              op                  - Current parse op
 *              callback_status     - Status from previous operation
 *
 * RETURN:      Status
 *
 * DESCRIPTION: Update the parser state based upon the return exception from
 *              the parser callback.
 *
 ******************************************************************************/

acpi_status
acpi_ps_next_parse_state(struct acpi_walk_state *walk_state,
			 union acpi_parse_object *op,
			 acpi_status callback_status)
{
	struct acpi_parse_state *parser_state = &walk_state->parser_state;
	acpi_status status = AE_CTRL_PENDING;

	ACPI_FUNCTION_TRACE_PTR(ps_next_parse_state, op);

	switch (callback_status) {
	case AE_CTRL_TERMINATE:
		/*
		 * A control method was terminated via a RETURN statement.
		 * The walk of this method is complete.
		 */
		parser_state->aml = parser_state->aml_end;
		status = AE_CTRL_TERMINATE;
		break;

	case AE_CTRL_BREAK:

		parser_state->aml = walk_state->aml_last_while;
		walk_state->control_state->common.value = FALSE;
		status = AE_CTRL_BREAK;
		break;

	case AE_CTRL_CONTINUE:

		parser_state->aml = walk_state->aml_last_while;
		status = AE_CTRL_CONTINUE;
		break;

	case AE_CTRL_PENDING:

		parser_state->aml = walk_state->aml_last_while;
		break;

#if 0
	case AE_CTRL_SKIP:

		parser_state->aml = parser_state->scope->parse_scope.pkg_end;
		status = AE_OK;
		break;
#endif

	case AE_CTRL_TRUE:
		/*
		 * Predicate of an IF was true, and we are at the matching ELSE.
		 * Just close out this package
		 */
		parser_state->aml = acpi_ps_get_next_package_end(parser_state);
		status = AE_CTRL_PENDING;
		break;

	case AE_CTRL_FALSE:
		/*
		 * Either an IF/WHILE Predicate was false or we encountered a BREAK
		 * opcode. In both cases, we do not execute the rest of the
		 * package; We simply close out the parent (finishing the walk of
		 * this branch of the tree) and continue execution at the parent
		 * level.
		 */
		parser_state->aml = parser_state->scope->parse_scope.pkg_end;

		/* In the case of a BREAK, just force a predicate (if any) to FALSE */

		walk_state->control_state->common.value = FALSE;
		status = AE_CTRL_END;
		break;

	case AE_CTRL_TRANSFER:

		/* A method call (invocation) -- transfer control */

		status = AE_CTRL_TRANSFER;
		walk_state->prev_op = op;
		walk_state->method_call_op = op;
		walk_state->method_call_node =
		    (op->common.value.arg)->common.node;

		/* Will return value (if any) be used by the caller? */

		walk_state->return_used =
		    acpi_ds_is_result_used(op, walk_state);
		break;

	default:

		status = callback_status;
		if ((callback_status & AE_CODE_MASK) == AE_CODE_CONTROL) {
			status = AE_OK;
		}
		break;
	}

	return_ACPI_STATUS(status);
}
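
/*
 * The AE_CTRL_* values handled above are internal control codes (exception
 * class AE_CODE_CONTROL), not errors. For example, a method invocation is
 * reported as AE_CTRL_TRANSFER; the method_call_op and method_call_node
 * recorded here are then used by acpi_ps_parse_aml() below, which hands
 * execution to acpi_ds_call_control_method(). Control codes with no explicit
 * case are simply converted to AE_OK in the default case.
 */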

/*******************************************************************************
 *
 * FUNCTION:    acpi_ps_parse_aml
 *
 * PARAMETERS:  walk_state      - Current state
 *
 * RETURN:      Status
 *
 * DESCRIPTION: Parse raw AML and return a tree of ops
 *
 ******************************************************************************/

acpi_status acpi_ps_parse_aml(struct acpi_walk_state *walk_state)
{
	acpi_status status;
	struct acpi_thread_state *thread;
	struct acpi_thread_state *prev_walk_list = acpi_gbl_current_walk_list;
	struct acpi_walk_state *previous_walk_state;

	ACPI_FUNCTION_TRACE(ps_parse_aml);

	ACPI_DEBUG_PRINT((ACPI_DB_PARSE,
			  "Entered with WalkState=%p Aml=%p size=%X\n",
			  walk_state, walk_state->parser_state.aml,
			  walk_state->parser_state.aml_size));

	if (!walk_state->parser_state.aml) {
		return_ACPI_STATUS(AE_BAD_ADDRESS);
	}

	/* Create and initialize a new thread state */

	thread = acpi_ut_create_thread_state();
	if (!thread) {
		if (walk_state->method_desc) {

			/* Executing a control method - additional cleanup */

			acpi_ds_terminate_control_method(walk_state->
							 method_desc,
							 walk_state);
		}

		acpi_ds_delete_walk_state(walk_state);
		return_ACPI_STATUS(AE_NO_MEMORY);
	}

	walk_state->thread = thread;

	/*
	 * If executing a method, the starting sync_level is this method's
	 * sync_level
	 */
	if (walk_state->method_desc) {
		walk_state->thread->current_sync_level =
		    walk_state->method_desc->method.sync_level;
	}

	acpi_ds_push_walk_state(walk_state, thread);

	/*
	 * This global allows the AML debugger to get a handle to the currently
	 * executing control method.
	 */
	acpi_gbl_current_walk_list = thread;

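	/*
	 * Note on the loop below: nested control method calls do not recurse
	 * into acpi_ps_parse_aml(). When the dispatcher reports
	 * AE_CTRL_TRANSFER, acpi_ds_call_control_method() pushes a new walk
	 * state for the callee onto this thread and the loop continues with
	 * that state; when a method completes, acpi_ds_pop_walk_state()
	 * resumes the caller. Each nesting level therefore costs one walk
	 * state rather than one C stack frame.
	 */
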
	/*
	 * Execute the walk loop as long as there is a valid Walk State. This
	 * handles nested control method invocations without recursion.
	 */
	ACPI_DEBUG_PRINT((ACPI_DB_PARSE, "State=%p\n", walk_state));

	status = AE_OK;
	while (walk_state) {
		if (ACPI_SUCCESS(status)) {
			/*
			 * The parse_loop executes AML until the method terminates
			 * or calls another method.
			 */
			status = acpi_ps_parse_loop(walk_state);
		}

		ACPI_DEBUG_PRINT((ACPI_DB_PARSE,
				  "Completed one call to walk loop, %s State=%p\n",
				  acpi_format_exception(status), walk_state));

		if (walk_state->method_pathname && walk_state->method_is_nested) {

			/* Optional object evaluation log */

			ACPI_DEBUG_PRINT_RAW((ACPI_DB_EVALUATION,
					      "%-26s:  %*s%s\n",
					      "   Exit nested method",
					      (walk_state->
					       method_nesting_depth + 1) * 3,
					      " ",
					      &walk_state->method_pathname[1]));

			ACPI_FREE(walk_state->method_pathname);
			walk_state->method_is_nested = FALSE;
		}
		if (status == AE_CTRL_TRANSFER) {
			/*
			 * A method call was detected.
			 * Transfer control to the called control method
			 */
			status =
			    acpi_ds_call_control_method(thread, walk_state,
							NULL);
			if (ACPI_FAILURE(status)) {
				status =
				    acpi_ds_method_error(status, walk_state);
			}

			/*
			 * If the transfer to the new method call worked, a new
			 * walk state was created -- get it
			 */
			walk_state = acpi_ds_get_current_walk_state(thread);
			continue;
		} else if (status == AE_CTRL_TERMINATE) {
			status = AE_OK;
		} else if ((status != AE_OK) && (walk_state->method_desc)) {

			/* Either the method parse or actual execution failed */

			acpi_ex_exit_interpreter();
			if (status == AE_ABORT_METHOD) {
				acpi_ns_print_node_pathname(walk_state->
							    method_node,
							    "Aborting method");
				acpi_os_printf("\n");
			} else {
				ACPI_ERROR_METHOD("Aborting method",
						  walk_state->method_node, NULL,
						  status);
			}
			acpi_ex_enter_interpreter();

			/* Check for possible multi-thread reentrancy problem */

			if ((status == AE_ALREADY_EXISTS) &&
			    (!(walk_state->method_desc->method.info_flags &
			       ACPI_METHOD_SERIALIZED))) {
				/*
				 * Method is not serialized and tried to create an object
				 * twice. The probable cause is that the method cannot
				 * handle reentrancy. Mark as "pending serialized" now, and
				 * then mark "serialized" when the last thread exits.
				 */
				walk_state->method_desc->method.info_flags |=
				    ACPI_METHOD_SERIALIZED_PENDING;
			}
		}

		/* We are done with this walk, move on to the parent if any */

		walk_state = acpi_ds_pop_walk_state(thread);

		/* Reset the current scope to the beginning of scope stack */

		acpi_ds_scope_stack_clear(walk_state);

		/*
		 * If we just returned from the execution of a control method or if we
		 * encountered an error during the method parse phase, there's lots of
		 * cleanup to do
		 */
		if (((walk_state->parse_flags & ACPI_PARSE_MODE_MASK) ==
		     ACPI_PARSE_EXECUTE &&
		     !(walk_state->parse_flags & ACPI_PARSE_MODULE_LEVEL)) ||
		    (ACPI_FAILURE(status))) {
			acpi_ds_terminate_control_method(walk_state->
							 method_desc,
							 walk_state);
		}

		/* Delete this walk state and all linked control states */

		acpi_ps_cleanup_scope(&walk_state->parser_state);
		previous_walk_state = walk_state;

		ACPI_DEBUG_PRINT((ACPI_DB_PARSE,
				  "ReturnValue=%p, ImplicitValue=%p State=%p\n",
				  walk_state->return_desc,
				  walk_state->implicit_return_obj, walk_state));

		/* Check if we have restarted a preempted walk */

		walk_state = acpi_ds_get_current_walk_state(thread);
		if (walk_state) {
			if (ACPI_SUCCESS(status)) {
				/*
				 * There is another walk state, restart it.
				 * If the method return value is not used by the parent,
				 * the object is deleted
				 */
				if (!previous_walk_state->return_desc) {
					/*
					 * In slack mode execution, if there is no return value
					 * we should implicitly return zero (0) as a default value.
					 */
					if (acpi_gbl_enable_interpreter_slack &&
					    !previous_walk_state->
					    implicit_return_obj) {
						previous_walk_state->
						    implicit_return_obj =
						    acpi_ut_create_integer_object
						    ((u64) 0);
						if (!previous_walk_state->
						    implicit_return_obj) {
							return_ACPI_STATUS
							    (AE_NO_MEMORY);
						}
					}

					/* Restart the calling control method */

					status =
					    acpi_ds_restart_control_method
					    (walk_state,
					     previous_walk_state->
					     implicit_return_obj);
				} else {
					/*
					 * We have a valid return value, delete any implicit
					 * return value.
					 */
					acpi_ds_clear_implicit_return
					    (previous_walk_state);

					status =
					    acpi_ds_restart_control_method
					    (walk_state,
					     previous_walk_state->return_desc);
				}
				if (ACPI_SUCCESS(status)) {
					walk_state->walk_type |=
					    ACPI_WALK_METHOD_RESTART;
				}
			} else {
				/* On error, delete any return object or implicit return */

				acpi_ut_remove_reference(previous_walk_state->
							 return_desc);
				acpi_ds_clear_implicit_return
				    (previous_walk_state);
			}
		}

		/*
		 * Just completed a 1st-level method, save the final internal return
		 * value (if any)
		 */
		else if (previous_walk_state->caller_return_desc) {
			if (previous_walk_state->implicit_return_obj) {
				*(previous_walk_state->caller_return_desc) =
				    previous_walk_state->implicit_return_obj;
			} else {
				/* NULL if no return value */

				*(previous_walk_state->caller_return_desc) =
				    previous_walk_state->return_desc;
			}
		} else {
			if (previous_walk_state->return_desc) {

				/* Caller doesn't want it, must delete it */

				acpi_ut_remove_reference(previous_walk_state->
							 return_desc);
			}
			if (previous_walk_state->implicit_return_obj) {

				/* Caller doesn't want it, must delete it */

				acpi_ut_remove_reference(previous_walk_state->
							 implicit_return_obj);
			}
		}

		acpi_ds_delete_walk_state(previous_walk_state);
	}

	/* Normal exit */

	acpi_ex_release_all_mutexes(thread);
	acpi_ut_delete_generic_state(ACPI_CAST_PTR
				     (union acpi_generic_state, thread));
	acpi_gbl_current_walk_list = prev_walk_list;
	return_ACPI_STATUS(status);
}