// SPDX-License-Identifier: BSD-3-Clause OR GPL-2.0
/******************************************************************************
 *
 * Module Name: psparse - Parser top level AML parse routines
 *
 * Copyright (C) 2000 - 2018, Intel Corp.
 *
 *****************************************************************************/

/*
 * Parse the AML and build an operation tree as most interpreters,
 * like Perl, do. Parsing is done by hand rather than with a YACC
 * generated parser to tightly constrain stack and dynamic memory
 * usage. At the same time, parsing is kept flexible and the code
 * fairly compact by parsing based on a list of AML opcode
 * templates in aml_op_info[].
 */

#include <acpi/acpi.h>
#include "accommon.h"
#include "acparser.h"
#include "acdispat.h"
#include "amlcode.h"
#include "acinterp.h"
#include "acnamesp.h"

#define _COMPONENT          ACPI_PARSER
ACPI_MODULE_NAME("psparse")

/*******************************************************************************
 *
 * FUNCTION:    acpi_ps_get_opcode_size
 *
 * PARAMETERS:  opcode          - An AML opcode
 *
 * RETURN:      Size of the opcode, in bytes (1 or 2)
 *
 * DESCRIPTION: Get the size of the current opcode.
 *
 ******************************************************************************/

u32 acpi_ps_get_opcode_size(u32 opcode)
{
	/* Extended (2-byte) opcode if > 255 */

	if (opcode > 0x00FF) {
		return (2);
	}

	/* Otherwise, just a single byte opcode */

	return (1);
}
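
/*
 * Illustrative sketch (not part of ACPICA): the size rule expressed against
 * raw opcode values. One-byte opcodes occupy 0x00-0xFF; extended opcodes are
 * encoded as the 0x5B extended prefix followed by a second byte, so their
 * 16-bit value exceeds 0x00FF and they occupy two bytes in the AML stream.
 * The helper below is hypothetical and exists only to show expected results.
 */
#if 0
static void acpi_ps_example_opcode_sizes(void)
{
	u32 simple_size;
	u32 extended_size;

	/* A plain one-byte opcode such as 0x22 */

	simple_size = acpi_ps_get_opcode_size(0x22);	/* == 1 */

	/* An extended opcode such as 0x5B21 (0x5B prefix + second byte) */

	extended_size = acpi_ps_get_opcode_size(0x5B21);	/* == 2 */
}
#endif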

/*******************************************************************************
 *
 * FUNCTION:    acpi_ps_peek_opcode
 *
 * PARAMETERS:  parser_state        - A parser state object
 *
 * RETURN:      Next AML opcode
 *
 * DESCRIPTION: Get next AML opcode (without incrementing AML pointer)
 *
 ******************************************************************************/

u16 acpi_ps_peek_opcode(struct acpi_parse_state *parser_state)
{
	u8 *aml;
	u16 opcode;

	aml = parser_state->aml;
	opcode = (u16) ACPI_GET8(aml);

	if (opcode == AML_EXTENDED_PREFIX) {

		/* Extended opcode, get the second opcode byte */

		aml++;
		opcode = (u16) ((opcode << 8) | ACPI_GET8(aml));
	}

	return (opcode);
}
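
/*
 * Illustrative sketch (not part of ACPICA): peeking the next opcode and then
 * advancing the AML pointer past it, which mirrors what the parse loop does
 * internally. The helper name acpi_ps_example_consume_opcode is hypothetical.
 */
#if 0
static u16 acpi_ps_example_consume_opcode(struct acpi_parse_state *parser_state)
{
	u16 opcode;

	/* Read the opcode without moving parser_state->aml */

	opcode = acpi_ps_peek_opcode(parser_state);

	/* Step over one byte for simple opcodes, two for 0x5B-prefixed ones */

	parser_state->aml += acpi_ps_get_opcode_size(opcode);
	return (opcode);
}
#endif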

/*******************************************************************************
 *
 * FUNCTION:    acpi_ps_complete_this_op
 *
 * PARAMETERS:  walk_state      - Current State
 *              op              - Op to complete
 *
 * RETURN:      Status
 *
 * DESCRIPTION: Perform any cleanup at the completion of an Op.
 *
 ******************************************************************************/

acpi_status
acpi_ps_complete_this_op(struct acpi_walk_state *walk_state,
			 union acpi_parse_object *op)
{
	union acpi_parse_object *prev;
	union acpi_parse_object *next;
	const struct acpi_opcode_info *parent_info;
	union acpi_parse_object *replacement_op = NULL;
	acpi_status status = AE_OK;

	ACPI_FUNCTION_TRACE_PTR(ps_complete_this_op, op);

	/* Check for null Op, can happen if AML code is corrupt */

	if (!op) {
		return_ACPI_STATUS(AE_OK);	/* OK for now */
	}

	acpi_ex_stop_trace_opcode(op, walk_state);

	/* Delete this op and the subtree below it if asked to */

	if (((walk_state->parse_flags & ACPI_PARSE_TREE_MASK) !=
	     ACPI_PARSE_DELETE_TREE)
	    || (walk_state->op_info->class == AML_CLASS_ARGUMENT)) {
		return_ACPI_STATUS(AE_OK);
	}

	/* Make sure that we only delete this subtree */

	if (op->common.parent) {
		prev = op->common.parent->common.value.arg;
		if (!prev) {

			/* Nothing more to do */

			goto cleanup;
		}

		/*
		 * Check if we need to replace the operator and its subtree
		 * with a return value op (placeholder op)
		 */
		parent_info =
		    acpi_ps_get_opcode_info(op->common.parent->common.aml_opcode);

		switch (parent_info->class) {
		case AML_CLASS_CONTROL:

			break;

		case AML_CLASS_CREATE:
			/*
			 * These opcodes contain term_arg operands. The current
			 * op must be replaced by a placeholder return op
			 */
			replacement_op =
			    acpi_ps_alloc_op(AML_INT_RETURN_VALUE_OP,
					     op->common.aml);
			if (!replacement_op) {
				status = AE_NO_MEMORY;
			}
			break;

		case AML_CLASS_NAMED_OBJECT:
			/*
			 * These opcodes contain term_arg operands. The current
			 * op must be replaced by a placeholder return op
			 */
			if ((op->common.parent->common.aml_opcode == AML_REGION_OP)
			    || (op->common.parent->common.aml_opcode == AML_DATA_REGION_OP)
			    || (op->common.parent->common.aml_opcode == AML_BUFFER_OP)
			    || (op->common.parent->common.aml_opcode == AML_PACKAGE_OP)
			    || (op->common.parent->common.aml_opcode == AML_BANK_FIELD_OP)
			    || (op->common.parent->common.aml_opcode == AML_VARIABLE_PACKAGE_OP)) {
				replacement_op =
				    acpi_ps_alloc_op(AML_INT_RETURN_VALUE_OP,
						     op->common.aml);
				if (!replacement_op) {
					status = AE_NO_MEMORY;
				}
			} else if ((op->common.parent->common.aml_opcode == AML_NAME_OP)
				   && (walk_state->pass_number <= ACPI_IMODE_LOAD_PASS2)) {
				if ((op->common.aml_opcode == AML_BUFFER_OP)
				    || (op->common.aml_opcode == AML_PACKAGE_OP)
				    || (op->common.aml_opcode == AML_VARIABLE_PACKAGE_OP)) {
					replacement_op =
					    acpi_ps_alloc_op(op->common.aml_opcode,
							     op->common.aml);
					if (!replacement_op) {
						status = AE_NO_MEMORY;
					} else {
						replacement_op->named.data =
						    op->named.data;
						replacement_op->named.length =
						    op->named.length;
					}
				}
			}
			break;

		default:

			replacement_op =
			    acpi_ps_alloc_op(AML_INT_RETURN_VALUE_OP,
					     op->common.aml);
			if (!replacement_op) {
				status = AE_NO_MEMORY;
			}
			break;
		}

		/* We must unlink this op from the parent tree */

		if (prev == op) {

			/* This op is the first in the list */

			if (replacement_op) {
				replacement_op->common.parent = op->common.parent;
				replacement_op->common.value.arg = NULL;
				replacement_op->common.node = op->common.node;
				op->common.parent->common.value.arg = replacement_op;
				replacement_op->common.next = op->common.next;
			} else {
				op->common.parent->common.value.arg = op->common.next;
			}
		}

		/* Search the parent list */

		else
			while (prev) {

				/* Traverse all siblings in the parent's argument list */

				next = prev->common.next;
				if (next == op) {
					if (replacement_op) {
						replacement_op->common.parent =
						    op->common.parent;
						replacement_op->common.value.arg = NULL;
						replacement_op->common.node =
						    op->common.node;
						prev->common.next = replacement_op;
						replacement_op->common.next =
						    op->common.next;
						next = NULL;
					} else {
						prev->common.next = op->common.next;
						next = NULL;
					}
				}

				prev = next;
			}
	}

cleanup:

	/* Now we can actually delete the subtree rooted at Op */

	acpi_ps_delete_parse_tree(op);
	return_ACPI_STATUS(status);
}
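
/*
 * Illustrative sketch (not part of ACPICA): the parse-tree layout that
 * acpi_ps_complete_this_op() walks. A parent's first argument is reached via
 * parent->common.value.arg and further siblings are chained through
 * common.next. The hypothetical helper below finds the sibling that precedes
 * a given op, which is the link the function above rewrites when it unlinks
 * the op or splices in a placeholder return-value op.
 */
#if 0
static union acpi_parse_object *
acpi_ps_example_find_prev_sibling(union acpi_parse_object *op)
{
	union acpi_parse_object *prev;

	if (!op->common.parent) {
		return (NULL);
	}

	/* The first child has no predecessor */

	prev = op->common.parent->common.value.arg;
	if (prev == op) {
		return (NULL);
	}

	/* Walk the sibling chain until the next link is the target op */

	while (prev && (prev->common.next != op)) {
		prev = prev->common.next;
	}

	return (prev);
}
#endif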

/*******************************************************************************
 *
 * FUNCTION:    acpi_ps_next_parse_state
 *
 * PARAMETERS:  walk_state          - Current state
 *              op                  - Current parse op
 *              callback_status     - Status from previous operation
 *
 * RETURN:      Status
 *
 * DESCRIPTION: Update the parser state based upon the return exception from
 *              the parser callback.
 *
 ******************************************************************************/

acpi_status
acpi_ps_next_parse_state(struct acpi_walk_state *walk_state,
			 union acpi_parse_object *op,
			 acpi_status callback_status)
{
	struct acpi_parse_state *parser_state = &walk_state->parser_state;
	acpi_status status = AE_CTRL_PENDING;

	ACPI_FUNCTION_TRACE_PTR(ps_next_parse_state, op);

	switch (callback_status) {
	case AE_CTRL_TERMINATE:
		/*
		 * A control method was terminated via a RETURN statement.
		 * The walk of this method is complete.
		 */
		parser_state->aml = parser_state->aml_end;
		status = AE_CTRL_TERMINATE;
		break;

	case AE_CTRL_BREAK:

		parser_state->aml = walk_state->aml_last_while;
		walk_state->control_state->common.value = FALSE;
		status = AE_CTRL_BREAK;
		break;

	case AE_CTRL_CONTINUE:

		parser_state->aml = walk_state->aml_last_while;
		status = AE_CTRL_CONTINUE;
		break;

	case AE_CTRL_PENDING:

		parser_state->aml = walk_state->aml_last_while;
		break;

#if 0
	case AE_CTRL_SKIP:

		parser_state->aml = parser_state->scope->parse_scope.pkg_end;
		status = AE_OK;
		break;
#endif

	case AE_CTRL_TRUE:
		/*
		 * Predicate of an IF was true, and we are at the matching ELSE.
		 * Just close out this package
		 */
		parser_state->aml = acpi_ps_get_next_package_end(parser_state);
		status = AE_CTRL_PENDING;
		break;

	case AE_CTRL_FALSE:
		/*
		 * Either an IF/WHILE predicate was false or we encountered a BREAK
		 * opcode. In both cases, we do not execute the rest of the
		 * package; we simply close out the parent (finishing the walk of
		 * this branch of the tree) and continue execution at the parent
		 * level.
		 */
		parser_state->aml = parser_state->scope->parse_scope.pkg_end;

		/* In the case of a BREAK, just force a predicate (if any) to FALSE */

		walk_state->control_state->common.value = FALSE;
		status = AE_CTRL_END;
		break;

	case AE_CTRL_TRANSFER:

		/* A method call (invocation) -- transfer control */

		status = AE_CTRL_TRANSFER;
		walk_state->prev_op = op;
		walk_state->method_call_op = op;
		walk_state->method_call_node =
		    (op->common.value.arg)->common.node;

		/* Will the return value (if any) be used by the caller? */

		walk_state->return_used =
		    acpi_ds_is_result_used(op, walk_state);
		break;

	default:

		status = callback_status;
		if ((callback_status & AE_CODE_MASK) == AE_CODE_CONTROL) {
			status = AE_OK;
		}
		break;
	}

	return_ACPI_STATUS(status);
}
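
/*
 * Illustrative sketch (not part of ACPICA): how a caller might act on the
 * status returned by acpi_ps_next_parse_state(). The AE_CTRL_* inputs have
 * already repositioned parser_state->aml by the time this returns, so the
 * caller only decides whether to keep iterating or to hand control to a
 * called method. The wrapper name is hypothetical.
 */
#if 0
static acpi_status
acpi_ps_example_handle_callback(struct acpi_walk_state *walk_state,
				union acpi_parse_object *op,
				acpi_status callback_status)
{
	acpi_status status;

	status = acpi_ps_next_parse_state(walk_state, op, callback_status);

	switch (status) {
	case AE_CTRL_TRANSFER:

		/* Method invocation: switch to the callee's walk state */

		break;

	case AE_CTRL_TERMINATE:

		/* Method returned: parser_state->aml now points at aml_end */

		break;

	default:

		/* AE_CTRL_PENDING and friends: keep iterating the parse loop */

		break;
	}

	return (status);
}
#endif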

/*******************************************************************************
 *
 * FUNCTION:    acpi_ps_parse_aml
 *
 * PARAMETERS:  walk_state          - Current state
 *
 * RETURN:      Status
 *
 * DESCRIPTION: Parse raw AML and return a tree of ops
 *
 ******************************************************************************/

acpi_status acpi_ps_parse_aml(struct acpi_walk_state *walk_state)
{
	acpi_status status;
	struct acpi_thread_state *thread;
	struct acpi_thread_state *prev_walk_list = acpi_gbl_current_walk_list;
	struct acpi_walk_state *previous_walk_state;

	ACPI_FUNCTION_TRACE(ps_parse_aml);

	ACPI_DEBUG_PRINT((ACPI_DB_PARSE,
			  "Entered with WalkState=%p Aml=%p size=%X\n",
			  walk_state, walk_state->parser_state.aml,
			  walk_state->parser_state.aml_size));

	if (!walk_state->parser_state.aml) {
		return_ACPI_STATUS(AE_BAD_ADDRESS);
	}

	/* Create and initialize a new thread state */

	thread = acpi_ut_create_thread_state();
	if (!thread) {
		if (walk_state->method_desc) {

			/* Executing a control method - additional cleanup */

			acpi_ds_terminate_control_method(walk_state->method_desc,
							 walk_state);
		}

		acpi_ds_delete_walk_state(walk_state);
		return_ACPI_STATUS(AE_NO_MEMORY);
	}

	walk_state->thread = thread;

	/*
	 * If executing a method, the starting sync_level is this method's
	 * sync_level
	 */
	if (walk_state->method_desc) {
		walk_state->thread->current_sync_level =
		    walk_state->method_desc->method.sync_level;
	}

	acpi_ds_push_walk_state(walk_state, thread);

	/*
	 * This global allows the AML debugger to get a handle to the currently
	 * executing control method.
	 */
	acpi_gbl_current_walk_list = thread;

	/*
	 * Execute the walk loop as long as there is a valid Walk State. This
	 * handles nested control method invocations without recursion.
	 */
	ACPI_DEBUG_PRINT((ACPI_DB_PARSE, "State=%p\n", walk_state));

	status = AE_OK;
	while (walk_state) {
		if (ACPI_SUCCESS(status)) {
			/*
			 * The parse_loop executes AML until the method terminates
			 * or calls another method.
			 */
			status = acpi_ps_parse_loop(walk_state);
		}

		ACPI_DEBUG_PRINT((ACPI_DB_PARSE,
				  "Completed one call to walk loop, %s State=%p\n",
				  acpi_format_exception(status), walk_state));

		if (status == AE_CTRL_TRANSFER) {
			/*
			 * A method call was detected.
			 * Transfer control to the called control method
			 */
			status =
			    acpi_ds_call_control_method(thread, walk_state, NULL);
			if (ACPI_FAILURE(status)) {
				status =
				    acpi_ds_method_error(status, walk_state);
			}

			/*
			 * If the transfer to the new method call worked, a new
			 * walk state was created -- get it
			 */
			walk_state = acpi_ds_get_current_walk_state(thread);
			continue;
		} else if (status == AE_CTRL_TERMINATE) {
			status = AE_OK;
		} else if ((status != AE_OK) && (walk_state->method_desc)) {

			/* Either the method parse or actual execution failed */

			acpi_ex_exit_interpreter();
			if (status == AE_ABORT_METHOD) {
				acpi_ns_print_node_pathname(walk_state->method_node,
							    "Method aborted:");
				acpi_os_printf("\n");
			} else {
				ACPI_ERROR_METHOD("Method parse/execution failed",
						  walk_state->method_node, NULL,
						  status);
			}
			acpi_ex_enter_interpreter();

			/* Check for possible multi-thread reentrancy problem */

			if ((status == AE_ALREADY_EXISTS) &&
			    (!(walk_state->method_desc->method.info_flags &
			       ACPI_METHOD_SERIALIZED))) {
				/*
				 * Method is not serialized and tried to create an object
				 * twice. The probable cause is that the method cannot
				 * handle reentrancy. Mark as "pending serialized" now, and
				 * then mark "serialized" when the last thread exits.
				 */
				walk_state->method_desc->method.info_flags |=
				    ACPI_METHOD_SERIALIZED_PENDING;
			}
		}

		/* We are done with this walk, move on to the parent if any */

		walk_state = acpi_ds_pop_walk_state(thread);

		/* Reset the current scope to the beginning of scope stack */

		acpi_ds_scope_stack_clear(walk_state);

		/*
		 * If we just returned from the execution of a control method or if we
		 * encountered an error during the method parse phase, there's lots of
		 * cleanup to do
		 */
		if (((walk_state->parse_flags & ACPI_PARSE_MODE_MASK) ==
		     ACPI_PARSE_EXECUTE &&
		     !(walk_state->parse_flags & ACPI_PARSE_MODULE_LEVEL)) ||
		    (ACPI_FAILURE(status))) {
			acpi_ds_terminate_control_method(walk_state->method_desc,
							 walk_state);
		}

		/* Delete this walk state and all linked control states */

		acpi_ps_cleanup_scope(&walk_state->parser_state);
		previous_walk_state = walk_state;

		ACPI_DEBUG_PRINT((ACPI_DB_PARSE,
				  "ReturnValue=%p, ImplicitValue=%p State=%p\n",
				  walk_state->return_desc,
				  walk_state->implicit_return_obj, walk_state));

		/* Check if we have restarted a preempted walk */

		walk_state = acpi_ds_get_current_walk_state(thread);
		if (walk_state) {
			if (ACPI_SUCCESS(status)) {
				/*
				 * There is another walk state, restart it.
				 * If the method return value is not used by the parent,
				 * the object is deleted
				 */
				if (!previous_walk_state->return_desc) {
					/*
					 * In slack mode execution, if there is no return value
					 * we should implicitly return zero (0) as a default value.
					 */
					if (acpi_gbl_enable_interpreter_slack &&
					    !previous_walk_state->implicit_return_obj) {
						previous_walk_state->implicit_return_obj =
						    acpi_ut_create_integer_object((u64) 0);
						if (!previous_walk_state->implicit_return_obj) {
							return_ACPI_STATUS(AE_NO_MEMORY);
						}
					}

					/* Restart the calling control method */

					status =
					    acpi_ds_restart_control_method(walk_state,
									   previous_walk_state->
									   implicit_return_obj);
				} else {
					/*
					 * We have a valid return value, delete any implicit
					 * return value.
					 */
					acpi_ds_clear_implicit_return(previous_walk_state);

					status =
					    acpi_ds_restart_control_method(walk_state,
									   previous_walk_state->
									   return_desc);
				}
				if (ACPI_SUCCESS(status)) {
					walk_state->walk_type |=
					    ACPI_WALK_METHOD_RESTART;
				}
			} else {
				/* On error, delete any return object or implicit return */

				acpi_ut_remove_reference(previous_walk_state->
							 return_desc);
				acpi_ds_clear_implicit_return(previous_walk_state);
			}
		}

		/*
		 * Just completed a 1st-level method, save the final internal return
		 * value (if any)
		 */
		else if (previous_walk_state->caller_return_desc) {
			if (previous_walk_state->implicit_return_obj) {
				*(previous_walk_state->caller_return_desc) =
				    previous_walk_state->implicit_return_obj;
			} else {
				/* NULL if no return value */

				*(previous_walk_state->caller_return_desc) =
				    previous_walk_state->return_desc;
			}
		} else {
			if (previous_walk_state->return_desc) {

				/* Caller doesn't want it, must delete it */

				acpi_ut_remove_reference(previous_walk_state->
							 return_desc);
			}
			if (previous_walk_state->implicit_return_obj) {

				/* Caller doesn't want it, must delete it */

				acpi_ut_remove_reference(previous_walk_state->
							 implicit_return_obj);
			}
		}

		acpi_ds_delete_walk_state(previous_walk_state);
	}

	/* Normal exit */

	acpi_ex_release_all_mutexes(thread);
	acpi_ut_delete_generic_state(ACPI_CAST_PTR
				     (union acpi_generic_state, thread));
	acpi_gbl_current_walk_list = prev_walk_list;
	return_ACPI_STATUS(status);
}
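
/*
 * Illustrative sketch (not part of ACPICA): the shape of the non-recursive
 * walk loop in acpi_ps_parse_aml(). Nested method calls never recurse into
 * the parser; AE_CTRL_TRANSFER causes the callee's walk state to be pushed
 * onto the thread, and the loop simply continues with whichever walk state is
 * current. This skeleton deliberately omits the return-value bookkeeping and
 * error handling that the real function performs; the helper name is
 * hypothetical.
 */
#if 0
static acpi_status acpi_ps_example_walk_loop(struct acpi_thread_state *thread)
{
	struct acpi_walk_state *walk_state;
	acpi_status status = AE_OK;

	walk_state = acpi_ds_get_current_walk_state(thread);
	while (walk_state) {

		/* Parse/execute until the method returns or calls another method */

		status = acpi_ps_parse_loop(walk_state);
		if (status == AE_CTRL_TRANSFER) {

			/* Push the callee's walk state and continue with it */

			(void)acpi_ds_call_control_method(thread, walk_state, NULL);
			walk_state = acpi_ds_get_current_walk_state(thread);
			continue;
		}

		/* The current method is done; pop it and resume its caller (if any) */

		walk_state = acpi_ds_pop_walk_state(thread);
		acpi_ds_delete_walk_state(walk_state);
		walk_state = acpi_ds_get_current_walk_state(thread);
	}

	return (status);
}
#endif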