/*
   +----------------------------------------------------------------------+
   | HipHop for PHP                                                       |
   +----------------------------------------------------------------------+
   | Copyright (c) 2010-present Facebook, Inc. (http://www.facebook.com)  |
   +----------------------------------------------------------------------+
   | This source file is subject to version 3.01 of the PHP license,      |
   | that is bundled with this package in the file LICENSE, and is        |
   | available through the world-wide-web at the following url:           |
   | http://www.php.net/license/3_01.txt                                  |
   | If you did not receive a copy of the PHP license and are unable to   |
   | obtain it through the world-wide-web, please send a note to          |
   | license@php.net so we can mail you a copy immediately.               |
   +----------------------------------------------------------------------+
*/
#pragma once

#include <algorithm>

#include "hphp/runtime/base/type-string.h"
#include "hphp/runtime/base/array-provenance.h"

#include "hphp/hhbbc/analyze.h"
#include "hphp/hhbbc/bc.h"
#include "hphp/hhbbc/class-util.h"
#include "hphp/hhbbc/context.h"
#include "hphp/hhbbc/func-util.h"
#include "hphp/hhbbc/index.h"
#include "hphp/hhbbc/interp-state.h"
#include "hphp/hhbbc/interp.h"
#include "hphp/hhbbc/options.h"
#include "hphp/hhbbc/representation.h"
#include "hphp/hhbbc/type-structure.h"
#include "hphp/hhbbc/type-system.h"

namespace HPHP::HHBBC {

struct LocalRange;

//////////////////////////////////////////////////////////////////////

TRACE_SET_MOD(hhbbc);

//////////////////////////////////////////////////////////////////////
struct TrackedElemInfo {
  TrackedElemInfo(uint32_t d, uint32_t i) : depth{d}, idx{i} {}
  // stack depth of the AddElem we're tracking
  uint32_t depth;
  // bytecode index of the previous AddElem
  uint32_t idx;
};

/*
 * Interpreter Step State.
 *
 * This struct gives interpreter functions access to shared state. It's not in
 * interp-state.h because it's part of the internal implementation of
 * interpreter routines. The publicized state as results of interpretation are
 * in that header and interp.h.
 */
struct ISS {
  ISS(Interp& bag, PropagateFn propagate)
    : index(bag.index)
    , ctx(bag.ctx)
    , collect(bag.collect)
    , bid(bag.bid)
    , blk(*bag.blk)
    , state(bag.state)
    , undo(bag.undo)
    , propagate(std::move(propagate))
    , analyzeDepth(0)
  {}
  const IIndex& index;
  const AnalysisContext ctx;
  CollectedInfo& collect;
  const BlockId bid;
  const php::Block& blk;
  State& state;
  StateMutationUndo* undo;
  StepFlags flags;
  PropagateFn propagate;

  Optional<State> stateBefore;

  // If we're inside an impl (as opposed to reduce) this will be > 0
  uint32_t analyzeDepth{0};
  int32_t srcLoc{-1};
  bool reprocess{false};
  // As we process the block, we keep track of the optimized bytecode
  // stream. We expect that in steady state, there will be no changes;
  // so as we process the block, if the initial bytecodes are the
  // same, we just keep track of how many are the same in
  // unchangedBcs. Once things diverge, the replacements are stored in
  // replacedBcs.

  // number of unchanged bcs to take from blk.hhbcs
  uint32_t unchangedBcs{0};
  // new bytecodes
  BytecodeVec replacedBcs;
  CompactVector<TrackedElemInfo> trackedElems;
};

// Shared implementation behind impl() and reduce() below (defined in
// interp.cpp).
void impl_vec(ISS& env, bool reduce, BytecodeVec&& bcs);
// Rewind the optimized bytecode stream by one given bytecode, or by a
// count of instructions.
void rewind(ISS& env, const Bytecode&);
void rewind(ISS& env, int);
// Inspect recently emitted ops in the optimized bytecode stream.
const Bytecode* last_op(ISS& env, int idx = 0);
const Bytecode* op_from_slot(ISS& env, int, int prev = 0);

//////////////////////////////////////////////////////////////////////

namespace interp_step {

/*
 * An interp_step::in(ISS&, const bc::op&) function exists for every
 * bytecode. Most are defined in interp.cpp, but some (like FCallBuiltin and
 * member instructions) are defined elsewhere.
 */
#define O(opcode, ...) void in(ISS&, const bc::opcode&);
OPCODES
#undef O

}
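
/*
 * For illustration: OPCODES is an X-macro over every HHBC opcode, so the
 * O() definition above expands to one in() declaration per bytecode,
 * along the lines of:
 *
 *   void in(ISS&, const bc::Nop&);
 *   void in(ISS&, const bc::PopC&);
 *   ...
 *
 * (a sketch only; the authoritative list is the OPCODES macro itself).
 */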

/*
 * Find a contiguous local range which is equivalent to the given range and has
 * a smaller starting id. Only returns the equivalent first local because the
 * size doesn't change.
 */
LocalId equivLocalRange(ISS& env, const LocalRange& range);

namespace {

Type peekLocRaw(ISS& env, LocalId l);
bool peekLocCouldBeUninit(ISS& env, LocalId l);

#ifdef __clang__
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wunused-function"
#endif

/*
 * impl(...)
 *
 * Utility for chaining one bytecode implementation to a series of a few
 * others. Use reduce() if you also want to enable strength reduction
 * (i.e. the bytecode can be replaced by some other bytecode as an
 * optimization).
 *
 * The chained-to bytecodes should not take branches. For impl, the
 * canConstProp flag will only be set if it was set for all the
 * bytecodes.
 */
template<class... Ts>
void impl(ISS& env, Ts&&... ts) {
  impl_vec(env, false, { std::forward<Ts>(ts)... });
}
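
/*
 * Usage sketch (bc::SomeOp is a hypothetical opcode, named only for
 * illustration): an in() implementation can interpret its instruction as
 * an equivalent sequence of simpler bytecodes, while the original
 * instruction stays in the output stream:
 *
 *   void in(ISS& env, const bc::SomeOp&) {
 *     // Analyze SomeOp as if it were two PopCs:
 *     impl(env, bc::PopC {}, bc::PopC {});
 *   }
 */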

/*
 * Reduce means that (given some situation in the execution state),
 * a given bytecode could be replaced by some other bytecode
 * sequence. Ensure that if you call reduce(), it is before any
 * state-affecting operations (like popC()).
 */
void reduce(ISS& env, BytecodeVec&& bcs) {
  impl_vec(env, true, std::move(bcs));
}

template<class... Bytecodes>
void reduce(ISS& env, Bytecodes&&... hhbc) {
  reduce(env, { std::forward<Bytecodes>(hhbc)... });
}
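
/*
 * Sketch of a typical strength reduction (illustrative; the real
 * per-opcode implementations live in interp.cpp): if the operand already
 * has the result type, the instruction can be dropped outright:
 *
 *   void in(ISS& env, const bc::CastBool&) {
 *     if (topC(env).subtypeOf(BBool)) return reduce(env);
 *     // ... otherwise fall through to the general implementation
 *   }
 *
 * reduce(env) with no bytecodes erases the instruction; passing bytecodes
 * replaces it with that sequence in the optimized stream.
 */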

bool will_reduce(ISS& env) { return env.analyzeDepth == 0; }

void nothrow(ISS& env) {
  FTRACE(2, "    nothrow\n");
  env.flags.wasPEI = false;
}

void unreachable(ISS& env) {
  FTRACE(2, "    unreachable\n");
  env.state.unreachable = true;
}

void constprop(ISS& env) {
  FTRACE(2, "    constprop\n");
  env.flags.canConstProp = true;
}

void effect_free(ISS& env) {
  FTRACE(2, "    effect_free\n");
  nothrow(env);
  env.flags.effectFree = true;
}

void reanalyze_on_update(ISS& env) {
  FTRACE(2, "    reanalyze_on_update\n");
  env.collect.reanalyzeOnUpdate = true;
}

/*
 * Mark the current block as unconditionally jumping to target. The
 * caller must arrange for env.state to reflect the state that needs
 * to be propagated to the target, but it should not propagate that
 * state.
 */
void jmp_setdest(ISS& env, BlockId target) {
  env.flags.jmpDest = target;
}
void jmp_nevertaken(ISS& env) {
  jmp_setdest(env, env.blk.fallthrough);
}

void readUnknownParams(ISS& env) {
  for (LocalId p = 0; p < env.ctx.func->params.size(); p++) {
    if (p == env.flags.mayReadLocalSet.size()) break;
    env.flags.mayReadLocalSet.set(p);
  }
  env.flags.usedParams.set();
}

void readUnknownLocals(ISS& env) {
  env.flags.mayReadLocalSet.set();
  env.flags.usedParams.set();
}

void readAllLocals(ISS& env) {
  env.flags.mayReadLocalSet.set();
  env.flags.usedParams.set();
}

void doRet(ISS& env, Type t, bool hasEffects) {
  assertx(env.state.stack.empty());
  env.flags.mayReadLocalSet.set();
  env.flags.retParam = NoLocalId;
  env.flags.returned = t;
  if (!hasEffects) effect_free(env);
}

void hasInvariantIterBase(ISS& env) {
  env.collect.hasInvariantIterBase = true;
}

//////////////////////////////////////////////////////////////////////
// eval stack

Type popT(ISS& env) {
  assertx(!env.state.stack.empty());
  auto const ret = env.state.stack.back().type;
  FTRACE(2, "    pop: {}\n", show(ret));
  assertx(ret.subtypeOf(BCell));
  env.state.stack.pop_elem();
  env.state.topStkIterKeyEquiv = NoIterId;
  if (env.undo) env.undo->onPop(ret);
  return ret;
}

Type popC(ISS& env) {
  auto const v = popT(env);
  assertx(v.subtypeOf(BInitCell));
  return v;
}

Type popU(ISS& env) {
  auto const v = popT(env);
  assertx(v.subtypeOf(BUninit));
  return v;
}

Type popCU(ISS& env) {
  auto const v = popT(env);
  assertx(v.subtypeOf(BCell));
  return v;
}

Type popCV(ISS& env) { return popT(env); }

void discard(ISS& env, int n) {
  for (auto i = 0; i < n; ++i) popT(env);
}

const Type& topT(ISS& env, uint32_t idx = 0) {
  assertx(idx < env.state.stack.size());
  return env.state.stack[env.state.stack.size() - idx - 1].type;
}

const Type& topC(ISS& env, uint32_t i = 0) {
  assertx(topT(env, i).subtypeOf(BInitCell));
  return topT(env, i);
}

const Type& topCV(ISS& env, uint32_t i = 0) { return topT(env, i); }

void push(ISS& env, Type t) {
  FTRACE(2, "    push: {}\n", show(t));
  env.state.stack.push_elem(std::move(t), NoLocalId,
                            env.unchangedBcs + env.replacedBcs.size());
  env.state.topStkIterKeyEquiv = NoIterId;
  if (env.undo) env.undo->onPush();
}

void push(ISS& env, Type t, LocalId l) {
  if (l == NoLocalId) return push(env, t);
  if (l <= MaxLocalId && is_volatile_local(env.ctx.func, l)) {
    return push(env, t);
  }
  FTRACE(2, "    push: {} (={})\n", show(t), local_string(*env.ctx.func, l));
  env.state.stack.push_elem(std::move(t), l,
                            env.unchangedBcs + env.replacedBcs.size());
  env.state.topStkIterKeyEquiv = NoIterId;
  if (env.undo) env.undo->onPush();
}
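
/*
 * Typical step shape (a sketch; typeAdd is a hypothetical type combiner,
 * named only for illustration): a binary operation pops its inputs in
 * reverse order and pushes the computed result type:
 *
 *   void in(ISS& env, const bc::Add&) {
 *     auto const r = popC(env);
 *     auto const l = popC(env);
 *     push(env, typeAdd(l, r));
 *   }
 */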

//////////////////////////////////////////////////////////////////////
// $this

void setThisAvailable(ISS& env) {
  FTRACE(2, "    setThisAvailable\n");
  if (!env.ctx.cls || is_unused_trait(*env.ctx.cls) ||
      (env.ctx.func->attrs & AttrStatic)) {
    return unreachable(env);
  }
  if (!env.state.thisType.couldBe(BObj) ||
      !env.state.thisType.subtypeOf(BOptObj)) {
    return unreachable(env);
  }
  if (env.state.thisType.couldBe(BInitNull)) {
    env.state.thisType = unopt(std::move(env.state.thisType));
  }
}

bool thisAvailable(ISS& env) {
  return
    env.state.thisType.subtypeOf(BObj) &&
    !env.state.thisType.is(BBottom);
}

Type thisType(ISS& env) {
  return env.state.thisType;
}

Type thisTypeNonNull(ISS& env) {
  if (!env.state.thisType.couldBe(TObj)) return TBottom;
  if (env.state.thisType.couldBe(BInitNull)) return unopt(env.state.thisType);
  return env.state.thisType;
}

//////////////////////////////////////////////////////////////////////
// self

inline Optional<Type> selfCls(ISS& env) {
  return selfCls(env.index, env.ctx);
}
inline Optional<Type> selfClsExact(ISS& env) {
  return selfClsExact(env.index, env.ctx);
}

inline Optional<Type> parentCls(ISS& env) {
  return parentCls(env.index, env.ctx);
}
inline Optional<Type> parentClsExact(ISS& env) {
  return parentClsExact(env.index, env.ctx);
}

// Like selfClsExact, but if the func is non-static, use an object
// type instead.
inline Type selfExact(ISS& env) {
  assertx(env.ctx.func);
  auto ty = selfClsExact(env);
  if (env.ctx.func->attrs & AttrStatic) {
    return ty ? *ty : TCls;
  }
  return ty ? toobj(*ty) : TObj;
}

//////////////////////////////////////////////////////////////////////
// class constants

inline ClsConstLookupResult lookupClsConstant(const IIndex& index,
                                              const Context& ctx,
                                              const CollectedInfo* collect,
                                              const Type& cls,
                                              const Type& name) {
  // Check if the constant's class is definitely the current context.
  auto const isClsCtx = [&] {
    if (!collect || !collect->clsCns) return false;
    if (!is_specialized_cls(cls)) return false;
    auto const& dcls = dcls_of(cls);
    if (!dcls.isExact()) return false;
    auto const self = selfClsExact(index, ctx);
    if (!self || !is_specialized_cls(*self)) return false;
    return dcls.cls().same(dcls_of(*self).cls());
  }();

  if (isClsCtx && is_specialized_string(name)) {
    auto lookup = collect->clsCns->lookup(sval_of(name));
    if (lookup.found == TriBool::Yes) return lookup;
  }
  return index.lookup_class_constant(ctx, cls, name);
}

inline ClsConstLookupResult lookupClsConstant(ISS& env,
                                              const Type& cls,
                                              const Type& name) {
  return lookupClsConstant(env.index, env.ctx, &env.collect, cls, name);
}

//////////////////////////////////////////////////////////////////////
// folding

const StaticString s___NEVER_INLINE("__NEVER_INLINE");

bool shouldAttemptToFold(ISS& env, const php::Func* func, const FCallArgs& fca,
                         Type context, bool maybeDynamic) {
  if (!func ||
      fca.hasUnpack() ||
      fca.hasGenerics() ||
      fca.numRets() != 1 ||
      !will_reduce(env) ||
      any(env.collect.opts & CollectionOpts::Speculating) ||
      any(env.collect.opts & CollectionOpts::Optimizing)) {
    return false;
  }

  if (maybeDynamic && (
      (Cfg::Eval::NoticeOnBuiltinDynamicCalls &&
       (func->attrs & AttrBuiltin)) ||
      (dyn_call_error_level(func) > 0))) {
    return false;
  }

  if (func->userAttributes.count(s___NEVER_INLINE.get())) {
    return false;
  }

  // Reified functions may have a mismatch of arity or reified generics,
  // so we cannot fold them.
  // TODO(T31677864): Detect the arity mismatch at HHBBC and enable them to
  // be foldable.
  if (func->isReified) return false;

  // A coeffect violation may raise a warning or throw an exception.
  if (!fca.skipCoeffectsCheck()) return false;

  // A readonly violation may raise a warning or throw an exception.
  if (fca.enforceReadonly() ||
      fca.enforceMutableReturn() ||
      fca.enforceReadonlyThis()) {
    return false;
  }

  auto const funcUnit = env.index.lookup_func_unit(*func);

  // Internal functions may raise module boundary violations.
  if ((func->attrs & AttrInternal) &&
      env.index.lookup_func_unit(*env.ctx.func)->moduleName !=
      funcUnit->moduleName) {
    return false;
  }

  // A deployment violation may raise a warning or throw an exception.
  auto const& packageInfo = funcUnit->packageInfo;
  if (auto const activeDeployment = packageInfo.getActiveDeployment()) {
    if (!packageInfo.moduleInDeployment(
          funcUnit->moduleName, *activeDeployment, DeployKind::Hard)) {
      return false;
    }
  }

  // We only fold functions when numRets == 1.
  if (func->hasInOutArgs) return false;

  // Can't fold if we get the wrong amount of arguments.
  if (!check_nargs_in_range(func, fca.numArgs())) return false;

  // Don't try to fold functions which aren't guaranteed to be accessible at
  // this call site.
  if (func->attrs & AttrPrivate) {
    if (env.ctx.cls != func->cls) return false;
  } else if (func->attrs & AttrProtected) {
    assertx(func->cls);
    if (env.ctx.cls != func->cls) {
      if (!env.ctx.cls) return false;
      auto const rcls1 = env.index.resolve_class(env.ctx.cls->name);
      auto const rcls2 = env.index.resolve_class(func->cls->name);
      if (!rcls1 || !rcls2) return false;
      if (!rcls1->exactSubtypeOf(*rcls2, true, true) &&
          !rcls2->exactSubtypeOf(*rcls1, true, true)) {
        return false;
      }
    }
  }

  // Foldable builtins are always worth trying.
  if (func->attrs & AttrIsFoldable) return true;

  // Any native functions at this point are known to be
  // non-foldable, but other builtins might be, even if they
  // don't have the __Foldable attribute.
  if (func->isNative) return false;

  if (func->params.size()) return true;

  auto const rfunc = env.index.resolve_func_or_method(*func);

  // The function has no args. Check if it's effect free and returns
  // a literal.
  auto [retTy, effectFree] = env.index.lookup_return_type(
    env.ctx,
    &env.collect.methods,
    rfunc,
    Dep::InlineDepthLimit
  );
  auto const isScalar = is_scalar(retTy);
  if (effectFree && isScalar) return true;

  if (!(func->attrs & AttrStatic) && func->cls) {
    // May be worth trying to fold if the method returns a scalar,
    // assuming its only "effect" is checking for existence of $this.
    if (isScalar) return true;

    // The method may be foldable if we know more about $this.
    if (is_specialized_obj(context)) {
      auto const& dobj = dobj_of(context);
      if (dobj.isExact() ||
          (dobj.isSub() && dobj.cls().cls() != func->cls) ||
          (dobj.isIsectAndExact() &&
           dobj.isectAndExact().first.cls() != func->cls)) {
        return true;
      }
    }
  }

  return false;
}

//////////////////////////////////////////////////////////////////////
// locals

void mayReadLocal(ISS& env, uint32_t id, bool isUse = true) {
  if (id < env.flags.mayReadLocalSet.size()) {
    env.flags.mayReadLocalSet.set(id);
  }
  if (isUse && id < env.flags.usedParams.size()) {
    env.flags.usedParams.set(id);
  }
}

// Find a local which is equivalent to the given local
LocalId findLocEquiv(State& state, const php::Func* func, LocalId l) {
  if (l >= state.equivLocals.size()) return NoLocalId;
  assertx(state.equivLocals[l] == NoLocalId || !is_volatile_local(func, l));
  return state.equivLocals[l];
}
LocalId findLocEquiv(ISS& env, LocalId l) {
  return findLocEquiv(env.state, env.ctx.func, l);
}

// Given an iterator base local, find an equivalent local that is possibly
// better. IterInit/IterNext often use an unnamed local that came from
// a regular local, which would be a better choice if that local was not
// manipulated in an unsafe way. Regular locals have lower ids.
LocalId findIterBaseLoc(State& state, const php::Func* func, LocalId l) {
  assertx(l != NoLocalId);
  auto const locEquiv = findLocEquiv(state, func, l);
  if (locEquiv == NoLocalId) return l;
  return std::min(l, locEquiv);
}
LocalId findIterBaseLoc(ISS& env, LocalId l) {
  return findIterBaseLoc(env.state, env.ctx.func, l);
}

// Find an equivalent local with minimum id
LocalId findMinLocEquiv(ISS& env, LocalId l, bool allowUninit) {
  if (l >= env.state.equivLocals.size() ||
      env.state.equivLocals[l] == NoLocalId) {
    return NoLocalId;
  }

  auto min = l;
  auto cur = env.state.equivLocals[l];
  while (cur != l) {
    if (cur < min && (allowUninit || !peekLocCouldBeUninit(env, cur))) {
      min = cur;
    }
    cur = env.state.equivLocals[cur];
  }
  return min != l ? min : NoLocalId;
}

// Determine whether two locals are equivalent
bool locsAreEquiv(ISS& env, LocalId l1, LocalId l2) {
  if (l1 >= env.state.equivLocals.size() ||
      l2 >= env.state.equivLocals.size() ||
      env.state.equivLocals[l1] == NoLocalId ||
      env.state.equivLocals[l2] == NoLocalId) {
    return false;
  }

  auto l = l1;
  while ((l = env.state.equivLocals[l]) != l1) {
    if (l == l2) return true;
  }
  return false;
}

bool locIsThis(ISS& env, LocalId l) {
  assertx(l <= MaxLocalId);
  return l == env.state.thisLoc ||
         (env.state.thisLoc <= MaxLocalId &&
          locsAreEquiv(env, l, env.state.thisLoc));
}

void killLocEquiv(State& state, LocalId l) {
  if (l >= state.equivLocals.size()) return;
  if (state.equivLocals[l] == NoLocalId) return;
  auto loc = l;
  do {
    loc = state.equivLocals[loc];
  } while (state.equivLocals[loc] != l);
  assertx(loc != l);
  if (state.equivLocals[l] == loc) {
    state.equivLocals[loc] = NoLocalId;
  } else {
    state.equivLocals[loc] = state.equivLocals[l];
  }
  state.equivLocals[l] = NoLocalId;
}

void killLocEquiv(ISS& env, LocalId l) {
  killLocEquiv(env.state, l);
}

void killAllLocEquiv(ISS& env) {
  env.state.equivLocals.clear();
}

// Add `from` to `to`'s equivalence set.
void addLocEquiv(ISS& env,
                 LocalId from,
                 LocalId to) {
  always_assert(peekLocRaw(env, from).subtypeOf(BCell));
  always_assert(!is_volatile_local(env.ctx.func, to));
  always_assert(from != to && findLocEquiv(env, from) == NoLocalId);

  auto m = std::max(to, from);
  if (env.state.equivLocals.size() <= m) {
    env.state.equivLocals.resize(m + 1, NoLocalId);
  }

  if (env.state.equivLocals[to] == NoLocalId) {
    env.state.equivLocals[from] = to;
    env.state.equivLocals[to] = from;
  } else {
    env.state.equivLocals[from] = env.state.equivLocals[to];
    env.state.equivLocals[to] = from;
  }
}
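
// Worked example of the representation: equivalence classes are circular
// linked lists threaded through equivLocals. Starting from no
// equivalences, addLocEquiv(env, 3, 5) builds the two-element cycle
// 3 -> 5 -> 3. A later addLocEquiv(env, 7, 5) splices 7 in after 5,
// yielding the cycle 5 -> 7 -> 3 -> 5. killLocEquiv() above unsplices a
// local from its cycle in the same way.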

// Obtain a local which is equivalent to the given stack value
LocalId topStkLocal(const State& state, uint32_t idx = 0) {
  assertx(idx < state.stack.size());
  auto const equiv = state.stack[state.stack.size() - idx - 1].equivLoc;
  return equiv > MaxLocalId ? NoLocalId : equiv;
}
LocalId topStkLocal(ISS& env, uint32_t idx = 0) {
  return topStkLocal(env.state, idx);
}

// Obtain a location which is equivalent to the given stack value
LocalId topStkEquiv(ISS& env, uint32_t idx = 0) {
  assertx(idx < env.state.stack.size());
  return env.state.stack[env.state.stack.size() - idx - 1].equivLoc;
}

void setStkLocal(ISS& env, LocalId loc, uint32_t idx = 0) {
  assertx(loc <= MaxLocalId);
  always_assert(peekLocRaw(env, loc).subtypeOf(BCell));
  auto const equiv = [&] {
    while (true) {
      auto const e = topStkEquiv(env, idx);
      if (e != StackDupId) return e;
      idx++;
    }
  }();

  if (equiv <= MaxLocalId) {
    if (loc == equiv || locsAreEquiv(env, loc, equiv)) return;
    addLocEquiv(env, loc, equiv);
    return;
  }
  env.state.stack[env.state.stack.size() - idx - 1].equivLoc = loc;
}

void killThisLoc(ISS& env, LocalId l) {
  if (l != NoLocalId ?
      env.state.thisLoc == l : env.state.thisLoc != NoLocalId) {
    FTRACE(2, "Killing thisLoc: {}\n", env.state.thisLoc);
    env.state.thisLoc = NoLocalId;
  }
}

// Kill all equivalencies involving the given local to stack values
void killStkEquiv(ISS& env, LocalId l) {
  for (auto& e : env.state.stack) {
    if (e.equivLoc != l) continue;
    e.equivLoc = findLocEquiv(env, l);
    assertx(e.equivLoc != l);
  }
}

void killAllStkEquiv(ISS& env) {
  for (auto& e : env.state.stack) {
    if (e.equivLoc <= MaxLocalId) e.equivLoc = NoLocalId;
  }
}

void killIterEquivs(ISS& env, LocalId l, LocalId key = NoLocalId) {
  for (auto& i : env.state.iters) {
    match<void>(
      i,
      [] (DeadIter) {},
      [&] (LiveIter& iter) {
        if (iter.keyLocal == l) iter.keyLocal = NoLocalId;
        if (iter.baseLocal == l) {
          iter.baseUpdated = true;
          if (key == NoLocalId || key != iter.keyLocal) {
            iter.baseLocal = NoLocalId;
          }
        }
      }
    );
  }
}

void killAllIterEquivs(ISS& env) {
  for (auto& i : env.state.iters) {
    match<void>(
      i,
      [] (DeadIter) {},
      [] (LiveIter& iter) {
        iter.baseUpdated = true;
        iter.baseLocal = NoLocalId;
        iter.keyLocal = NoLocalId;
      }
    );
  }
}

void setIterKey(ISS& env, IterId id, LocalId key) {
  match<void>(
    env.state.iters[id],
    [] (DeadIter) {},
    [&] (LiveIter& iter) { iter.keyLocal = key; }
  );
}

Type peekLocRaw(ISS& env, LocalId l) {
  auto ret = env.state.locals[l];
  if (is_volatile_local(env.ctx.func, l)) {
    always_assert_flog(ret == TCell, "volatile local was not TCell");
  }
  return ret;
}

Type locRaw(ISS& env, LocalId l) {
  mayReadLocal(env, l);
  return peekLocRaw(env, l);
}

void setLocRaw(ISS& env, LocalId l, Type t) {
  mayReadLocal(env, l);
  killStkEquiv(env, l);
  killLocEquiv(env, l);
  killIterEquivs(env, l);
  killThisLoc(env, l);
  if (is_volatile_local(env.ctx.func, l)) {
    auto current = env.state.locals[l];
    always_assert_flog(current == TCell, "volatile local was not TCell");
    return;
  }
  if (env.undo) env.undo->onLocalWrite(l, std::move(env.state.locals[l]));
  env.state.locals[l] = std::move(t);
}

// Read a local type in the sense of CGetL. (TUninits turn into
// TInitNull)
Type locAsCell(ISS& env, LocalId l) {
  return to_cell(locRaw(env, l));
}

bool peekLocCouldBeUninit(ISS& env, LocalId l) {
  return peekLocRaw(env, l).couldBe(BUninit);
}

bool locCouldBeUninit(ISS& env, LocalId l) {
  return locRaw(env, l).couldBe(BUninit);
}

/*
 * Update the known type of a local, based on assertions (e.g. IsType/JmpCC),
 * rather than an actual modification to the local.
 */
void refineLocHelper(ISS& env, LocalId l, Type t) {
  auto v = peekLocRaw(env, l);
  assertx(v.subtypeOf(BCell));
  if (!is_volatile_local(env.ctx.func, l)) {
    if (env.undo) env.undo->onLocalWrite(l, std::move(env.state.locals[l]));
    env.state.locals[l] = std::move(t);
  }
}

/*
 * Refine all locals in an equivalence class using fun. Returns false if
 * any refined type becomes Bottom, i.e. the refined location is
 * unreachable.
 */
template<typename F>
bool refineLocation(ISS& env, LocalId l, F fun) {
  bool ok = true;
  auto refine = [&] (Type t) {
    always_assert(t.subtypeOf(BCell));
    auto i = intersection_of(fun(t), t);
    if (i.subtypeOf(BBottom)) ok = false;
    return i;
  };
  if (l == StackDupId) {
    auto stkIdx = env.state.stack.size();
    while (true) {
      --stkIdx;
      auto& stk = env.state.stack[stkIdx];
      if (env.undo) env.undo->onStackWrite(stkIdx, stk.type);
      stk.type = refine(std::move(stk.type));
      if (stk.equivLoc != StackDupId) break;
      assertx(stkIdx > 0);
    }
    l = env.state.stack[stkIdx].equivLoc;
  }
  if (l == StackThisId) {
    if (env.state.thisLoc != NoLocalId) {
      l = env.state.thisLoc;
    }
    return ok;
  }
  if (l == NoLocalId) return ok;
  assertx(l <= MaxLocalId);
  auto fixThis = false;
  auto equiv = findLocEquiv(env, l);
  if (equiv != NoLocalId) {
    do {
      if (equiv == env.state.thisLoc) fixThis = true;
      refineLocHelper(env, equiv, refine(peekLocRaw(env, equiv)));
      equiv = findLocEquiv(env, equiv);
    } while (equiv != l);
  }
  if (fixThis || l == env.state.thisLoc) {
    env.state.thisType = refine(env.state.thisType);
  }
  refineLocHelper(env, l, refine(peekLocRaw(env, l)));
  return ok;
}

/*
 * Refine locals along taken and fallthrough edges.
 */
template<typename Taken, typename Fallthrough>
void refineLocation(ISS& env, LocalId l,
                    Taken taken, BlockId target, Fallthrough fallthrough) {
  auto state = env.state;
  auto const target_reachable = refineLocation(env, l, taken);
  if (!target_reachable) jmp_nevertaken(env);
  // swap, so we can restore this state if the branch is always taken.
  env.state.swap(state);
  if (!refineLocation(env, l, fallthrough)) { // fallthrough unreachable.
    jmp_setdest(env, target);
    env.state.copy_from(std::move(state));
  } else if (target_reachable) {
    env.propagate(target, &state);
  }
}
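
/*
 * Usage sketch (illustrative): a conditional jump that branches on the
 * truthiness of local l can refine the local differently along each edge,
 * e.g. using the type-system helpers assert_nonemptiness/assert_emptiness:
 *
 *   refineLocation(
 *     env, l,
 *     [&] (Type t) { return assert_nonemptiness(std::move(t)); }, // taken
 *     target,
 *     [&] (Type t) { return assert_emptiness(std::move(t)); }     // fallthrough
 *   );
 */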

/*
 * Set a local type in the sense of tvSet. If the local is boxed or
 * not known to be not boxed, we can't change the type. May be used
 * to set locals to types that include Uninit.
 */
void setLoc(ISS& env, LocalId l, Type t, LocalId key = NoLocalId) {
  killStkEquiv(env, l);
  killLocEquiv(env, l);
  killIterEquivs(env, l, key);
  killThisLoc(env, l);
  mayReadLocal(env, l);
  refineLocHelper(env, l, std::move(t));
}

LocalId findLocal(ISS& env, SString name) {
  for (auto& l : env.ctx.func->locals) {
    if (l.name->same(name)) {
      mayReadLocal(env, l.id);
      return l.id;
    }
  }
  return NoLocalId;
}

void killLocals(ISS& env) {
  FTRACE(2, "    killLocals\n");
  readUnknownLocals(env);
  for (size_t l = 0; l < env.state.locals.size(); ++l) {
    if (env.undo) env.undo->onLocalWrite(l, std::move(env.state.locals[l]));
    env.state.locals[l] = TCell;
  }
  killAllLocEquiv(env);
  killAllStkEquiv(env);
  killAllIterEquivs(env);
  killThisLoc(env, NoLocalId);
}

//////////////////////////////////////////////////////////////////////
// iterators

void setIter(ISS& env, IterId iter, Iter iterState) {
  env.state.iters[iter] = std::move(iterState);
}
void freeIter(ISS& env, IterId iter) {
  env.state.iters[iter] = DeadIter {};
}

bool iterIsDead(ISS& env, IterId iter) {
  return match<bool>(
    env.state.iters[iter],
    [] (DeadIter) { return true; },
    [] (const LiveIter&) { return false; }
  );
}

//////////////////////////////////////////////////////////////////////
// properties on $this

/*
 * Note: we are only tracking control-flow insensitive types for
 * object properties, because it can be pretty rough to try to track
 * all cases that could re-enter the VM, run arbitrary code, and
 * potentially change the type of a property.
 *
 * Because of this, the various "setter" functions for thisProps
 * here actually just union the new type into what we already had.
 */

Optional<Type> thisPropType(ISS& env, SString name) {
  if (auto const elem = env.collect.props.readPrivateProp(name)) {
    return elem->ty;
  }
  return std::nullopt;
}

bool isMaybeThisPropAttr(ISS& env, SString name, Attr attr) {
  auto const& raw = env.collect.props.privatePropertiesRaw();
  auto const it = raw.find(name);
  // Prop either doesn't exist, or is on an unflattened trait. Be
  // conservative.
  if (it == raw.end()) return true;
  return it->second.attrs & attr;
}

bool isDefinitelyThisPropAttr(ISS& env, SString name, Attr attr) {
  auto const& raw = env.collect.props.privatePropertiesRaw();
  auto const it = raw.find(name);
  // Prop either doesn't exist, or is on an unflattened trait. Be
  // conservative.
  if (it == raw.end()) return false;
  return it->second.attrs & attr;
}

void killThisProps(ISS& env) {
  FTRACE(2, "    killThisProps\n");
  env.collect.props.mergeInAllPrivateProps(env.index, TCell);
}

/*
 * This function returns a type that includes all the possible types
 * that could result from reading a property $this->name.
 */
Optional<Type> thisPropAsCell(ISS& env, SString name) {
  auto const ty = thisPropType(env, name);
  if (!ty) return std::nullopt;
  return to_cell(ty.value());
}

/*
 * Merge a type into the tracked property types on $this, in the sense
 * of tvSet.
 *
 * Note that all types we see that could go into an object property have to
 * loosen_all. This is because the object could be serialized and then
 * deserialized, losing the static-ness of a string or array member, and we
 * don't guarantee deserialization would preserve a constant value object
 * property type.
 */
void mergeThisProp(ISS& env, SString name, Type type) {
  env.collect.props.mergeInPrivateProp(
    env.index,
    name,
    loosen_this_prop_for_serialization(*env.ctx.cls, name, std::move(type))
  );
}

/*
 * Merge something into each this prop. Usually MapFn will be a
 * predicate that returns TBottom when some condition doesn't hold.
 *
 * The types given to the map function are the raw tracked types
 * (i.e. could be TUninit).
 */
template<typename MapFn>
void mergeEachThisPropRaw(ISS& env, MapFn fn) {
  for (auto const& kv : env.collect.props.privatePropertiesRaw()) {
    auto const ty = thisPropType(env, kv.first);
    assertx(ty.has_value());
    mergeThisProp(env, kv.first, fn(*ty));
  }
}

void unsetThisProp(ISS& env, SString name) {
  mergeThisProp(env, name, TUninit);
}

void unsetUnknownThisProp(ISS& env) {
  env.collect.props.mergeInAllPrivateProps(env.index, TUninit);
}

//////////////////////////////////////////////////////////////////////
// properties on self::

// Similar to $this properties above, we only track control-flow
// insensitive types for these.

void killPrivateStatics(ISS& env) {
  FTRACE(2, "    killPrivateStatics\n");
  env.collect.props.mergeInAllPrivateStatics(env.index, TInitCell, true, false);
}

//////////////////////////////////////////////////////////////////////
// misc

inline void propInitialValue(ISS& env,
                             const php::Prop& prop,
                             TypedValue val,
                             bool satisfies,
                             bool deepInit) {
  FTRACE(2, "    propInitialValue \"{}\" -> {}{}{}\n",
         prop.name, show(from_cell(val)),
         satisfies ? " (initial satisfies TC)" : "",
         deepInit ? " (deep init)" : "");
  env.collect.props.setInitialValue(prop, val, satisfies, deepInit);
}

inline PropMergeResult mergeStaticProp(ISS& env,
                                       const Type& self,
                                       const Type& name,
                                       const Type& val,
                                       bool checkUB = false,
                                       bool ignoreConst = false,
                                       bool mustBeReadOnly = false) {
  FTRACE(2, "    mergeStaticProp {}::{} -> {}\n",
         show(self), show(name), show(val));
  return env.index.merge_static_type(
    env.ctx,
    env.collect.publicSPropMutations,
    env.collect.props,
    self,
    name,
    val,
    checkUB,
    ignoreConst,
    mustBeReadOnly
  );
}

inline MemoSets memoGet(ISS& env) {
  env.collect.allMemoGets.emplace(env.bid);
  return env.collect.allMemoSets;
}

inline void memoSet(ISS& env, Type t, Type wh, bool effectFree) {
  auto reflow = false;

  t |= env.collect.allMemoSets.retTy;
  if (env.collect.allMemoSets.retTy.strictSubtypeOf(t)) {
    env.collect.allMemoSets.retTy = std::move(t);
    reflow = true;
  }
  wh |= env.collect.allMemoSets.waitHandleRetTy;
  if (env.collect.allMemoSets.waitHandleRetTy.strictSubtypeOf(wh)) {
    env.collect.allMemoSets.waitHandleRetTy = std::move(wh);
    reflow = true;
  }
  if (!effectFree && env.collect.allMemoSets.effectFree) {
    env.collect.allMemoSets.effectFree = false;
    reflow = true;
  }
  if (reflow) {
    for (auto const bid : env.collect.allMemoGets) {
      env.propagate(bid, nullptr);
    }
  }
}
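
// Note on the fixed point here: memoGet() records the current block in
// allMemoGets and reads the union of everything memoSet() has stored so
// far; when memoSet() widens that union (or loses effect-freedom), it
// re-queues every recorded MemoGet block via env.propagate() so their
// result types are re-derived against the wider state.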

//////////////////////////////////////////////////////////////////////

}

#ifdef __clang__
#pragma clang diagnostic pop
#endif

}

//////////////////////////////////////////////////////////////////////