1 /* ***** BEGIN LICENSE BLOCK *****
2 * Version: MPL 1.1/GPL 2.0/LGPL 2.1
4 * The contents of this file are subject to the Mozilla Public License Version 1.1 (the
5 * "License"); you may not use this file except in compliance with the License. You may obtain
6 * a copy of the License at http://www.mozilla.org/MPL/
8 * Software distributed under the License is distributed on an "AS IS" basis, WITHOUT
9 * WARRANTY OF ANY KIND, either express or implied. See the License for the specific
10 * language governing rights and limitations under the License.
12 * The Original Code is [Open Source Virtual Machine.]
14 * The Initial Developer of the Original Code is Adobe System Incorporated. Portions created
15 * by the Initial Developer are Copyright (C)[ 2004-2006 ] Adobe Systems Incorporated. All Rights
18 * Contributor(s): Adobe AS3 Team
19 * Andreas Gal <gal@mozilla.com>
20 * Asko Tontti <atontti@cc.hut.fi>
22 * Alternatively, the contents of this file may be used under the terms of either the GNU
23 * General Public License Version 2 or later (the "GPL"), or the GNU Lesser General Public
24 * License Version 2.1 or later (the "LGPL"), in which case the provisions of the GPL or the
25 * LGPL are applicable instead of those above. If you wish to allow use of your version of this
26 * file only under the terms of either the GPL or the LGPL, and not to allow others to use your
27 * version of this file under the terms of the MPL, indicate your decision by deleting provisions
28 * above and replace them with the notice and other provisions required by the GPL or the
29 * LGPL. If you do not delete the provisions above, a recipient may use your version of this file
30 * under the terms of any one of the MPL, the GPL or the LGPL.
32 ***** END LICENSE BLOCK ***** */
42 #if defined(AVMPLUS_LINUX) || defined(DARWIN) || defined(__FreeBSD__)
49 #define FASTCALL JS_FASTCALL
51 #if defined(JS_NO_FASTCALL)
52 #define NJ_NO_FASTCALL
53 #if defined(AVMPLUS_IA32)
54 #define SIMULATE_FASTCALL(lr, state_ptr, frag_ptr, func_addr) \
58 : "c" (state_ptr), "d" (frag_ptr), "S" (func_addr) \
61 #endif /* defined(AVMPLUS_IA32) */
62 #endif /* defined(JS_NO_FASTCALL) */
78 void NanoAssertFail();
/* Assertion macro: forwards to the C library assert(). */
81 #define AvmAssert(x) assert(x)
/* Assertion-with-message variant: expands to nothing, so both the
 * condition and the message are discarded (not evaluated). */
82 #define AvmAssertMsg(x, y)
/* Debug logging: expands literally to `printf x`, so the caller must
 * supply its own parenthesized argument list,
 * e.g. AvmDebugLog(("value=%d\n", v)). */
83 #define AvmDebugLog(x) printf x
87 * Can we just take a moment to think about what it means that MSVC doesn't have stdint.h in 2008?
88 * Thanks for your time.
/* MSVC does not ship <stdint.h>, so define the C99 fixed-width integer
 * names in terms of the JS engine's own sized typedefs (JSUint8..JSInt64)
 * to let the rest of the code use uint8_t/int64_t etc. uniformly. */
90 typedef JSUint8
uint8_t;
91 typedef JSInt8
int8_t;
92 typedef JSUint16
uint16_t;
93 typedef JSInt16
int16_t;
94 typedef JSUint32
uint32_t;
95 typedef JSInt32
int32_t;
96 typedef JSUint64
uint64_t;
97 typedef JSInt64
int64_t;
102 #if defined(_MSC_VER) && defined(AVMPLUS_IA32)
103 __declspec(naked
) static inline __int64
rdtsc()
113 #if defined(__i386__)
115 static __inline__
unsigned long long rdtsc(void)
117 unsigned long long int x
;
118 __asm__
volatile (".byte 0x0f, 0x31" : "=A" (x
));
121 #elif defined(__x86_64__)
123 static __inline__
uint64_t rdtsc(void)
126 __asm__
__volatile__ ("rdtsc" : "=a"(lo
), "=d"(hi
));
127 return ( (uint64_t)lo
)|( ((uint64_t)hi
)<<32 );
130 #elif defined(__powerpc__)
132 typedef unsigned long long int unsigned long long;
134 static __inline__
unsigned long long rdtsc(void)
136 unsigned long long int result
=0;
137 unsigned long int upper
, lower
,tmp
;
145 : "=r"(upper
),"=r"(lower
),"=r"(tmp
)
149 result
= result
|lower
;
179 uint32 numGlobalSlots
;
180 uint32 numStackSlots
;
181 uint32 numStackSlotsBelowCurrentFrame
;
184 #if defined NJ_VERBOSE
198 GuardRecord
*outgoing
;
202 #if defined NJ_VERBOSE
/* Size helpers: the argument is ignored and the fixed compile-time
 * struct size is returned (both record types have a fixed layout in
 * this build configuration). */
208 #define GuardRecordSize(g) sizeof(GuardRecord)
209 #define SideExitSize(e) sizeof(SideExit)
215 static void operator delete (void *gcObject
)
221 #define MMGC_SUBCLASS_DECL : public GCObject
223 class GCFinalizedObject
: public GCObject
226 static void operator delete (void *gcObject
)
235 int32_t kNativePageSize
;
239 #if defined _SC_PAGE_SIZE
240 kNativePageSize
= sysconf(_SC_PAGE_SIZE
);
242 kNativePageSize
= 4096; // @todo: what is this?
247 Alloc(uint32_t pages
)
250 return VirtualAlloc(NULL
,
251 pages
* kNativePageSize
,
252 MEM_COMMIT
| MEM_RESERVE
,
253 PAGE_EXECUTE_READWRITE
);
254 #elif defined AVMPLUS_LINUX || defined DARWIN
256 * Don't use normal heap with mprotect+PROT_EXEC for executable code.
257 * SELinux and friends don't allow this.
260 pages
* kNativePageSize
,
261 PROT_READ
| PROT_WRITE
| PROT_EXEC
,
262 MAP_PRIVATE
| MAP_ANON
,
266 return valloc(pages
* kNativePageSize
);
271 Free(void* p
, uint32_t pages
)
274 VirtualFree(p
, 0, MEM_RELEASE
);
275 #elif defined AVMPLUS_LINUX || defined DARWIN
276 munmap(p
, pages
* kNativePageSize
);
290 Alloc(uint32_t bytes
)
292 return calloc(1, bytes
);
301 static inline GCHeap
*
309 operator new(size_t size
, GC
* gc
)
311 return calloc(1, size
);
315 operator delete(void* p
)
323 #define MMGC_MEM_TYPE(x)
325 typedef int FunctionID
;
331 void* sp
; /* native stack pointer, stack[0] is spbase[0] */
332 void* rp
; /* call stack pointer */
333 void* gp
; /* global frame pointer */
334 JSContext
*cx
; /* current VM context handle */
335 void* eos
; /* first unusable word after the native stack */
336 void* eor
; /* first unusable word after the call stack */
337 nanojit::GuardRecord
* nestedExit
; /* innermost nested guard for NESTED_EXIT exits */
344 typedef class String AvmString
;
346 class StringNullTerminatedUTF8
351 StringNullTerminatedUTF8(GC
* gc
, String
* s
)
353 cstr
= strdup((const char*)s
);
356 ~StringNullTerminatedUTF8()
368 typedef String
* Stringp
;
370 class AvmConfiguration
374 memset(this, 0, sizeof(AvmConfiguration
));
376 verbose
= getenv("TRACEMONKEY") && strstr(getenv("TRACEMONKEY"), "verbose");
386 uint32_t quiet_opt
:1;
388 uint32_t verbose_addrs
:1;
389 uint32_t verbose_live
:1;
390 uint32_t verbose_exits
:1;
391 uint32_t show_stats
:1;
394 static const int kstrconst_emptyString
= 0;
400 const char* format(const void* ip
)
403 sprintf(buf
, "%p", ip
);
422 AvmConsole
& operator<<(const char* s
)
424 fprintf(stdout
, "%s", s
);
432 AvmInterpreter interp
;
435 static AvmConfiguration config
;
437 static String
* k_str
[];
438 static bool sse2_available
;
443 return sse2_available
;
449 return config
.quiet_opt
;
455 return config
.verbose
;
464 static inline String
* newString(const char* cstr
) {
465 return (String
*)strdup(cstr
);
479 * The List<T> template implements a simple List, which can
480 * be templated to support different types.
482 * Elements can be added to the end, modified in the middle,
483 * but no holes are allowed. That is for set(n, v) to work
486 * Note that [] operators are provided and you can violate the
487 * set properties using these operators; if you want a real
488 * list, don't use the [] operators, and if you want a general-purpose
489 * array, use the [] operators.
492 enum ListElementType
{ LIST_NonGCObjects
, LIST_GCObjects
};
494 template <typename T
, ListElementType kElementType
>
498 enum { kInitialCapacity
= 128 };
500 List(GC
*_gc
, uint32_t _capacity
=kInitialCapacity
) : data(NULL
), len(0), capacity(0)
502 ensureCapacity(_capacity
);
509 // zero out in case we are part of an RCObject
513 inline void destroy()
519 // 'this' steals the guts of 'that' and 'that' gets reset.
520 void FASTCALL
become(List
& that
)
524 this->data
= that
.data
;
525 this->len
= that
.len
;
526 this->capacity
= that
.capacity
;
532 uint32_t FASTCALL
add(T value
)
534 if (len
>= capacity
) {
541 inline bool isEmpty() const
546 inline uint32_t size() const
551 inline T
get(uint32_t index
) const
553 AvmAssert(index
< len
);
554 return *(T
*)(data
+ index
);
557 void FASTCALL
set(uint32_t index
, T value
)
559 AvmAssert(index
< capacity
);
564 AvmAssert(len
<= capacity
);
574 int FASTCALL
indexOf(T value
) const
576 for(uint32_t i
=0; i
<len
; i
++)
582 int FASTCALL
lastIndexOf(T value
) const
584 for(int32_t i
=len
-1; i
>=0; i
--)
595 T FASTCALL
removeLast()
598 return undef_list_val();
600 set(len
-1, undef_list_val());
605 inline T
operator[](uint32_t index
) const
607 AvmAssert(index
< capacity
);
611 void FASTCALL
ensureCapacity(uint32_t cap
)
613 if (cap
> capacity
) {
615 data
= (T
*)calloc(1, factor(cap
));
617 data
= (T
*)realloc(data
, factor(cap
));
618 zero_range(capacity
, cap
- capacity
);
624 void FASTCALL
insert(uint32_t index
, T value
, uint32_t count
= 1)
626 AvmAssert(index
<= len
);
627 AvmAssert(count
> 0);
628 ensureCapacity(len
+count
);
629 memmove(data
+ index
+ count
, data
+ index
, factor(len
- index
));
630 wbzm(index
, index
+count
, value
);
634 T FASTCALL
removeAt(uint32_t index
)
637 // dec the refcount on the one we're removing
638 wb(index
, undef_list_val());
639 memmove(data
+ index
, data
+ index
+ 1, factor(len
- index
- 1));
647 // growth is fast at first, then slows at larger list sizes.
649 const uint32_t curMax
= capacity
;
651 newMax
= kInitialCapacity
;
653 newMax
= curMax
* 3/2;
657 ensureCapacity(newMax
);
660 inline void do_wb_nongc(T
* slot
, T value
)
665 inline void do_wb_gc(GCObject
** slot
, const GCObject
** value
)
667 *slot
= (GCObject
*)*value
;
670 void FASTCALL
wb(uint32_t index
, T value
)
672 AvmAssert(index
< capacity
);
673 AvmAssert(data
!= NULL
);
674 T
* slot
= &data
[index
];
675 do_wb_nongc(slot
, value
);
678 // multiple wb call with the same value, and assumption that existing value is all zero bits,
680 // for (uint32_t u = index; u < index_end; ++u)
682 void FASTCALL
wbzm(uint32_t index
, uint32_t index_end
, T value
)
684 AvmAssert(index
< capacity
);
685 AvmAssert(index_end
<= capacity
);
686 AvmAssert(index
< index_end
);
687 AvmAssert(data
!= NULL
);
688 T
* slot
= data
+ index
;
689 for ( ; index
< index_end
; ++index
, ++slot
)
690 do_wb_nongc(slot
, value
);
693 inline uint32_t factor(uint32_t index
) const
695 return index
* sizeof(T
);
698 void FASTCALL
zero_range(uint32_t _first
, uint32_t _count
)
700 memset(data
+ _first
, 0, factor(_count
));
703 // stuff that needs specialization based on the type
704 static inline T
undef_list_val();
707 List(const List
& toCopy
); // unimplemented
708 void operator=(const List
& that
); // unimplemented
710 // ------------------------ DATA SECTION BEGIN
715 // ------------------------ DATA SECTION END
719 // stuff that needs specialization based on the type
720 template<typename T
, ListElementType kElementType
>
721 /* static */ inline T List
<T
, kElementType
>::undef_list_val() { return T(0); }
724 * The SortedMap<K,T> template implements an object that
725 * maps keys to values. The keys are sorted
726 * from smallest to largest in the map. Time of operations
728 * put() is O(1) if the key is higher than any existing
729 * key; O(logN) if the key already exists,
730 * and O(N) otherwise.
731 * get() is an O(logN) binary search.
733 * no duplicates are allowed.
735 template <class K
, class T
, ListElementType valType
>
739 enum { kInitialCapacity
= 64 };
741 SortedMap(GC
* gc
, int _capacity
=kInitialCapacity
)
742 : keys(gc
, _capacity
), values(gc
, _capacity
)
748 return keys
.size() == 0;
770 if (keys
.size() == 0 || k
> keys
.last())
788 i
= -i
- 1; // recover the insertion point
789 AvmAssert(keys
.size() != (uint32_t)i
);
800 return i
>= 0 ? values
[i
] : 0;
803 bool get(K k
, T
& v
) const
814 bool containsKey(K k
) const
817 return (i
>= 0) ? true : false;
828 T old
= values
.removeAt(i
);
833 T
removeFirst() { return isEmpty() ? (T
)0 : removeAt(0); }
834 T
removeLast() { return isEmpty() ? (T
)0 : removeAt(keys
.size()-1); }
835 T
first() const { return isEmpty() ? (T
)0 : values
[0]; }
836 T
last() const { return isEmpty() ? (T
)0 : values
[keys
.size()-1]; }
838 K
firstKey() const { return isEmpty() ? 0 : keys
[0]; }
839 K
lastKey() const { return isEmpty() ? 0 : keys
[keys
.size()-1]; }
842 T
at(int i
) const { return values
[i
]; }
843 K
keyAt(int i
) const { return keys
[i
]; }
845 int findNear(K k
) const {
847 return i
>= 0 ? i
: -i
-2;
850 List
<K
, LIST_NonGCObjects
> keys
;
851 List
<T
, valType
> values
;
856 int hi
= keys
.size()-1;
867 return i
; // key found
869 return -(lo
+ 1); // key not found, low is the insertion point
873 #define GCSortedMap SortedMap
876 * Bit vectors are an efficient method of keeping True/False information
877 * on a set of items or conditions. Class BitSet provides functions
878 * to manipulate individual bits in the vector.
880 * Since most vectors are rather small an array of longs is used by
881 * default to house the value of the bits. If more bits are needed
882 * then an array is allocated dynamically outside of this object.
884 * This object is not optimized for a fixed sized bit vector
885 * it instead allows for dynamically growing the bit vector.
890 enum { kUnit
= 8*sizeof(long),
891 kDefaultCapacity
= 4 };
895 capacity
= kDefaultCapacity
;
901 if (capacity
> kDefaultCapacity
)
907 if (capacity
> kDefaultCapacity
)
908 for(int i
=0; i
<capacity
; i
++)
911 for(int i
=0; i
<capacity
; i
++)
915 void set(GC
*gc
, int bitNbr
)
917 int index
= bitNbr
/ kUnit
;
918 int bit
= bitNbr
% kUnit
;
919 if (index
>= capacity
)
922 if (capacity
> kDefaultCapacity
)
923 bits
.ptr
[index
] |= (1<<bit
);
925 bits
.ar
[index
] |= (1<<bit
);
928 void clear(int bitNbr
)
930 int index
= bitNbr
/ kUnit
;
931 int bit
= bitNbr
% kUnit
;
932 if (index
< capacity
)
934 if (capacity
> kDefaultCapacity
)
935 bits
.ptr
[index
] &= ~(1<<bit
);
937 bits
.ar
[index
] &= ~(1<<bit
);
941 bool get(int bitNbr
) const
943 int index
= bitNbr
/ kUnit
;
944 int bit
= bitNbr
% kUnit
;
946 if (index
< capacity
)
948 if (capacity
> kDefaultCapacity
)
949 value
= ( bits
.ptr
[index
] & (1<<bit
) ) ? true : false;
951 value
= ( bits
.ar
[index
] & (1<<bit
) ) ? true : false;
957 // Grow the array until at least newCapacity big
958 void grow(GC
*gc
, int newCapacity
)
960 // create vector that is 2x bigger than requested
962 //MEMTAG("BitVector::Grow - long[]");
963 long* newBits
= (long*)calloc(1, newCapacity
* sizeof(long));
964 //memset(newBits, 0, newCapacity * sizeof(long));
967 if (capacity
> kDefaultCapacity
)
968 for(int i
=0; i
<capacity
; i
++)
969 newBits
[i
] = bits
.ptr
[i
];
971 for(int i
=0; i
<capacity
; i
++)
972 newBits
[i
] = bits
.ar
[i
];
974 // in with the new out with the old
975 if (capacity
> kDefaultCapacity
)
979 capacity
= newCapacity
;
982 // by default we use the array, but if the vector
983 // size grows beyond kDefaultCapacity we allocate
984 // space dynamically.
988 long ar
[kDefaultCapacity
];