1 # SPDX-FileCopyrightText: 2023-2024 Sebastian Schrand
2 # 2017-2022 Jens M. Plonka
3 # 2005-2018 Philippe Lagadec
5 # SPDX-License-Identifier: GPL-2.0-or-later
7 # Import is based on using information from `olefile` IO source-code
8 # and the FreeCAD Autodesk 3DS Max importer ImportMAX.
10 # `olefile` (formerly OleFileIO_PL) is copyright Philippe Lagadec.
11 # (https://www.decalage.info)
13 # ImportMAX is copyright Jens M. Plonka.
14 # (https://www.github.com/jmplonka/Importer3D)
27 from bpy_extras
.node_shader_utils
import PrincipledBSDFWrapper
34 MAGIC
= b
'\xD0\xCF\x11\xE0\xA1\xB1\x1A\xE1'
35 WORD_CLSID
= "00020900-0000-0000-C000-000000000046"
38 UNKNOWN_SIZE
= 0x7FFFFFFF
39 MAXFILE_SIZE
= 0x7FFFFFFFFFFFFFFF
40 MAXREGSECT
= 0xFFFFFFFA # (-6) maximum SECT
41 DIFSECT
= 0xFFFFFFFC # (-4) denotes a DIFAT sector in a FAT
42 FATSECT
= 0xFFFFFFFD # (-3) denotes a FAT sector in a FAT
43 ENDOFCHAIN
= 0xFFFFFFFE # (-2) end of a virtual stream chain
44 FREESECT
= 0xFFFFFFFF # (-1) unallocated sector
45 MAX_STREAM
= 2 # element is a stream object
46 ROOT_STORE
= 5 # element is a root storage
49 INVALID_NAME
= re
.compile('^[0-9].*')
50 UNPACK_BOX_DATA
= struct
.Struct('<HIHHBff').unpack_from
# Index, int, 2short, byte, 2float
52 FLOAT_POINT
= 0x71F11549498702E7 # Float Wire
53 MATRIX_POS
= 0xFFEE238A118F7E02 # Position XYZ
54 MATRIX_ROT
= 0x3A90416731381913 # Rotation Wire
55 MATRIX_SCL
= 0xFEEE238B118F7C01 # Scale XYZ
56 EDIT_MESH
= 0x00000000E44F10B3 # Editable Mesh
57 EDIT_POLY
= 0x192F60981BF8338D # Editable Poly
58 CORO_MTL
= 0x448931dd70be6506 # CoronaMtl
59 ARCH_MTL
= 0x4A16365470B05735 # ArchMtl
60 VRAY_MTL
= 0x7034695C37BF3F2F # VRayMtl
63 0x0000000000001002: 'Camera',
64 0x0000000000001011: 'Omni',
65 0x0000000000001013: 'Free Direct',
66 0x0000000000001020: 'Camera Target',
67 0x0000000000001040: 'Line',
68 0x0000000000001065: 'Rectangle',
69 0x0000000000001097: 'Ellipse',
70 0x0000000000001999: 'Circle',
71 0x0000000000002013: 'Point',
72 0x0000000000009125: 'Biped Object',
73 0x0000000000876234: 'Dummy',
74 0x05622B0D69011E82: 'Compass',
75 0x12A822FB76A11646: 'CV Surface',
76 0x1EB3430074F93B07: 'Particle View',
77 0x2ECCA84028BF6E8D: 'Bone',
78 0x3BDB0E0C628140F6: 'VRayPlane',
79 0x4E9B599047DB14EF: 'Slider',
80 0x522E47057BF61478: 'Sky',
81 0x5FD602DF3C5575A1: 'VRayLight',
82 0x77566F65081F1DFC: 'Plane',
def get_valid_name(name):
    """Return a printable identifier for *name*, prefixed with '_' when it
    starts with a digit (per the module's INVALID_NAME pattern).

    NOTE(review): interpolating the encoded bytes yields a "b'...'"-style
    string; this matches the original behavior and is kept as-is.
    """
    prefix = "_" if INVALID_NAME.match(name) else ""
    return "%s%s" % (prefix, name.encode('utf8'))
100 return data
if data
.__class
__ is int else data
[0]
def i16(data, offset=0):
    """Decode an unsigned little-endian 16-bit integer at *offset* in *data*."""
    (value,) = struct.unpack("<H", data[offset:offset + 2])
    return value
def i32(data, offset=0):
    """Decode an unsigned little-endian 32-bit integer at *offset* in *data*."""
    (value,) = struct.unpack("<I", data[offset:offset + 4])
    return value
111 def get_byte(data
, offset
=0):
113 value
= struct
.unpack('<B', data
[offset
:size
])[0]
117 def get_short(data
, offset
=0):
119 value
= struct
.unpack('<H', data
[offset
:size
])[0]
123 def get_long(data
, offset
=0):
125 value
= struct
.unpack('<I', data
[offset
:size
])[0]
129 def get_float(data
, offset
=0):
131 value
= struct
.unpack('<f', data
[offset
:size
])[0]
135 def get_bytes(data
, offset
=0, count
=1):
136 size
= offset
+ count
137 values
= struct
.unpack('<' + 'B' * count
, data
[offset
:size
])
141 def get_shorts(data
, offset
=0, count
=1):
142 size
= offset
+ count
* 2
143 values
= struct
.unpack('<' + 'H' * count
, data
[offset
:size
])
147 def get_longs(data
, offset
=0, count
=1):
148 size
= offset
+ count
* 4
149 values
= struct
.unpack('<' + 'I' * count
, data
[offset
:size
])
153 def get_floats(data
, offset
=0, count
=1):
154 size
= offset
+ count
* 4
155 values
= struct
.unpack('<' + 'f' * count
, data
[offset
:size
])
160 """Converts a CLSID to a readable string."""
161 assert len(clsid
) == 16
162 if not clsid
.strip(b
"\0"):
164 return (("%08X-%04X-%04X-%02X%02X-" + "%02X" * 6) %
165 ((i32(clsid
, 0), i16(clsid
, 4), i16(clsid
, 6)) +
166 tuple(map(i8
, clsid
[8:16]))))
173 def is_maxfile(filename
):
174 """Test if file is a MAX OLE2 container."""
175 if hasattr(filename
, 'read'):
176 header
= filename
.read(len(MAGIC
))
178 elif isinstance(filename
, bytes
) and len(filename
) >= MIN_FILE_SIZE
:
179 header
= filename
[:len(MAGIC
)]
181 with
open(filename
, 'rb') as fp
:
182 header
= fp
.read(len(MAGIC
))
189 class MaxStream(io
.BytesIO
):
190 """Returns an instance of the BytesIO class as read-only file object."""
192 def __init__(self
, fp
, sect
, size
, offset
, sectorsize
, fat
, filesize
):
193 if size
== UNKNOWN_SIZE
:
194 size
= len(fat
) * sectorsize
195 nb_sectors
= (size
+ (sectorsize
- 1)) // sectorsize
198 for i
in range(nb_sectors
):
200 fp
.seek(offset
+ sectorsize
* sect
)
203 sector_data
= fp
.read(sectorsize
)
204 data
.append(sector_data
)
206 sect
= fat
[sect
] & FREESECT
209 data
= b
"".join(data
)
210 if len(data
) >= size
:
214 self
.size
= len(data
)
215 io
.BytesIO
.__init
__(self
, data
)
218 class MaxFileDirEntry
:
219 """Directory Entry for a stream or storage."""
220 STRUCT_DIRENTRY
= '<64sHBBIII16sIQQIII'
222 assert struct
.calcsize(STRUCT_DIRENTRY
) == DIRENTRY_SIZE
224 def __init__(self
, entry
, sid
, maxfile
):
226 self
.maxfile
= maxfile
245 ) = struct
.unpack(MaxFileDirEntry
.STRUCT_DIRENTRY
, entry
)
247 if self
.namelength
> 64:
249 self
.name_utf16
= self
.name_raw
[:(self
.namelength
- 2)]
250 self
.name
= maxfile
._decode
_utf
16_str
(self
.name_utf16
)
251 # print('DirEntry SID=%d: %s' % (self.sid, repr(self.name)))
252 if maxfile
.sectorsize
== 512:
253 self
.size
= self
.sizeLow
255 self
.size
= self
.sizeLow
+ (int(self
.sizeHigh
) << 32)
256 self
.clsid
= _clsid(clsid
)
257 self
.is_minifat
= False
258 if self
.entry_type
in (ROOT_STORE
, MAX_STREAM
) and self
.size
> 0:
259 if self
.size
< maxfile
.minisectorcutoff \
260 and self
.entry_type
== MAX_STREAM
: # only streams can be in MiniFAT
261 self
.is_minifat
= True
263 self
.is_minifat
= False
264 maxfile
._check
_duplicate
_stream
(self
.isectStart
, self
.is_minifat
)
265 self
.sect_chain
= None
267 def build_sect_chain(self
, maxfile
):
270 if self
.entry_type
not in (ROOT_STORE
, MAX_STREAM
) or self
.size
== 0:
272 self
.sect_chain
= list()
273 if self
.is_minifat
and not maxfile
.minifat
:
274 maxfile
.loadminifat()
275 next_sect
= self
.isectStart
276 while next_sect
!= ENDOFCHAIN
:
277 self
.sect_chain
.append(next_sect
)
279 next_sect
= maxfile
.minifat
[next_sect
]
281 next_sect
= maxfile
.fat
[next_sect
]
283 def build_storage_tree(self
):
284 if self
.sid_child
!= FREESECT
:
285 self
.append_kids(self
.sid_child
)
288 def append_kids(self
, child_sid
):
289 if child_sid
== FREESECT
:
292 child
= self
.maxfile
._load
_direntry
(child_sid
)
296 self
.append_kids(child
.sid_left
)
297 name_lower
= child
.name
.lower()
298 self
.kids
.append(child
)
299 self
.kids_dict
[name_lower
] = child
300 self
.append_kids(child
.sid_right
)
301 child
.build_storage_tree()
303 def __eq__(self
, other
):
304 return self
.name
== other
.name
306 def __lt__(self
, other
):
307 return self
.name
< other
.name
309 def __ne__(self
, other
):
310 return not self
.__eq
__(other
)
312 def __le__(self
, other
):
313 return self
.__eq
__(other
) or self
.__lt
__(other
)
317 """Representing an interface for importing .max files."""
319 def __init__(self
, filename
=None):
320 self
._filesize
= None
321 self
.byte_order
= None
322 self
.directory_fp
= None
323 self
.direntries
= None
324 self
.dll_version
= None
326 self
.first_difat_sector
= None
327 self
.first_dir_sector
= None
328 self
.first_mini_fat_sector
= None
330 self
.header_clsid
= None
331 self
.header_signature
= None
332 self
.mini_sector_shift
= None
333 self
.mini_sector_size
= None
334 self
.mini_stream_cutoff_size
= None
336 self
.minifatsect
= None
337 self
.minisectorcutoff
= None
338 self
.minisectorsize
= None
339 self
.ministream
= None
340 self
.minor_version
= None
342 self
.num_difat_sectors
= None
343 self
.num_dir_sectors
= None
344 self
.num_fat_sectors
= None
345 self
.num_mini_fat_sectors
= None
346 self
.reserved1
= None
347 self
.reserved2
= None
349 self
.sector_shift
= None
350 self
.sector_size
= None
351 self
.transaction_signature_number
= None
358 def __exit__(self
, *args
):
361 def _decode_utf16_str(self
, utf16_str
, errors
='replace'):
362 unicode_str
= utf16_str
.decode('UTF-16LE', errors
)
365 def open(self
, filename
):
366 if hasattr(filename
, 'read'):
368 elif isinstance(filename
, bytes
) and len(filename
) >= MIN_FILE_SIZE
:
369 self
.fp
= io
.BytesIO(filename
)
371 self
.fp
= open(filename
, 'rb')
373 self
.fp
.seek(0, os
.SEEK_END
)
375 filesize
= self
.fp
.tell()
378 self
._filesize
= filesize
379 self
._used
_streams
_fat
= []
380 self
._used
_streams
_minifat
= []
381 header
= self
.fp
.read(512)
382 fmt_header
= '<8s16sHHHHHHLLLLLLLLLL'
383 header_size
= struct
.calcsize(fmt_header
)
384 header1
= header
[:header_size
]
386 self
.header_signature
,
392 self
.mini_sector_shift
,
395 self
.num_dir_sectors
,
396 self
.num_fat_sectors
,
397 self
.first_dir_sector
,
398 self
.transaction_signature_number
,
399 self
.mini_stream_cutoff_size
,
400 self
.first_mini_fat_sector
,
401 self
.num_mini_fat_sectors
,
402 self
.first_difat_sector
,
403 self
.num_difat_sectors
404 ) = struct
.unpack(fmt_header
, header1
)
406 self
.sector_size
= 2**self
.sector_shift
407 self
.mini_sector_size
= 2**self
.mini_sector_shift
408 if self
.mini_stream_cutoff_size
!= 0x1000:
409 self
.mini_stream_cutoff_size
= 0x1000
410 self
.nb_sect
= ((filesize
+ self
.sector_size
- 1) // self
.sector_size
) - 1
413 self
.header_clsid
= _clsid(header
[8:24])
414 self
.sectorsize
= self
.sector_size
# i16(header, 30)
415 self
.minisectorsize
= self
.mini_sector_size
# i16(header, 32)
416 self
.minisectorcutoff
= self
.mini_stream_cutoff_size
# i32(header, 56)
417 self
._check
_duplicate
_stream
(self
.first_dir_sector
)
418 if self
.num_mini_fat_sectors
:
419 self
._check
_duplicate
_stream
(self
.first_mini_fat_sector
)
420 if self
.num_difat_sectors
:
421 self
._check
_duplicate
_stream
(self
.first_difat_sector
)
423 # Load file allocation tables
425 self
.loaddirectory(self
.first_dir_sector
)
426 self
.minifatsect
= self
.first_mini_fat_sector
431 def _check_duplicate_stream(self
, first_sect
, minifat
=False):
433 used_streams
= self
._used
_streams
_minifat
435 if first_sect
in (DIFSECT
, FATSECT
, ENDOFCHAIN
, FREESECT
):
437 used_streams
= self
._used
_streams
_fat
438 if first_sect
in used_streams
:
441 used_streams
.append(first_sect
)
443 def sector_array(self
, sect
):
444 ary
= array
.array('I', sect
)
445 if sys
.byteorder
== 'big':
449 def loadfat_sect(self
, sect
):
450 if isinstance(sect
, array
.array
):
453 fat1
= self
.sector_array(sect
)
456 isect
= isect
& FREESECT
457 if isect
== ENDOFCHAIN
or isect
== FREESECT
:
459 sector
= self
.getsect(isect
)
460 nextfat
= self
.sector_array(sector
)
461 self
.fat
= self
.fat
+ nextfat
def loadfat(self, header):
    """Load the FAT from the 109 DIFAT entries in the header, then from
    any additional DIFAT sectors announced by the header."""
    # The first 109 DIFAT entries (4 bytes each) live in header bytes 76..511.
    sect = header[76:512]
    self.fat = array.array('I')
    self.loadfat_sect(sect)
    if self.num_difat_sectors != 0:
        # Each DIFAT sector holds (sectorsize/4 - 1) FAT sector indexes;
        # its last 4-byte slot chains to the next DIFAT sector.
        nb_difat_sectors = (self.sectorsize // 4) - 1
        # Number of DIFAT sectors needed for the FAT sectors beyond the
        # 109 covered by the header (rounded up).
        nb_difat = (self.num_fat_sectors - 109 + nb_difat_sectors - 1) // nb_difat_sectors
        isect_difat = self.first_difat_sector
        for i in range(nb_difat):
            sector_difat = self.getsect(isect_difat)
            difat = self.sector_array(sector_difat)
            self.loadfat_sect(difat[:nb_difat_sectors])
            # Follow the chain pointer stored in the last slot.
            isect_difat = difat[nb_difat_sectors]
    if len(self.fat) > self.nb_sect:
        # Drop FAT entries beyond the file's actual sector count.
        self.fat = self.fat[:self.nb_sect]
def loadminifat(self):
    """Load the MiniFAT table and trim it to the number of mini-sectors
    actually covered by the root storage's ministream."""
    # Total byte size of the MiniFAT stream itself.
    stream_size = self.num_mini_fat_sectors * self.sector_size
    # Number of mini-sectors in the ministream (rounded up).
    nb_minisectors = (self.root.size + self.mini_sector_size - 1) // self.mini_sector_size
    # The MiniFAT is stored as a regular stream in the FAT.
    sect = self._open(self.minifatsect, stream_size, force_FAT=True).read()
    # Trim trailing entries that fall outside the ministream.
    # (Removed the unused local `used_size = nb_minisectors * 4` from the
    # original implementation.)
    self.minifat = self.sector_array(sect)[:nb_minisectors]
488 def getsect(self
, sect
):
490 self
.fp
.seek(self
.sectorsize
* (sect
+ 1))
492 print('IndexError: Sector index out of range')
493 sector
= self
.fp
.read(self
.sectorsize
)
def loaddirectory(self, sect):
    """Load the directory stream starting at *sect* and build the
    storage tree rooted at directory entry 0."""
    self.directory_fp = self._open(sect, force_FAT=True)
    # Each directory entry occupies exactly 128 bytes.
    max_entries = self.directory_fp.size // 128
    self.direntries = [None] * max_entries
    # Entry 0 is always the root storage; _load_direntry caches it in
    # self.direntries[0] and returns it (the original bound it to an
    # unused local and re-read the cache).
    self.root = self._load_direntry(0)
    self.root.build_storage_tree()
def _load_direntry(self, sid):
    """Return the directory entry for *sid*, parsing and caching it on
    first access."""
    cached = self.direntries[sid]
    if cached is not None:
        return cached
    # Each entry is a fixed 128-byte record inside the directory stream.
    self.directory_fp.seek(sid * 128)
    raw = self.directory_fp.read(128)
    parsed = MaxFileDirEntry(raw, sid, self)
    self.direntries[sid] = parsed
    return parsed
512 def _open(self
, start
, size
=UNKNOWN_SIZE
, force_FAT
=False):
513 if size
< self
.minisectorcutoff
and not force_FAT
:
514 if not self
.ministream
:
516 size_ministream
= self
.root
.size
517 self
.ministream
= self
._open
(self
.root
.isectStart
,
518 size_ministream
, force_FAT
=True)
519 return MaxStream(fp
=self
.ministream
, sect
=start
, size
=size
,
520 offset
=0, sectorsize
=self
.minisectorsize
,
521 fat
=self
.minifat
, filesize
=self
.ministream
.size
)
523 return MaxStream(fp
=self
.fp
, sect
=start
, size
=size
,
524 offset
=self
.sectorsize
, sectorsize
=self
.sectorsize
,
525 fat
=self
.fat
, filesize
=self
._filesize
)
527 def _find(self
, filename
):
528 if isinstance(filename
, str):
529 filename
= filename
.split('/')
531 for name
in filename
:
532 for kid
in node
.kids
:
533 if kid
.name
.lower() == name
.lower():
def openstream(self, filename):
    """Open the stream named *filename* and return it as a read-only
    file-like object."""
    sid = self._find(filename)
    direntry = self.direntries[sid]
    return self._open(direntry.isectStart, direntry.size)
549 """Representing a chunk of a .max file."""
551 def __init__(self
, types
, size
, level
, number
):
562 return "%s[%4x]%04X:%s" % ("" * self
.level
, self
.number
, self
.types
, self
.data
)
565 class ByteArrayChunk(MaxChunk
):
566 """A byte array of a .max chunk."""
def __init__(self, types, data, level, number):
    # Pure delegation to the MaxChunk base initializer; the payload is
    # decoded later via set_data().
    MaxChunk.__init__(self, types, data, level, number)
571 def set(self
, data
, fmt
, start
, end
):
573 self
.data
= struct
.unpack(fmt
, data
[start
:end
])
574 except Exception as exc
:
576 # print('StructError:', exc, name)
578 def set_string(self
, data
):
580 self
.data
= data
.decode('UTF-16LE')
584 def set_data(self
, data
):
585 if (self
.types
in [0x0340, 0x4001, 0x0456, 0x0962]):
586 self
.set_string(data
)
587 elif (self
.types
in [0x2034, 0x2035]):
588 self
.set(data
, '<' + 'I' * int(len(data
) / 4), 0, len(data
))
589 elif (self
.types
in [0x2501, 0x2503, 0x2504, 0x2505, 0x2511]):
590 self
.set(data
, '<' + 'f' * int(len(data
) / 4), 0, len(data
))
591 elif (self
.types
== 0x2510):
592 self
.set(data
, '<' + 'f' * int(len(data
) / 4 - 1) + 'I', 0, len(data
))
593 elif (self
.types
== 0x0100):
594 self
.set(data
, '<f', 0, len(data
))
599 class ClassIDChunk(ByteArrayChunk
):
600 """The class ID subchunk of a .max chunk."""
602 def __init__(self
, types
, data
, level
, number
):
603 MaxChunk
.__init
__(self
, types
, data
, level
, number
)
606 def set_data(self
, data
):
607 if (self
.types
== 0x2042):
608 self
.set_string(data
) # ClsName
609 elif (self
.types
== 0x2060):
610 self
.set(data
, '<IQI', 0, 16) # DllIndex, ID, SuperID
612 self
.data
= ":".join("%02x" % (c
) for c
in data
)
615 class DirectoryChunk(ByteArrayChunk
):
616 """The directory chunk of a .max file."""
def __init__(self, types, data, level, number):
    # Delegates directly to the MaxChunk base initializer, bypassing
    # ByteArrayChunk.__init__.
    MaxChunk.__init__(self, types, data, level, number)
def set_data(self, data):
    """Decode a directory-chunk payload as a UTF-16 string.

    Chunk types 0x2039 and 0x2037 carry the same string payload; the
    original had two identical branches, merged here into one test.
    """
    if (self.types in (0x2039, 0x2037)):
        self.set_string(data)
628 class ContainerChunk(MaxChunk
):
629 """A container chunk in a .max file wich includes byte arrays."""
def __init__(self, types, data, level, number, primReader=ByteArrayChunk):
    MaxChunk.__init__(self, types, data, level, number)
    # Reader class applied to leaf (non-container) child chunks.
    self.primReader = primReader
636 return "%s[%4x]%04X" % ("" * self
.level
, self
.number
, self
.types
)
638 def get_first(self
, types
):
639 for child
in self
.children
:
640 if (child
.types
== types
):
644 def set_data(self
, data
):
647 reader
= ChunkReader()
648 self
.children
= reader
.get_chunks(data
, self
.level
+ 1, ContainerChunk
, self
.primReader
)
651 class SceneChunk(ContainerChunk
):
652 """The scene chunk of a .max file wich includes the relevant data for blender."""
def __init__(self, types, data, level, number, primReader=ByteArrayChunk):
    # Calls the MaxChunk initializer directly, skipping
    # ContainerChunk.__init__.
    MaxChunk.__init__(self, types, data, level, number)
    # Reader class applied to leaf (non-container) child chunks.
    self.primReader = primReader
660 return "%s[%4x]%s" % ("" * self
.level
, self
.number
, get_cls_name(self
))
662 def set_data(self
, data
):
665 # print('Scene', "%s\n" %(self))
666 reader
= ChunkReader()
667 self
.children
= reader
.get_chunks(data
, self
.level
+ 1,
668 SceneChunk
, ByteArrayChunk
)
672 """The chunk reader class for decoding the byte arrays."""
674 def __init__(self
, name
=None):
677 def get_chunks(self
, data
, level
, conReader
, primReader
):
681 short
, step
= get_short(data
, 0)
682 long, step
= get_long(data
, step
)
683 if (short
== 0x8B1F):
684 short
, step
= get_long(data
, step
)
685 if (short
in (0xB000000, 0xA040000)):
686 data
= zlib
.decompress(data
, zlib
.MAX_WBITS |
32)
687 print(" reading '%s'..." % self
.name
, len(data
))
688 while offset
< len(data
):
690 offset
, chunk
= self
.get_next_chunk(data
, offset
, level
,
691 len(chunks
), conReader
, primReader
)
695 def get_next_chunk(self
, data
, offset
, level
, number
, conReader
, primReader
):
697 typ
, siz
, = struct
.unpack("<Hi", data
[offset
:offset
+ header
])
698 chunksize
= siz
& UNKNOWN_SIZE
700 siz
, = struct
.unpack("<q", data
[offset
+ header
:offset
+ header
+ 8])
702 chunksize
= siz
& MAXFILE_SIZE
704 chunk
= conReader(typ
, chunksize
, level
, number
, primReader
)
706 chunk
= primReader(typ
, chunksize
, level
, number
)
707 chunkdata
= data
[offset
+ header
:offset
+ chunksize
]
708 chunk
.set_data(chunkdata
)
709 return offset
+ chunksize
, chunk
713 """Representing a three dimensional vector plus pointflag."""
724 return "[%s]-%X,%X,%X,[%s]" % ('/'.join("%d" % p
for p
in self
.points
),
725 self
.fH
, self
.f1
, self
.f2
,
726 ','.join("%X" % f
for f
in self
.fA
))
730 """Representing a material chunk of a scene chunk."""
def set(self, name, value):
    """Store *value* under the key *name* in this material's property dict."""
    self.data[name] = value
738 def get(self
, name
, default
=None):
740 if (name
in self
.data
):
741 value
= self
.data
[name
]
748 if isinstance(index
, tuple):
751 if (index
< len(SCENE_LIST
[0].children
)):
752 return SCENE_LIST
[0].children
[index
]
756 def get_node_parent(node
):
759 chunk
= node
.get_first(0x0960)
760 if (chunk
is not None):
761 idx
, offset
= get_long(chunk
.data
, 0)
762 parent
= get_node(idx
)
766 def get_node_name(node
):
768 name
= node
.get_first(TYP_NAME
)
774 def get_class(chunk
):
776 if (chunk
.types
< len(CLS_DIR3_LIST
)):
777 return CLS_DIR3_LIST
[chunk
.types
]
783 idx
= chunk
.get_first(0x2060).data
[0]
784 if (idx
< len(DLL_DIR_LIST
)):
785 return DLL_DIR_LIST
[idx
]
790 clid
= get_class(chunk
)
792 return clid
.get_first(0x2060).data
[1]
796 def get_super_id(chunk
):
797 clid
= get_class(chunk
)
799 return clid
.get_first(0x2060).data
[2]
803 def get_cls_name(chunk
):
804 clid
= get_class(chunk
)
806 cls_name
= clid
.get_first(0x2042).data
808 return "'%s'" % (cls_name
)
810 return "'%r'" % (cls_name
)
811 return u
"%04X" % (chunk
.types
)
814 def get_references(chunk
):
815 refs
= chunk
.get_first(0x2034)
817 references
= [get_node(idx
) for idx
in refs
.data
]
821 def get_reference(chunk
):
823 refs
= chunk
.get_first(0x2035)
826 while offset
< len(refs
.data
):
827 key
= refs
.data
[offset
]
829 idx
= refs
.data
[offset
]
831 references
[key
] = get_node(idx
)
835 def read_chunks(maxfile
, name
, filename
, conReader
=ContainerChunk
, primReader
=ByteArrayChunk
):
836 with maxfile
.openstream(name
) as file:
838 reader
= ChunkReader(name
)
839 return reader
.get_chunks(scene
, 0, conReader
, primReader
)
842 def read_class_data(maxfile
, filename
):
844 CLS_DATA
= read_chunks(maxfile
, 'ClassData', filename
+ '.ClsDat.bin')
847 def read_class_directory(maxfile
, filename
):
850 CLS_DIR3_LIST
= read_chunks(maxfile
, 'ClassDirectory3',
851 filename
+ '.ClsDir3.bin', ContainerChunk
, ClassIDChunk
)
853 CLS_DIR3_LIST
= read_chunks(maxfile
, 'ClassDirectory',
854 filename
+ '.ClsDir.bin', ContainerChunk
, ClassIDChunk
)
855 for clsdir
in CLS_DIR3_LIST
:
856 clsdir
.dll
= get_dll(clsdir
)
859 def read_config(maxfile
, filename
):
861 CONFIG
= read_chunks(maxfile
, 'Config', filename
+ '.Cnf.bin')
864 def read_directory(maxfile
, filename
):
866 DLL_DIR_LIST
= read_chunks(maxfile
, 'DllDirectory',
867 filename
+ '.DllDir.bin', ContainerChunk
, DirectoryChunk
)
870 def read_video_postqueue(maxfile
, filename
):
872 VID_PST_QUE
= read_chunks(maxfile
, 'VideoPostQueue', filename
+ '.VidPstQue.bin')
875 def get_point(floatval
, default
=0.0):
876 uid
= get_guid(floatval
)
877 if (uid
== 0x2007): # Bezier-Float
878 flv
= floatval
.get_first(0x7127)
881 return flv
.get_first(0x2501).data
[0]
883 print("SyntaxError: %s - assuming 0.0!\n" % (floatval
))
885 if (uid
== FLOAT_POINT
): # Float Wire
886 flv
= get_references(floatval
)[0]
887 return get_point(flv
)
892 def get_point_3d(chunk
, default
=0.0):
895 refs
= get_references(chunk
)
897 flt
= get_point(fl
, default
)
903 def get_position(pos
):
905 mtx
= mathutils
.Matrix
.Identity(4)
908 if (uid
== MATRIX_POS
): # Position XYZ
909 position
= get_point_3d(pos
)
910 elif (uid
== 0x442312): # TCB Position
911 position
= pos
.get_first(0x2503).data
912 elif (uid
== 0x2008): # Bezier Position
913 position
= pos
.get_first(0x2503).data
915 mtx
= mathutils
.Matrix
.Translation(position
)
919 def get_rotation(pos
):
921 mtx
= mathutils
.Matrix
.Identity(4)
924 if (uid
== 0x2012): # Euler XYZ
925 rot
= get_point_3d(pos
)
926 rotation
= mathutils
.Euler((rot
[2], rot
[1], rot
[0])).to_quaternion()
927 elif (uid
== 0x442313): # TCB Rotation
928 rot
= pos
.get_first(0x2504).data
929 rotation
= mathutils
.Quaternion((rot
[0], rot
[1], rot
[2], rot
[3]))
930 elif (uid
== 0x4B4B1003): # Rotation List
931 refs
= get_references(pos
)
933 return get_rotation(refs
[0])
934 elif (uid
== MATRIX_ROT
): # Rotation Wire
935 return get_rotation(get_references(pos
)[0])
937 mtx
= mathutils
.Matrix
.Rotation(rotation
.angle
, 4, rotation
.axis
)
942 mtx
= mathutils
.Matrix
.Identity(4)
945 if (uid
== 0x2010): # Bezier Scale
946 scale
= pos
.get_first(0x2501)
948 scale
= pos
.get_first(0x2505)
950 elif (uid
== 0x442315): # TCB Zoom
951 scale
= pos
.get_first(0x2501)
953 scale
= pos
.get_first(0x2505)
955 elif (uid
== MATRIX_SCL
): # ScaleXYZ
956 pos
= get_point_3d(pos
, 1.0)
959 mtx
= mathutils
.Matrix
.Diagonal(pos
[:3]).to_4x4()
963 def create_matrix(prc
):
964 mtx
= mathutils
.Matrix
.Identity(4)
965 pos
= rot
= scl
= None
967 if (uid
== 0x2005): # Position/Rotation/Scale
968 pos
= get_position(get_references(prc
)[0])
969 rot
= get_rotation(get_references(prc
)[1])
970 scl
= get_scale(get_references(prc
)[2])
971 elif (uid
== 0x9154): # BipSlave Control
972 biped_sub_anim
= get_references(prc
)[2]
973 refs
= get_references(biped_sub_anim
)
974 scl
= get_scale(get_references(refs
[1])[0])
975 rot
= get_rotation(get_references(refs
[2])[0])
976 pos
= get_position(get_references(refs
[3])[0])
977 if (pos
is not None):
979 if (rot
is not None):
981 if (scl
is not None):
986 def get_matrix_mesh_material(node
):
987 refs
= get_reference(node
)
989 prs
= refs
.get(0, None)
990 msh
= refs
.get(1, None)
991 mat
= refs
.get(3, None)
992 lyr
= refs
.get(6, None)
994 refs
= get_references(node
)
1001 return prs
, msh
, mat
, lyr
1004 def get_property(properties
, idx
):
1005 for child
in properties
.children
:
1006 if (child
.types
& 0x100E):
1007 if (get_short(child
.data
, 0)[0] == idx
):
1012 def get_color(colors
, idx
):
1013 prop
= get_property(colors
, idx
)
1014 if (prop
is not None):
1015 siz
= len(prop
.data
) - 12
1016 col
, offset
= get_floats(prop
.data
, siz
, 3)
1017 return (col
[0], col
[1], col
[2])
1021 def get_value(colors
, idx
):
1022 prop
= get_property(colors
, idx
)
1023 if (prop
is not None):
1024 siz
= len(prop
.data
) - 4
1025 val
, offset
= get_float(prop
.data
, siz
)
1030 def get_parameter(colors
, fmt
):
1032 siz
= len(colors
.data
) - 12
1033 para
, offset
= get_floats(colors
.data
, siz
, 3)
1035 siz
= len(colors
.data
) - 4
1036 para
, offset
= get_float(colors
.data
, siz
)
1040 def get_standard_material(refs
):
1045 parameters
= get_references(colors
)[0]
1046 material
= Material()
1047 material
.set('ambient', get_color(parameters
, 0x00))
1048 material
.set('diffuse', get_color(parameters
, 0x01))
1049 material
.set('specular', get_color(parameters
, 0x02))
1050 material
.set('emissive', get_color(parameters
, 0x08))
1051 material
.set('shinines', get_value(parameters
, 0x0B))
1052 parablock
= refs
[4] # ParameterBlock2
1053 material
.set('glossines', get_value(parablock
, 0x02))
1054 material
.set('metallic', get_value(parablock
, 0x05))
1060 def get_vray_material(vry
):
1061 material
= Material()
1063 material
.set('diffuse', get_color(vry
, 0x01))
1064 material
.set('specular', get_color(vry
, 0x02))
1065 material
.set('shinines', get_value(vry
, 0x03))
1066 material
.set('refraction', get_value(vry
, 0x09))
1067 material
.set('emissive', get_color(vry
, 0x17))
1068 material
.set('glossines', get_value(vry
, 0x18))
1069 material
.set('metallic', get_value(vry
, 0x19))
1075 def get_corona_material(mtl
):
1076 material
= Material()
1079 material
.set('diffuse', get_parameter(cor
[3], 0x1))
1080 material
.set('specular', get_parameter(cor
[4], 0x1))
1081 material
.set('emissive', get_parameter(cor
[8], 0x1))
1082 material
.set('glossines', get_parameter(cor
[9], 0x2))
1088 def get_arch_material(ad
):
1089 material
= Material()
1091 material
.set('diffuse', get_color(ad
, 0x1A))
1092 material
.set('specular', get_color(ad
, 0x05))
1093 material
.set('shinines', get_value(ad
, 0x0B))
1099 def adjust_material(obj
, mat
):
1101 if (mat
is not None):
1103 if (uid
== 0x0002): # Standard
1104 refs
= get_references(mat
)
1105 material
= get_standard_material(refs
)
1106 elif (uid
== 0x0200): # Multi/Sub-Object
1107 refs
= get_references(mat
)
1108 material
= adjust_material(obj
, refs
[-1])
1109 elif (uid
== VRAY_MTL
): # VRayMtl
1110 refs
= get_reference(mat
)
1111 material
= get_vray_material(refs
[1])
1112 elif (uid
== CORO_MTL
): # CoronaMtl
1113 refs
= get_references(mat
)
1114 material
= get_corona_material(refs
[0])
1115 elif (uid
== ARCH_MTL
): # Arch
1116 refs
= get_references(mat
)
1117 material
= get_arch_material(refs
[0])
1118 if (obj
is not None) and (material
is not None):
1119 objMaterial
= bpy
.data
.materials
.new(get_cls_name(mat
))
1120 obj
.data
.materials
.append(objMaterial
)
1121 matShader
= PrincipledBSDFWrapper(objMaterial
, is_readonly
=False, use_nodes
=True)
1122 matShader
.base_color
= objMaterial
.diffuse_color
[:3] = material
.get('diffuse', (0.8, 0.8, 0.8))
1123 matShader
.specular_tint
= objMaterial
.specular_color
[:3] = material
.get('specular', (1, 1, 1))
1124 matShader
.specular
= objMaterial
.specular_intensity
= material
.get('glossines', 0.5)
1125 matShader
.roughness
= objMaterial
.roughness
= 1.0 - material
.get('shinines', 0.6)
1126 matShader
.metallic
= objMaterial
.metallic
= material
.get('metallic', 0)
1127 matShader
.emission_color
= material
.get('emissive', (0, 0, 0))
1128 matShader
.ior
= material
.get('refraction', 1.45)
1131 def adjust_matrix(obj
, node
):
1132 mtx
= create_matrix(node
).flatten()
1133 plc
= mathutils
.Matrix(*mtx
)
1134 obj
.matrix_world
= plc
1138 def create_shape(context
, pts
, indices
, node
, key
, mtx
, mat
, umt
):
1139 name
= node
.get_first(TYP_NAME
).data
1140 shape
= bpy
.data
.meshes
.new(name
)
1141 if (key
is not None):
1142 name
= "%s_%d" % (name
, key
)
1146 looplines
= loop
= 0
1147 nb_faces
= len(indices
)
1148 for fid
in range(nb_faces
):
1149 polyface
= indices
[fid
]
1150 looplines
+= len(polyface
)
1151 shape
.vertices
.add(len(pts
) // 3)
1152 shape
.loops
.add(looplines
)
1153 shape
.polygons
.add(nb_faces
)
1154 shape
.vertices
.foreach_set("co", pts
)
1156 loopstart
.append(loop
)
1159 shape
.polygons
.foreach_set("loop_start", loopstart
)
1160 shape
.loops
.foreach_set("vertex_index", data
)
1165 obj
= bpy
.data
.objects
.new(name
, shape
)
1166 context
.view_layer
.active_layer_collection
.collection
.objects
.link(obj
)
1167 obj
.matrix_world
= mtx
1169 adjust_material(obj
, mat
)
1174 def calc_point(data
):
1176 long, offset
= get_long(data
, 0)
1177 while (offset
< len(data
)):
1178 val
, offset
= get_long(data
, offset
)
1179 flt
, offset
= get_floats(data
, offset
, 3)
1184 def calc_point_float(data
):
1186 long, offset
= get_long(data
, 0)
1187 while (offset
< len(data
)):
1188 flt
, offset
= get_floats(data
, offset
, 3)
1193 def calc_point_3d(chunk
):
1195 count
, offset
= get_long(data
, 0)
1198 while (offset
< len(data
)):
1200 long, offset
= get_long(data
, offset
)
1201 pt
.points
, offset
= get_longs(data
, offset
, long)
1202 pt
.flags
, offset
= get_short(data
, offset
)
1203 if ((pt
.flags
& 0x01) != 0):
1204 pt
.f1
, offset
= get_long(data
, offset
)
1205 if ((pt
.flags
& 0x08) != 0):
1206 pt
.fH
, offset
= get_short(data
, offset
)
1207 if ((pt
.flags
& 0x10) != 0):
1208 pt
.f2
, offset
= get_long(data
, offset
)
1209 if ((pt
.flags
& 0x20) != 0):
1210 pt
.fA
, offset
= get_longs(data
, offset
, 2 * (long - 3))
1211 if (len(pt
.points
) > 0):
1212 pointlist
.append(pt
)
1213 except Exception as exc
:
1214 print('ArrayError:\n', "%s: offset = %d\n" % (exc
, offset
))
1218 def get_point_array(values
):
1220 if len(values
) >= 4:
1221 count
, offset
= get_long(values
, 0)
1223 floats
, offset
= get_floats(values
, offset
, 3)
1224 verts
.extend(floats
)
1229 def get_poly_4p(points
):
1231 for point
in points
:
1234 if (key
not in vertex
):
1236 vertex
[key
].append(ngon
)
1240 def get_poly_5p(data
):
1241 count
, offset
= get_long(data
, 0)
1244 pt
, offset
= get_longs(data
, offset
, 3)
1251 def get_poly_6p(data
):
1252 count
, offset
= get_long(data
, 0)
1254 while (offset
< len(data
)):
1255 long, offset
= get_longs(data
, offset
, 6)
1257 while ((i
> 3) and (long[i
] < 0)):
1260 polylist
.append(long[1:i
])
1264 def get_poly_data(chunk
):
1268 while (offset
< len(data
)):
1269 count
, offset
= get_long(data
, offset
)
1270 points
, offset
= get_longs(data
, offset
, count
)
1271 polylist
.append(points
)
1275 def create_editable_poly(context
, node
, msh
, mat
, mtx
, umt
, uvm
):
1276 coords
= point4i
= point6i
= pointNi
= None
1277 poly
= msh
.get_first(0x08FE)
1283 for child
in poly
.children
:
1284 if (child
.types
== 0x0100):
1285 coords
= calc_point(child
.data
)
1286 elif (child
.types
== 0x0108):
1287 point6i
= child
.data
1288 elif (child
.types
== 0x011A):
1289 point4i
= calc_point_3d(child
)
1290 elif (child
.types
== 0x0310):
1291 pointNi
= child
.data
1292 elif (child
.types
== 0x0124):
1293 lidx
.append(get_long(child
.data
, 0)[0])
1294 elif (child
.types
== 0x0128):
1295 lcrd
.append(calc_point_float(child
.data
))
1296 elif (child
.types
== 0x012B):
1297 lply
.append(get_poly_data(child
))
1298 if (point4i
is not None):
1299 vertex
= get_poly_4p(point4i
)
1300 if (len(vertex
) > 0):
1301 for key
, ngons
in vertex
.items():
1302 created |
= create_shape(context
, coords
, ngons
,
1303 node
, key
, mtx
, mat
, umt
)
1306 elif (point6i
is not None):
1307 ngons
= get_poly_6p(point6i
)
1308 created
= create_shape(context
, coords
, ngons
, node
,
1309 None, mtx
, mat
, umt
)
1310 elif (pointNi
is not None):
1311 ngons
= get_poly_5p(pointNi
)
1312 created
= create_shape(context
, coords
, ngons
, node
,
1313 None, mtx
, mat
, umt
)
1314 if (uvm
and len(lidx
) > 0):
1315 for i
in range(len(lidx
)):
1316 created |
= create_shape(context
, lcrd
[i
], lply
[i
],
1317 node
, lidx
[i
], mtx
, mat
, umt
)
1321 def create_editable_mesh(context
, node
, msh
, mat
, mtx
, umt
):
1322 poly
= msh
.get_first(0x08FE)
1325 vertex_chunk
= poly
.get_first(0x0914)
1326 clsid_chunk
= poly
.get_first(0x0912)
1327 coords
= get_point_array(vertex_chunk
.data
)
1328 ngons
= get_poly_5p(clsid_chunk
.data
)
1329 created
= create_shape(context
, coords
, ngons
, node
, None, mtx
, mat
, umt
)
1333 def create_shell(context
, node
, shell
, mat
, mtx
, umt
, uvm
):
1334 refs
= get_references(shell
)
1336 if (get_cls_name(msh
) == "'Editable Poly'"):
1337 created
= create_editable_poly(context
, node
, msh
, mat
, mtx
, umt
, uvm
)
1339 created
= create_editable_mesh(context
, node
, msh
, mat
, mtx
, umt
)
1343 def create_skipable(context
, node
, skip
):
1344 name
= node
.get_first(TYP_NAME
).data
1345 print(" skipping %s '%s'... " % (skip
, name
))
1349 def create_mesh(context
, node
, msh
, mtx
, mat
, umt
, uvm
):
1353 if (uid
== EDIT_MESH
):
1354 created
= create_editable_mesh(context
, node
, msh
, mat
, mtx
, umt
)
1355 elif (uid
== EDIT_POLY
):
1356 created
= create_editable_poly(context
, node
, msh
, mat
, mtx
, umt
, uvm
)
1357 elif (uid
in {0x2032, 0x2033}):
1358 created
= create_shell(context
, node
, msh
, mat
, mtx
, umt
, uvm
)
1360 skip
= SKIPPABLE
.get(uid
)
1361 if (skip
is not None):
1362 created
= create_skipable(context
, node
, skip
)
1366 def create_object(context
, node
, mscale
, usemat
, uvmesh
, transform
):
1367 parent
= get_node_parent(node
)
1368 node
.parent
= parent
1369 prs
, msh
, mat
, lyr
= get_matrix_mesh_material(node
)
1370 while ((parent
is not None) and (get_guid(parent
) != 0x02)):
1371 parent_mtx
= parent
.matrix
1373 prs
= prs
.dot(parent_mtx
)
1374 parent
= get_node_parent(parent
)
1376 mtx
= create_matrix(prs
) @ mscale
1379 created
, uid
= create_mesh(context
, node
, msh
, mtx
, mat
, usemat
, uvmesh
)
1382 def make_scene(context
, mscale
, usemat
, uvmesh
, transform
, parent
):
1383 for chunk
in parent
.children
:
1384 if (isinstance(chunk
, SceneChunk
)):
1385 if ((get_guid(chunk
) == 0x01) and (get_super_id(chunk
) == 0x01)):
1387 create_object(context
, chunk
, mscale
, usemat
, uvmesh
, transform
)
1388 except Exception as exc
:
1389 print('ImportError:', exc
, chunk
)
1392 def read_scene(context
, maxfile
, filename
, mscale
, usemat
, uvmesh
, transform
):
1394 SCENE_LIST
= read_chunks(maxfile
, 'Scene', filename
+ '.Scn.bin', conReader
=SceneChunk
)
1395 make_scene(context
, mscale
, usemat
, uvmesh
, transform
, SCENE_LIST
[0])
1398 def read(context
, filename
, mscale
, usemat
, uvmesh
, transform
):
1399 if (is_maxfile(filename
)):
1400 maxfile
= ImportMaxFile(filename
)
1401 read_class_data(maxfile
, filename
)
1402 read_config(maxfile
, filename
)
1403 read_directory(maxfile
, filename
)
1404 read_class_directory(maxfile
, filename
)
1405 read_video_postqueue(maxfile
, filename
)
1406 read_scene(context
, maxfile
, filename
, mscale
, usemat
, uvmesh
, transform
)
1408 print("File seems to be no 3D Studio Max file!")
1411 def load(operator
, context
, files
=None, directory
="", filepath
="", scale_objects
=1.0, use_material
=True,
1412 use_uv_mesh
=False, use_collection
=False, use_apply_matrix
=False, global_matrix
=None):
1413 context
.window
.cursor_set('WAIT')
1414 mscale
= mathutils
.Matrix
.Scale(scale_objects
, 4)
1415 if global_matrix
is not None:
1416 mscale
= global_matrix
@ mscale
1418 default_layer
= context
.view_layer
.active_layer_collection
.collection
1421 collection
= bpy
.data
.collections
.new(fl
.name
.split(".")[0])
1422 context
.scene
.collection
.children
.link(collection
)
1423 context
.view_layer
.active_layer_collection
= context
.view_layer
.layer_collection
.children
[collection
.name
]
1424 read(context
, os
.path
.join(directory
, fl
.name
), mscale
, usemat
=use_material
, uvmesh
=use_uv_mesh
, transform
=use_apply_matrix
)
1426 active
= context
.view_layer
.layer_collection
.children
.get(default_layer
.name
)
1427 if active
is not None:
1428 context
.view_layer
.active_layer_collection
= active
1430 context
.window
.cursor_set('DEFAULT')