# Copyright The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""Tool-specific initialization for TeX.

Generates .dvi files from .tex files.

There normally shouldn't be any need to import this module directly.
It will usually be imported through the generic SCons.Tool.Tool()
selection method.
"""
import os
import re
import shutil
import sys
import platform
import glob

import SCons.Action
import SCons.Util
import SCons.Scanner.LaTeX
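
# Minimal usage sketch (illustrative only; the file names are examples): in an
# SConstruct you would normally just request a DVI target and let this tool
# pick tex or latex and drive bibtex/makeindex as needed:
#
#   env = Environment(tools=['default', 'tex'])
#   env.DVI(target='report.dvi', source='report.tex')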
Verbose = False

must_rerun_latex = True
# these are files that just need to be checked for changes and then rerun latex
check_suffixes = ['.toc', '.lof', '.lot', '.out', '.nav', '.snm']
# these are files that require bibtex or makeindex to be run when they change
all_suffixes = check_suffixes + ['.bbl', '.idx', '.nlo', '.glo', '.acn', '.bcf']
# regular expressions used to search for Latex features
# or outputs that require rerunning latex

# search for all .aux files opened by latex (recorded in the .fls file)
openout_aux_re = re.compile(r"OUTPUT *(.*\.aux)")

# search for all .bcf files opened by latex (recorded in the .fls file)
openout_bcf_re = re.compile(r"OUTPUT *(.*\.bcf)")
#printindex_re = re.compile(r"^[^%]*\\printindex", re.MULTILINE)
#printnomenclature_re = re.compile(r"^[^%]*\\printnomenclature", re.MULTILINE)
#printglossary_re = re.compile(r"^[^%]*\\printglossary", re.MULTILINE)
# search to find rerun warnings
warning_rerun_str = r'(^LaTeX Warning:.*Rerun)|(^Package \w+ Warning:.*Rerun)'
warning_rerun_re = re.compile(warning_rerun_str, re.MULTILINE)
# search to find citation rerun warnings
rerun_citations_str = r"^LaTeX Warning:.*\n.*Rerun to get citations correct"
rerun_citations_re = re.compile(rerun_citations_str, re.MULTILINE)
# search to find undefined references or citations warnings
undefined_references_str = r'(^LaTeX Warning:.*undefined references)|(^Package \w+ Warning:.*undefined citations)'
undefined_references_re = re.compile(undefined_references_str, re.MULTILINE)
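
# Illustrative log lines the three patterns above are intended to catch
# (typical latex/package wording; exact text varies between packages):
#   LaTeX Warning: Label(s) may have changed. Rerun to get cross-references right.
#   Package natbib Warning: Citation(s) may have changed. Rerun to get citations correct.
#   LaTeX Warning: There were undefined references.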
auxfile_re = re.compile(r".", re.MULTILINE)
tableofcontents_re = re.compile(r"^[^%\n]*\\tableofcontents", re.MULTILINE)
makeindex_re = re.compile(r"^[^%\n]*\\makeindex", re.MULTILINE)
bibliography_re = re.compile(r"^[^%\n]*\\bibliography", re.MULTILINE)
bibunit_re = re.compile(r"^[^%\n]*\\begin\{bibunit\}", re.MULTILINE)
multibib_re = re.compile(r"^[^%\n]*\\newcites\{([^\}]*)\}", re.MULTILINE)
addbibresource_re = re.compile(r"^[^%\n]*\\(addbibresource|addglobalbib|addsectionbib)", re.MULTILINE)
listoffigures_re = re.compile(r"^[^%\n]*\\listoffigures", re.MULTILINE)
listoftables_re = re.compile(r"^[^%\n]*\\listoftables", re.MULTILINE)
hyperref_re = re.compile(r"^[^%\n]*\\usepackage.*\{hyperref\}", re.MULTILINE)
makenomenclature_re = re.compile(r"^[^%\n]*\\makenomenclature", re.MULTILINE)
makeglossary_re = re.compile(r"^[^%\n]*\\makeglossary", re.MULTILINE)
makeglossaries_re = re.compile(r"^[^%\n]*\\makeglossaries", re.MULTILINE)
makeacronyms_re = re.compile(r"^[^%\n]*\\makeglossaries", re.MULTILINE)
beamer_re = re.compile(r"^[^%\n]*\\documentclass\{beamer\}", re.MULTILINE)
regex = r'^[^%\n]*\\newglossary\s*\[([^\]]+)\]?\s*\{([^}]*)\}\s*\{([^}]*)\}\s*\{([^}]*)\}\s*\{([^}]*)\}'
newglossary_re = re.compile(regex, re.MULTILINE)
biblatex_re = re.compile(r"^[^%\n]*\\usepackage.*\{biblatex\}", re.MULTILINE)
newglossary_suffix = []
# search to find all files included by Latex
include_re = re.compile(r'^[^%\n]*\\(?:include|input){([^}]*)}', re.MULTILINE)
includeOnly_re = re.compile(r'^[^%\n]*\\(?:include){([^}]*)}', re.MULTILINE)
# search to find all graphics files included by Latex
includegraphics_re = re.compile(r'^[^%\n]*\\(?:includegraphics(?:\[[^\]]+\])?){([^}]*)}', re.MULTILINE)
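
# Illustrative source lines these are matched against (file names are only
# examples):
#   \input{chapter1}                               -> include_re captures "chapter1"
#   \includegraphics[width=0.5\textwidth]{fig1}    -> includegraphics_re captures "fig1"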
# search to find all files opened by Latex (recorded in .log file)
openout_re = re.compile(r"OUTPUT *(.*)")
# list of graphics file extensions for TeX and LaTeX
TexGraphics = SCons.Scanner.LaTeX.TexGraphics
LatexGraphics = SCons.Scanner.LaTeX.LatexGraphics
# An Action sufficient to build any generic tex file.
TeXAction = None

# An action to build a latex file. This action might be needed more
# than once if we are dealing with labels and bibtex.
LaTeXAction = None

# An action to run BibTeX on a file.
BibTeXAction = None

# An action to run Biber on a file.
BiberAction = None

# An action to run MakeIndex on a file.
MakeIndexAction = None

# An action to run MakeIndex (for nomencl) on a file.
MakeNclAction = None

# An action to run MakeIndex (for glossary) on a file.
MakeGlossaryAction = None

# An action to run MakeIndex (for acronyms) on a file.
MakeAcronymsAction = None

# An action to run MakeIndex (for newglossary commands) on a file.
MakeNewGlossaryAction = None
# Used as a return value of modify_env_var if the variable is not set.
_null = SCons.Scanner.LaTeX._null

modify_env_var = SCons.Scanner.LaTeX.modify_env_var
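
# modify_env_var(env, var, abspath) adds abspath to env['ENV'][var] so TeX
# tools run in the target directory can still find files next to the source,
# and returns the variable's previous value (or _null if it was not set).
# Sketch of the save/restore pattern used throughout this module:
#   saved = modify_env_var(env, 'TEXINPUTS', abspath)
#   ...run latex / scan files...
#   if saved is _null:
#       del env['ENV']['TEXINPUTS']
#   else:
#       env['ENV']['TEXINPUTS'] = saved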
def check_file_error_message(utility, filename: str='log') -> None:
    msg = '%s returned an error, check the %s file\n' % (utility, filename)
    sys.stdout.write(msg)
def FindFile(name, suffixes, paths, env, requireExt: bool=False):
    if requireExt:
        name, ext = SCons.Util.splitext(name)
        # if the user gave an extension use it.
        if ext:
            name = name + ext
    if Verbose:
        print(" searching for '%s' with extensions: " % name, suffixes)

    for path in paths:
        testName = os.path.join(path, name)
        if Verbose:
            print(" look for '%s'" % testName)
        if os.path.isfile(testName):
            if Verbose:
                print(" found '%s'" % testName)
            return env.fs.File(testName)
        else:
            name_ext = SCons.Util.splitext(testName)[1]
            if name_ext:
                continue

            # if no suffix try adding those passed in
            for suffix in suffixes:
                testNameExt = testName + suffix
                if Verbose:
                    print(" look for '%s'" % testNameExt)

                if os.path.isfile(testNameExt):
                    if Verbose:
                        print(" found '%s'" % testNameExt)
                    return env.fs.File(testNameExt)
    if Verbose:
        print(" did not find '%s'" % name)
    return None
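
# Illustrative use of FindFile (hypothetical document name): resolve a file
# referenced as \include{chapter1} against the TEXINPUTS-derived path list:
#   node = FindFile('chapter1', ['.tex', '.ltx', '.latex'], paths, env, requireExt=False)
# The return value is an SCons File node, or None if nothing was found.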
def InternalLaTeXAuxAction(XXXLaTeXAction, target=None, source=None, env=None):
    """A builder for LaTeX files that checks the output in the aux file
    and decides how many times to use LaTeXAction, and BibTeXAction."""
    global must_rerun_latex

    # This routine is called with two actions. In this file for DVI builds
    # with LaTeXAction and from the pdflatex.py with PDFLaTeXAction
    # set this up now for the case where the user requests a different extension
    # for the target filename
    if XXXLaTeXAction == LaTeXAction:
        callerSuffix = ".dvi"
    else:
        callerSuffix = env['PDFSUFFIX']
    basename = SCons.Util.splitext(str(source[0]))[0]
    basedir = os.path.split(str(source[0]))[0]
    basefile = os.path.split(str(basename))[1]
    abspath = os.path.abspath(basedir)

    targetext = os.path.splitext(str(target[0]))[1]
    targetdir = os.path.split(str(target[0]))[0]
    saved_env = {}
    for var in SCons.Scanner.LaTeX.LaTeX.env_variables:
        saved_env[var] = modify_env_var(env, var, abspath)
    # Create base file names with the target directory since the auxiliary files
    # will be made there.  That's because the *COM variables have the cd
    # command in the prolog.  We check
    # for the existence of files before opening them--even ones like the
    # aux file that TeX always creates--to make it possible to write tests
    # with stubs that don't necessarily generate all of the same files.

    targetbase = os.path.join(targetdir, basefile)
    # if there is a \makeindex there will be a .idx and thus
    # we have to run makeindex at least once to keep the build
    # happy even if there is no index.
    # Same for glossaries, nomenclature, and acronyms
    src_content = source[0].get_text_contents()
    run_makeindex = makeindex_re.search(src_content) and not os.path.isfile(targetbase + '.idx')
    run_nomenclature = makenomenclature_re.search(src_content) and not os.path.isfile(targetbase + '.nlo')
    run_glossary = makeglossary_re.search(src_content) and not os.path.isfile(targetbase + '.glo')
    run_glossaries = makeglossaries_re.search(src_content) and not os.path.isfile(targetbase + '.glo')
    run_acronyms = makeacronyms_re.search(src_content) and not os.path.isfile(targetbase + '.acn')
    saved_hashes = {}
    suffix_nodes = {}

    for suffix in all_suffixes + sum(newglossary_suffix, []):
        theNode = env.fs.File(targetbase + suffix)
        suffix_nodes[suffix] = theNode
        saved_hashes[suffix] = theNode.get_csig()
    if Verbose:
        print("hashes: ", saved_hashes)
    must_rerun_latex = True

    # .aux files already processed by BibTex
    already_bibtexed = []
    def check_content_hash(filenode, suffix) -> bool:
        """
        Routine to update content hash and compare
        """
        global must_rerun_latex
        # two calls to clear old csig
        filenode.clear_memoized_values()
        filenode.ninfo = filenode.new_ninfo()
        new_md5 = filenode.get_csig()

        if saved_hashes[suffix] == new_md5:
            if Verbose:
                print("file %s not changed" % (targetbase + suffix))
            return False  # unchanged
        saved_hashes[suffix] = new_md5
        must_rerun_latex = True
        if Verbose:
            print("file %s changed, rerunning Latex, new hash = " % (targetbase + suffix), new_md5)
        return True  # changed
    # generate the file name that latex will generate
    resultfilename = targetbase + callerSuffix
    count = 0

    while must_rerun_latex and count < int(env.subst('$LATEXRETRIES')):
        result = XXXLaTeXAction(target, source, env)
        if result != 0:
            return result

        count = count + 1

        must_rerun_latex = False
        # Decide if various things need to be run, or run again.

        # Read the log file to find warnings/errors
        logfilename = targetbase + '.log'
        logContent = ''
        if os.path.isfile(logfilename):
            with open(logfilename, "rb") as f:
                logContent = f.read().decode(errors='replace')
        # Read the fls file to find all .aux files
        flsfilename = targetbase + '.fls'
        flsContent = ''
        auxfiles = []
        if os.path.isfile(flsfilename):
            with open(flsfilename, "r") as f:
                flsContent = f.read()
            auxfiles = openout_aux_re.findall(flsContent)
            # remove duplicates
            dups = {}
            for x in auxfiles:
                dups[x] = 1
            auxfiles = list(dups.keys())
        bcffiles = []
        if os.path.isfile(flsfilename):
            with open(flsfilename, "r") as f:
                flsContent = f.read()
            bcffiles = openout_bcf_re.findall(flsContent)
            # remove duplicates
            dups = {}
            for x in bcffiles:
                dups[x] = 1
            bcffiles = list(dups.keys())
        if Verbose:
            print("auxfiles ", auxfiles)
            print("bcffiles ", bcffiles)
        # Now decide if bibtex will need to be run.
        # The information that bibtex reads from the .aux file is
        # pass-independent. If we find (below) that the .bbl file is unchanged,
        # then the last latex saw a correct bibliography.
        # Therefore only do this once.
        # Go through all .aux files and remember the files already done.
        for auxfilename in auxfiles:
            if auxfilename not in already_bibtexed:
                already_bibtexed.append(auxfilename)
                target_aux = os.path.join(targetdir, auxfilename)
                if os.path.isfile(target_aux):
                    with open(target_aux, "r") as f:
                        content = f.read()
                    if content.find("bibdata") != -1:
                        if Verbose:
                            print("Need to run bibtex on ", auxfilename)
                        bibfile = env.fs.File(SCons.Util.splitext(target_aux)[0])
                        result = BibTeXAction(bibfile, bibfile, env)
                        if result != 0:
                            check_file_error_message(env['BIBTEX'], 'blg')
                        check_content_hash(suffix_nodes[".bbl"], ".bbl")
        # Now decide if biber will need to be run.
        # When the backend for biblatex is biber (by choice or default) the
        # citation information is put in the .bcf file.
        # The information that biber reads from the .bcf file is
        # pass-independent. If we find (below) that the .bbl file is unchanged,
        # then the last latex saw a correct bibliography.
        # Therefore only do this once.
        # Go through all .bcf files and remember the files already done.
        for bcffilename in bcffiles:
            if bcffilename not in already_bibtexed:
                already_bibtexed.append(bcffilename)
                target_bcf = os.path.join(targetdir, bcffilename)
                if os.path.isfile(target_bcf):
                    with open(target_bcf, "r") as f:
                        content = f.read()
                    if content.find("bibdata") != -1:
                        if Verbose:
                            print("Need to run biber on ", bcffilename)
                        bibfile = env.fs.File(SCons.Util.splitext(target_bcf)[0])
                        result = BiberAction(bibfile, bibfile, env)
                        if result != 0:
                            check_file_error_message(env['BIBER'], 'blg')
                        check_content_hash(suffix_nodes[".bbl"], ".bbl")
        # Now decide if latex will need to be run again due to index.
        if check_content_hash(suffix_nodes['.idx'], '.idx') or (count == 1 and run_makeindex):
            # We must run makeindex
            if Verbose:
                print("Need to run makeindex")
            idxfile = suffix_nodes['.idx']
            result = MakeIndexAction(idxfile, idxfile, env)
            if result != 0:
                check_file_error_message(env['MAKEINDEX'], 'ilg')
                return result
        # TO-DO: need to add a way for the user to extend this list for whatever
        # auxiliary files they create in other (or their own) packages.
        # Harder is the case where an action needs to be called -- that should be rare (I hope?)
        for index in check_suffixes:
            check_content_hash(suffix_nodes[index], index)
        # Now decide if latex will need to be run again due to nomenclature.
        if check_content_hash(suffix_nodes['.nlo'], '.nlo') or (count == 1 and run_nomenclature):
            # We must run makeindex
            if Verbose:
                print("Need to run makeindex for nomenclature")
            nclfile = suffix_nodes['.nlo']
            result = MakeNclAction(nclfile, nclfile, env)
            if result != 0:
                check_file_error_message('%s (nomenclature)' % env['MAKENCL'],
                                         'nlg')
        # Now decide if latex will need to be run again due to glossary.
        if check_content_hash(suffix_nodes['.glo'], '.glo') or (count == 1 and run_glossaries) or (count == 1 and run_glossary):
            # We must run makeindex
            if Verbose:
                print("Need to run makeindex for glossary")
            glofile = suffix_nodes['.glo']
            result = MakeGlossaryAction(glofile, glofile, env)
            if result != 0:
                check_file_error_message('%s (glossary)' % env['MAKEGLOSSARY'],
                                         'glg')
        # Now decide if latex will need to be run again due to acronyms.
        if check_content_hash(suffix_nodes['.acn'], '.acn') or (count == 1 and run_acronyms):
            # We must run makeindex
            if Verbose:
                print("Need to run makeindex for acronyms")
            acrfile = suffix_nodes['.acn']
            result = MakeAcronymsAction(acrfile, acrfile, env)
            if result != 0:
                check_file_error_message('%s (acronyms)' % env['MAKEACRONYMS'],
                                         'alg')
        # Now decide if latex will need to be run again due to newglossary command.
        for ng in newglossary_suffix:
            if check_content_hash(suffix_nodes[ng[2]], ng[2]) or (count == 1):
                # We must run makeindex
                if Verbose:
                    print("Need to run makeindex for newglossary")
                newglfile = suffix_nodes[ng[2]]
                MakeNewGlossaryAction = SCons.Action.Action(
                    "$MAKENEWGLOSSARYCOM ${SOURCE.filebase}%s -s ${SOURCE.filebase}.ist -t ${SOURCE.filebase}%s -o ${SOURCE.filebase}%s"
                    % (ng[2], ng[0], ng[1]),
                    "$MAKENEWGLOSSARYCOMSTR",
                )

                result = MakeNewGlossaryAction(newglfile, newglfile, env)
                if result != 0:
                    check_file_error_message(
                        '%s (newglossary)' % env['MAKENEWGLOSSARY'], ng[0]
                    )
        # Now decide if latex needs to be run yet again to resolve warnings.
        if warning_rerun_re.search(logContent):
            must_rerun_latex = True
            if Verbose:
                print("rerun Latex due to latex or package rerun warning")

        if rerun_citations_re.search(logContent):
            must_rerun_latex = True
            if Verbose:
                print("rerun Latex due to 'Rerun to get citations correct' warning")

        if undefined_references_re.search(logContent):
            must_rerun_latex = True
            if Verbose:
                print("rerun Latex due to undefined references or citations")
        if count >= int(env.subst('$LATEXRETRIES')) and must_rerun_latex:
            print("reached max number of retries on Latex,", int(env.subst('$LATEXRETRIES')))
    # rename Latex's output to what the target name is
    if not (str(target[0]) == resultfilename and os.path.isfile(resultfilename)):
        if os.path.isfile(resultfilename):
            print("move %s to %s" % (resultfilename, str(target[0])))
            shutil.move(resultfilename, str(target[0]))
    # Original comment (when TEXPICTS was not restored):
    # The TEXPICTS environment variable is needed by a dvi -> pdf step
    # later on Mac OSX so leave it.
    #
    # It is also used when searching for pictures (implicit dependencies).
    # Why not set the variable again in the respective builder instead
    # of leaving local modifications in the environment? What if multiple
    # latex builds in different directories need different TEXPICTS?
    for var in SCons.Scanner.LaTeX.LaTeX.env_variables:
        if var == 'TEXPICTS':
            continue
        if saved_env[var] is _null:
            try:
                del env['ENV'][var]
            except KeyError:
                pass  # was never set
        else:
            env['ENV'][var] = saved_env[var]

    return result
def LaTeXAuxAction(target=None, source=None, env=None):
    result = InternalLaTeXAuxAction(LaTeXAction, target, source, env)
    return result
LaTeX_re = re.compile("\\\\document(style|class)")
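
# Matches lines such as "\documentclass{article}" or "\documentstyle{report}",
# which mark a source as LaTeX rather than plain TeX (illustrative examples).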
def is_LaTeX(flist, env, abspath) -> bool:
    """Scan a file list to decide if it's TeX- or LaTeX-flavored."""

    # We need to scan files that are included in case the
    # \documentclass command is in them.
    # get path list from both env['TEXINPUTS'] and env['ENV']['TEXINPUTS']
    savedpath = modify_env_var(env, "TEXINPUTS", abspath)
    paths = env["ENV"]["TEXINPUTS"]
    if SCons.Util.is_List(paths):
        pass
    else:
        # Split at os.pathsep to convert into absolute path
        paths = paths.split(os.pathsep)

    # now that we have the path list restore the env
    if savedpath is _null:
        try:
            del env["ENV"]["TEXINPUTS"]
        except KeyError:
            pass  # was never set
    else:
        env["ENV"]["TEXINPUTS"] = savedpath
    if Verbose:
        print("is_LaTeX search path ", paths)
        print("files to search: ", flist)
    # Now that we have the search path and file list, check each one
    for f in flist:
        if Verbose:
            print(f" checking for Latex source {f}")

        content = f.get_text_contents()
        if LaTeX_re.search(content):
            if Verbose:
                print(f"file {f} is a LaTeX file")
            return True
        if Verbose:
            print(f"file {f} is not a LaTeX file")

        # now find included files
        inc_files = []
        inc_files.extend(include_re.findall(content))
        if Verbose:
            print(f"files included by '{f}': ", inc_files)
        # inc_files is list of file names as given. need to find them
        # using TEXINPUTS paths.

        # search the included files
        for src in inc_files:
            srcNode = FindFile(
                src, [".tex", ".ltx", ".latex"], paths, env, requireExt=False
            )
            # make this a list since is_LaTeX takes a list.
            fileList = [srcNode,]
            if Verbose:
                print("FindFile found ", srcNode)
            if srcNode is not None:
                file_test = is_LaTeX(fileList, env, abspath)

                # return on first file that finds latex is needed.
                if file_test:
                    return file_test

        if Verbose:
            print(f" done scanning {f}")

    return False
def TeXLaTeXFunction(target=None, source=None, env=None):
    """A builder for TeX and LaTeX that scans the source file to
    decide the "flavor" of the source and then executes the appropriate
    program."""

    # find these paths for use in is_LaTeX to search for included files
    basedir = os.path.split(str(source[0]))[0]
    abspath = os.path.abspath(basedir)

    if is_LaTeX(source, env, abspath):
        result = LaTeXAuxAction(target, source, env)
        if result != 0:
            check_file_error_message(env['LATEX'])
    else:
        result = TeXAction(target, source, env)
        if result != 0:
            check_file_error_message(env['TEX'])

    return result
def TeXLaTeXStrFunction(target=None, source=None, env=None):
    """A strfunction for TeX and LaTeX that scans the source file to
    decide the "flavor" of the source and then returns the appropriate
    command string."""
    if env.GetOption("no_exec"):

        # find these paths for use in is_LaTeX to search for included files
        basedir = os.path.split(str(source[0]))[0]
        abspath = os.path.abspath(basedir)

        if is_LaTeX(source, env, abspath):
            result = env.subst('$LATEXCOM', 0, target, source) + " ..."
        else:
            result = env.subst("$TEXCOM", 0, target, source) + " ..."
    else:
        result = ''

    return result
def tex_eps_emitter(target, source, env):
    """An emitter for TeX and LaTeX sources when
    executing tex or latex. It will accept .ps and .eps
    graphics files.
    """
    (target, source) = tex_emitter_core(target, source, env, TexGraphics)

    return (target, source)
def tex_pdf_emitter(target, source, env):
    """An emitter for TeX and LaTeX sources when
    executing pdftex or pdflatex. It will accept graphics
    files of types .pdf, .jpg, .png, .gif, and .tif.
    """
    (target, source) = tex_emitter_core(target, source, env, LatexGraphics)

    return (target, source)
def ScanFiles(theFile, target, paths, file_tests, file_tests_search, env, graphics_extensions, targetdir, aux_files):
    """For theFile (a Node) update any file_tests and search for graphics files,
    then find all included files and call ScanFiles recursively for each of them."""

    content = theFile.get_text_contents()
    if Verbose:
        print(" scanning ", str(theFile))
    for i in range(len(file_tests_search)):
        if file_tests[i][0] is None:
            if Verbose:
                print("scan i ", i, " file_tests[i] ", file_tests[i], file_tests[i][1])
            file_tests[i][0] = file_tests_search[i].search(content)
            if Verbose and file_tests[i][0]:
                print(" found match for ", file_tests[i][1][-1])
            # for newglossary insert the suffixes in file_tests[i]
            if file_tests[i][0] and file_tests[i][1][-1] == 'newglossary':
                findresult = file_tests_search[i].findall(content)
                for l in range(len(findresult)):
                    (file_tests[i][1]).insert(0, '.' + findresult[l][3])
                    (file_tests[i][1]).insert(0, '.' + findresult[l][2])
                    (file_tests[i][1]).insert(0, '.' + findresult[l][0])
                    suffix_list = ['.' + findresult[l][0], '.' + findresult[l][2], '.' + findresult[l][3]]
                    newglossary_suffix.append(suffix_list)
                    if Verbose:
                        print(" new suffixes for newglossary ", newglossary_suffix)
    incResult = includeOnly_re.search(content)
    if incResult:
        aux_files.append(os.path.join(targetdir, incResult.group(1)))
    if Verbose:
        print(r"\include file names : ", aux_files)
    # recursively call this on each of the included files
    inc_files = []
    inc_files.extend(include_re.findall(content))
    if Verbose:
        print("files included by '%s': " % str(theFile), inc_files)
    # inc_files is list of file names as given. need to find them
    # using TEXINPUTS paths.

    for src in inc_files:
        srcNode = FindFile(src, ['.tex', '.ltx', '.latex'], paths, env, requireExt=False)
        if srcNode is not None:
            file_tests = ScanFiles(srcNode, target, paths, file_tests, file_tests_search, env, graphics_extensions, targetdir, aux_files)
    if Verbose:
        print(" done scanning ", str(theFile))

    return file_tests
def tex_emitter_core(target, source, env, graphics_extensions):
    """An emitter for TeX and LaTeX sources.
    For LaTeX sources we try and find the common created files that
    are needed on subsequent runs of latex to finish tables of contents,
    bibliographies, indices, lists of figures, and hyperlink references.
    """
    basename = SCons.Util.splitext(str(source[0]))[0]
    basefile = os.path.split(str(basename))[1]
    targetdir = os.path.split(str(target[0]))[0]
    targetbase = os.path.join(targetdir, basefile)

    basedir = os.path.split(str(source[0]))[0]
    abspath = os.path.abspath(basedir)
    target[0].attributes.path = abspath
    # file names we will make use of in searching the sources and log file
    emit_suffixes = ['.aux', '.log', '.ilg', '.blg', '.nls', '.nlg', '.gls', '.glg', '.alg'] + all_suffixes
    auxfilename = targetbase + '.aux'
    logfilename = targetbase + '.log'
    flsfilename = targetbase + '.fls'
    syncfilename = targetbase + '.synctex.gz'

    env.SideEffect(auxfilename, target[0])
    env.SideEffect(logfilename, target[0])
    env.SideEffect(flsfilename, target[0])
    env.SideEffect(syncfilename, target[0])
    if Verbose:
        print("side effect :", auxfilename, logfilename, flsfilename, syncfilename)
    env.Clean(target[0], auxfilename)
    env.Clean(target[0], logfilename)
    env.Clean(target[0], flsfilename)
    env.Clean(target[0], syncfilename)
    content = source[0].get_text_contents()
    # set up list with the regular expressions
    # we use to find features used
    file_tests_search = [auxfile_re,
                         makeindex_re,
                         bibliography_re,
                         bibunit_re,
                         multibib_re,
                         addbibresource_re,
                         tableofcontents_re,
                         listoffigures_re,
                         listoftables_re,
                         hyperref_re,
                         makenomenclature_re,
                         makeglossary_re,
                         makeglossaries_re,
                         makeacronyms_re,
                         beamer_re,
                         newglossary_re,
                         biblatex_re]
    # set up list with the file suffixes that need emitting
    # when a feature is found
    file_tests_suff = [['.aux', 'aux_file'],
                       ['.idx', '.ind', '.ilg', 'makeindex'],
                       ['.bbl', '.blg', 'bibliography'],
                       ['.bbl', '.blg', 'bibunit'],
                       ['.bbl', '.blg', 'multibib'],
                       ['.bbl', '.blg', '.bcf', 'addbibresource'],
                       ['.toc', 'contents'],
                       ['.lof', 'figures'],
                       ['.lot', 'tables'],
                       ['.out', 'hyperref'],
                       ['.nlo', '.nls', '.nlg', 'nomenclature'],
                       ['.glo', '.gls', '.glg', 'glossary'],
                       ['.glo', '.gls', '.glg', 'glossaries'],
                       ['.acn', '.acr', '.alg', 'acronyms'],
                       ['.nav', '.snm', '.out', '.toc', 'beamer'],
                       ['newglossary', ],
                       ['.bcf', '.blg', 'biblatex']]
    # for newglossary the suffixes are added as we find the command
    # build the list of lists
    file_tests = []
    for i in range(len(file_tests_search)):
        file_tests.append([None, file_tests_suff[i]])
    # TO-DO: need to add a way for the user to extend this list for whatever
    # auxiliary files they create in other (or their own) packages

    # get path list from both env['TEXINPUTS'] and env['ENV']['TEXINPUTS']
    savedpath = modify_env_var(env, 'TEXINPUTS', abspath)
    paths = env['ENV']['TEXINPUTS']
    if SCons.Util.is_List(paths):
        pass
    else:
        # Split at os.pathsep to convert into absolute path
        paths = paths.split(os.pathsep)

    # now that we have the path list restore the env
    if savedpath is _null:
        try:
            del env['ENV']['TEXINPUTS']
        except KeyError:
            pass  # was never set
    else:
        env['ENV']['TEXINPUTS'] = savedpath

    if Verbose:
        print("search path ", paths)
    # scan all sources for side effect files
    aux_files = []
    file_tests = ScanFiles(source[0], target, paths, file_tests, file_tests_search, env, graphics_extensions, targetdir, aux_files)
    for (theSearch, suffix_list) in file_tests:
        # add side effects if feature is present. If file is to be generated, add all side effects.
        if Verbose and theSearch:
            print("check side effects for ", suffix_list[-1])
        if theSearch is not None or not source[0].exists():
            file_list = [targetbase,]
            # for bibunit we need a list of files
            if suffix_list[-1] == 'bibunit':
                file_basename = os.path.join(targetdir, 'bu*.aux')
                file_list = glob.glob(file_basename)
                # remove the suffix '.aux'
                for fl in file_list.copy():
                    file_list.append(SCons.Util.splitext(fl)[0])
            # for multibib we need a list of files
            if suffix_list[-1] == 'multibib':
                for multibibmatch in multibib_re.finditer(content):
                    if Verbose:
                        print("multibib match ", multibibmatch.group(1))
                    if multibibmatch is not None:
                        baselist = multibibmatch.group(1).split(',')
                        if Verbose:
                            print("multibib list ", baselist)
                        for bl in baselist:
                            file_list.append(os.path.join(targetdir, bl))
            # now define the side effects
            for file_name in file_list:
                for suffix in suffix_list[:-1]:
                    env.SideEffect(file_name + suffix, target[0])
                    if Verbose:
                        print("side effect tst :", file_name + suffix, " target is ", str(target[0]))
                    env.Clean(target[0], file_name + suffix)
    for aFile in aux_files:
        aFile_base = SCons.Util.splitext(aFile)[0]
        env.SideEffect(aFile_base + '.aux', target[0])
        if Verbose:
            print("side effect aux :", aFile_base + '.aux')
        env.Clean(target[0], aFile_base + '.aux')

    # read fls file to get all other files that latex creates and will read on the next pass
    # remove files from list that we explicitly dealt with above
    if os.path.isfile(flsfilename):
        with open(flsfilename, "r") as f:
            content = f.read()
        out_files = openout_re.findall(content)
        myfiles = [auxfilename, logfilename, flsfilename, targetbase + '.dvi', targetbase + '.pdf']
        for filename in out_files[:]:
            if filename in myfiles:
                out_files.remove(filename)
        env.SideEffect(out_files, target[0])
        if Verbose:
            print("side effect fls :", out_files)
        env.Clean(target[0], out_files)

    return (target, source)
TeXLaTeXAction = None

def generate(env) -> None:
    """Add Builders and construction variables for TeX to an Environment."""

    global TeXLaTeXAction
    if TeXLaTeXAction is None:
        TeXLaTeXAction = SCons.Action.Action(TeXLaTeXFunction,
                                             strfunction=TeXLaTeXStrFunction)

    env.AppendUnique(LATEXSUFFIXES=SCons.Tool.LaTeXSuffixes)

    generate_common(env)

    from . import dvi
    dvi.generate(env)

    bld = env['BUILDERS']['DVI']
    bld.add_action('.tex', TeXLaTeXAction)
    bld.add_emitter('.tex', tex_eps_emitter)
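
# The construction variables set up in generate_common() below can be tuned
# per environment; a hedged sketch (names of files are examples only):
#
#   env = Environment(tools=['default', 'tex'])
#   env['LATEXRETRIES'] = 6                       # allow more reruns
#   env.AppendUnique(LATEXFLAGS='-shell-escape')  # extra latex flag
#   env.DVI('report.tex')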
def generate_darwin(env) -> None:
    try:
        environ = env['ENV']
    except KeyError:
        environ = {}
        env['ENV'] = environ

    if platform.system() == 'Darwin':
        try:
            ospath = env['ENV']['PATHOSX']
        except KeyError:
            ospath = None
        if ospath:
            env.AppendENVPath('PATH', ospath)
def generate_common(env) -> None:
    """Add internal Builders and construction variables for LaTeX to an Environment."""

    # Add OSX system paths so TeX tools can be found
    # when a list of tools is given the exists() method is not called
    generate_darwin(env)

    # A generic tex file Action, sufficient for all tex files.
    global TeXAction
    if TeXAction is None:
        TeXAction = SCons.Action.Action("$TEXCOM", "$TEXCOMSTR")

    # An Action to build a latex file. This might be needed more
    # than once if we are dealing with labels and bibtex.
    global LaTeXAction
    if LaTeXAction is None:
        LaTeXAction = SCons.Action.Action("$LATEXCOM", "$LATEXCOMSTR")

    # Define an action to run BibTeX on a file.
    global BibTeXAction
    if BibTeXAction is None:
        BibTeXAction = SCons.Action.Action("$BIBTEXCOM", "$BIBTEXCOMSTR")

    # Define an action to run Biber on a file.
    global BiberAction
    if BiberAction is None:
        BiberAction = SCons.Action.Action("$BIBERCOM", "$BIBERCOMSTR")

    # Define an action to run MakeIndex on a file.
    global MakeIndexAction
    if MakeIndexAction is None:
        MakeIndexAction = SCons.Action.Action("$MAKEINDEXCOM", "$MAKEINDEXCOMSTR")

    # Define an action to run MakeIndex on a file for nomenclatures.
    global MakeNclAction
    if MakeNclAction is None:
        MakeNclAction = SCons.Action.Action("$MAKENCLCOM", "$MAKENCLCOMSTR")

    # Define an action to run MakeIndex on a file for glossaries.
    global MakeGlossaryAction
    if MakeGlossaryAction is None:
        MakeGlossaryAction = SCons.Action.Action("$MAKEGLOSSARYCOM", "$MAKEGLOSSARYCOMSTR")

    # Define an action to run MakeIndex on a file for acronyms.
    global MakeAcronymsAction
    if MakeAcronymsAction is None:
        MakeAcronymsAction = SCons.Action.Action("$MAKEACRONYMSCOM", "$MAKEACRONYMSCOMSTR")
    # Some Linux platforms have pdflatex set up in a way
    # that requires that the HOME environment variable be set.
    # Add it here if defined.
    v = os.environ.get('HOME')
    if v:
        env['ENV']['HOME'] = v

    if platform.system() == 'Windows':
        # allow cd command to change drives on Windows
        CDCOM = 'cd /D '
    else:
        CDCOM = 'cd '

    env['TEX'] = 'tex'
    env['TEXFLAGS'] = SCons.Util.CLVar('-interaction=nonstopmode -recorder')
    env['TEXCOM'] = CDCOM + '${TARGET.dir} && $TEX $TEXFLAGS ${SOURCE.file}'
    env['PDFTEX'] = 'pdftex'
    env['PDFTEXFLAGS'] = SCons.Util.CLVar('-interaction=nonstopmode -recorder')
    env['PDFTEXCOM'] = CDCOM + '${TARGET.dir} && $PDFTEX $PDFTEXFLAGS ${SOURCE.file}'

    env['LATEX'] = 'latex'
    env['LATEXFLAGS'] = SCons.Util.CLVar('-interaction=nonstopmode -recorder')
    env['LATEXCOM'] = CDCOM + '${TARGET.dir} && $LATEX $LATEXFLAGS ${SOURCE.file}'
    env['LATEXRETRIES'] = 4

    env['PDFLATEX'] = 'pdflatex'
    env['PDFLATEXFLAGS'] = SCons.Util.CLVar('-interaction=nonstopmode -recorder')
    env['PDFLATEXCOM'] = CDCOM + '${TARGET.dir} && $PDFLATEX $PDFLATEXFLAGS ${SOURCE.file}'

    env['BIBTEX'] = 'bibtex'
    env['BIBTEXFLAGS'] = SCons.Util.CLVar('')
    env['BIBTEXCOM'] = CDCOM + '${TARGET.dir} && $BIBTEX $BIBTEXFLAGS ${SOURCE.filebase}'

    env['BIBER'] = 'biber'
    env['BIBERFLAGS'] = SCons.Util.CLVar('')
    env['BIBERCOM'] = CDCOM + '${TARGET.dir} && $BIBER $BIBERFLAGS ${SOURCE.filebase}'

    env['MAKEINDEX'] = 'makeindex'
    env['MAKEINDEXFLAGS'] = SCons.Util.CLVar('')
    env['MAKEINDEXCOM'] = CDCOM + '${TARGET.dir} && $MAKEINDEX $MAKEINDEXFLAGS ${SOURCE.file}'

    env['MAKEGLOSSARY'] = 'makeindex'
    env['MAKEGLOSSARYSTYLE'] = '${SOURCE.filebase}.ist'
    env['MAKEGLOSSARYFLAGS'] = SCons.Util.CLVar('-s ${MAKEGLOSSARYSTYLE} -t ${SOURCE.filebase}.glg')
    env['MAKEGLOSSARYCOM'] = CDCOM + '${TARGET.dir} && $MAKEGLOSSARY ${SOURCE.filebase}.glo $MAKEGLOSSARYFLAGS -o ${SOURCE.filebase}.gls'

    env['MAKEACRONYMS'] = 'makeindex'
    env['MAKEACRONYMSSTYLE'] = '${SOURCE.filebase}.ist'
    env['MAKEACRONYMSFLAGS'] = SCons.Util.CLVar('-s ${MAKEACRONYMSSTYLE} -t ${SOURCE.filebase}.alg')
    env['MAKEACRONYMSCOM'] = CDCOM + '${TARGET.dir} && $MAKEACRONYMS ${SOURCE.filebase}.acn $MAKEACRONYMSFLAGS -o ${SOURCE.filebase}.acr'

    env['MAKENCL'] = 'makeindex'
    env['MAKENCLSTYLE'] = 'nomencl.ist'
    env['MAKENCLFLAGS'] = '-s ${MAKENCLSTYLE} -t ${SOURCE.filebase}.nlg'
    env['MAKENCLCOM'] = CDCOM + '${TARGET.dir} && $MAKENCL ${SOURCE.filebase}.nlo $MAKENCLFLAGS -o ${SOURCE.filebase}.nls'

    env['MAKENEWGLOSSARY'] = 'makeindex'
    env['MAKENEWGLOSSARYCOM'] = CDCOM + '${TARGET.dir} && $MAKENEWGLOSSARY '
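
    # For \newglossary entries the full command line is assembled at build
    # time in InternalLaTeXAuxAction from the suffixes captured in the source,
    # roughly:
    #   $MAKENEWGLOSSARYCOM ${SOURCE.filebase}<sfx2> -s ${SOURCE.filebase}.ist -t ${SOURCE.filebase}<sfx0> -o ${SOURCE.filebase}<sfx1>
    # (where <sfx0>..<sfx2> are the three suffixes taken from the \newglossary
    # command), which is why only the command prefix is configured here.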
def exists(env):
    generate_darwin(env)
    return env.Detect('tex')
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4: