SCons Wiki: variable substitution and hierarchical doxygen (Reiners)
Improvements added by Dirk Reiners: I added two (at least for me ;)) important features of doxygen: variable substitution and hierarchical doxygen files. Variable substitution allows doxygen to reference variables from the scons environment using $(VARNAME). This is very useful for things like version numbers or for only having certain parts (as defined by scons) included in the documentation without having to mess with doxygen files. Hierarchical doxygen files just interpret the @INCLUDE key as an include. I also had trouble with files that started with a key, I fixed that. Note that I'm a python newbie, so there are probably more elegant ways to do some of the things I did. Feel free to change them. Hope it helps.
This commit is contained in:
parent
b3ed295388
commit
153dcfb5b2
1 changed file with 54 additions and 13 deletions
61
__init__.py
61
__init__.py
|
|
@ -1,4 +1,8 @@
|
||||||
# vim: set et sw=3 tw=0 fo=awqorc ft=python:
|
# SCons Doxygen Builder
|
||||||
|
#
|
||||||
|
# Copyright (C) 2007 Dirk Reiners
|
||||||
|
#
|
||||||
|
# based on the version from http://www.scons.org/wiki/DoxygenBuilder?highlight=%28doxygen%29
|
||||||
#
|
#
|
||||||
# Astxx, the Asterisk C++ API and Utility Library.
|
# Astxx, the Asterisk C++ API and Utility Library.
|
||||||
# Copyright (C) 2005, 2006 Matthew A. Nicholson
|
# Copyright (C) 2005, 2006 Matthew A. Nicholson
|
||||||
|
|
@ -21,8 +25,9 @@ import os
|
||||||
import os.path
|
import os.path
|
||||||
import glob
|
import glob
|
||||||
from fnmatch import fnmatch
|
from fnmatch import fnmatch
|
||||||
|
import subprocess
|
||||||
|
|
||||||
def DoxyfileParse(file_contents):
|
def DoxyfileParse(file_contents, file_dir, env):
|
||||||
"""
|
"""
|
||||||
Parse a Doxygen source file and return a dictionary of all the values.
|
Parse a Doxygen source file and return a dictionary of all the values.
|
||||||
Values will be strings and lists of strings.
|
Values will be strings and lists of strings.
|
||||||
|
|
@ -30,8 +35,9 @@ def DoxyfileParse(file_contents):
|
||||||
data = {}
|
data = {}
|
||||||
|
|
||||||
import shlex
|
import shlex
|
||||||
|
|
||||||
lex = shlex.shlex(instream = file_contents, posix = True)
|
lex = shlex.shlex(instream = file_contents, posix = True)
|
||||||
lex.wordchars += "*+./-:"
|
lex.wordchars += "*+./-:@$()"
|
||||||
lex.whitespace = lex.whitespace.replace("\n", "")
|
lex.whitespace = lex.whitespace.replace("\n", "")
|
||||||
lex.escape = ""
|
lex.escape = ""
|
||||||
|
|
||||||
|
|
@ -39,11 +45,24 @@ def DoxyfileParse(file_contents):
|
||||||
token = lex.get_token()
|
token = lex.get_token()
|
||||||
key = token # the first token should be a key
|
key = token # the first token should be a key
|
||||||
last_token = ""
|
last_token = ""
|
||||||
key_token = False
|
key_token = True
|
||||||
next_key = False
|
|
||||||
new_data = True
|
new_data = True
|
||||||
|
|
||||||
def append_data(data, key, new_data, token):
|
def append_data(data, key, new_data, token):
|
||||||
|
if token[:2] == "$(":
|
||||||
|
try:
|
||||||
|
token = env[token[2:-1]]
|
||||||
|
except KeyError:
|
||||||
|
print "ERROR: Variable %s used in Doxygen file is not in environment!" % token
|
||||||
|
token = ""
|
||||||
|
# Convert space-separated list to actual list
|
||||||
|
token = token.split()
|
||||||
|
if len(token):
|
||||||
|
append_data(data, key, new_data, token[0])
|
||||||
|
for i in token[1:]:
|
||||||
|
append_data(data, key, True, i)
|
||||||
|
return
|
||||||
|
|
||||||
if new_data or len(data[key]) == 0:
|
if new_data or len(data[key]) == 0:
|
||||||
data[key].append(token)
|
data[key].append(token)
|
||||||
else:
|
else:
|
||||||
|
|
@ -64,6 +83,13 @@ def DoxyfileParse(file_contents):
|
||||||
data[key] = list()
|
data[key] = list()
|
||||||
elif token == "=":
|
elif token == "=":
|
||||||
data[key] = list()
|
data[key] = list()
|
||||||
|
elif key == "@INCLUDE":
|
||||||
|
|
||||||
|
filename = token
|
||||||
|
if not os.path.isabs(filename):
|
||||||
|
filename = os.path.join(file_dir, filename)
|
||||||
|
|
||||||
|
lex.push_source(open(filename), filename)
|
||||||
else:
|
else:
|
||||||
append_data( data, key, new_data, token )
|
append_data( data, key, new_data, token )
|
||||||
new_data = True
|
new_data = True
|
||||||
|
|
@ -75,6 +101,7 @@ def DoxyfileParse(file_contents):
|
||||||
new_data = False
|
new_data = False
|
||||||
append_data( data, key, new_data, '\\' )
|
append_data( data, key, new_data, '\\' )
|
||||||
|
|
||||||
|
|
||||||
# compress lists of len 1 into single strings
|
# compress lists of len 1 into single strings
|
||||||
for (k, v) in data.items():
|
for (k, v) in data.items():
|
||||||
if len(v) == 0:
|
if len(v) == 0:
|
||||||
|
|
@ -107,7 +134,9 @@ def DoxySourceScan(node, env, path):
|
||||||
|
|
||||||
sources = []
|
sources = []
|
||||||
|
|
||||||
data = DoxyfileParse(node.get_contents())
|
conf_dir = os.path.dirname(str(node))
|
||||||
|
|
||||||
|
data = DoxyfileParse(node.get_contents(), conf_dir, env)
|
||||||
|
|
||||||
if data.get("RECURSIVE", "NO") == "YES":
|
if data.get("RECURSIVE", "NO") == "YES":
|
||||||
recursive = True
|
recursive = True
|
||||||
|
|
@ -118,6 +147,8 @@ def DoxySourceScan(node, env, path):
|
||||||
exclude_patterns = data.get("EXCLUDE_PATTERNS", default_exclude_patterns)
|
exclude_patterns = data.get("EXCLUDE_PATTERNS", default_exclude_patterns)
|
||||||
|
|
||||||
for node in data.get("INPUT", []):
|
for node in data.get("INPUT", []):
|
||||||
|
if not os.path.isabs(node):
|
||||||
|
node = os.path.join(conf_dir, node)
|
||||||
if os.path.isfile(node):
|
if os.path.isfile(node):
|
||||||
sources.append(node)
|
sources.append(node)
|
||||||
elif os.path.isdir(node):
|
elif os.path.isdir(node):
|
||||||
|
|
@ -154,7 +185,7 @@ def DoxyEmitter(source, target, env):
|
||||||
"XML": ("NO", "xml"),
|
"XML": ("NO", "xml"),
|
||||||
}
|
}
|
||||||
|
|
||||||
data = DoxyfileParse(source[0].get_contents())
|
data = DoxyfileParse(source[0].get_contents(), os.path.dirname(str(source[0])), env)
|
||||||
|
|
||||||
targets = []
|
targets = []
|
||||||
out_dir = data.get("OUTPUT_DIRECTORY", ".")
|
out_dir = data.get("OUTPUT_DIRECTORY", ".")
|
||||||
|
|
@ -162,6 +193,7 @@ def DoxyEmitter(source, target, env):
|
||||||
# add our output locations
|
# add our output locations
|
||||||
for (k, v) in output_formats.items():
|
for (k, v) in output_formats.items():
|
||||||
if data.get("GENERATE_" + k, v[0]) == "YES":
|
if data.get("GENERATE_" + k, v[0]) == "YES":
|
||||||
|
print os.path.join(out_dir, data.get(k + "_OUTPUT", v[1]))
|
||||||
targets.append(env.Dir( os.path.join(out_dir, data.get(k + "_OUTPUT", v[1]))) )
|
targets.append(env.Dir( os.path.join(out_dir, data.get(k + "_OUTPUT", v[1]))) )
|
||||||
|
|
||||||
# don't clobber targets
|
# don't clobber targets
|
||||||
|
|
@ -174,6 +206,17 @@ def DoxyEmitter(source, target, env):
|
||||||
|
|
||||||
return (targets, source)
|
return (targets, source)
|
||||||
|
|
||||||
|
|
||||||
|
def DoxyAction(source, target, env):
|
||||||
|
"""Doxygen action"""
|
||||||
|
e={}
|
||||||
|
for k,v in env.Dictionary().iteritems():
|
||||||
|
e[k] = str(v)
|
||||||
|
p = subprocess.Popen("cd %s && %s %s" %
|
||||||
|
(os.path.dirname(str(source[0])), env["DOXYGEN"], os.path.basename(str(source[0]))),
|
||||||
|
shell=True, env=e)
|
||||||
|
sts = os.waitpid(p.pid, 0)
|
||||||
|
|
||||||
def generate(env):
|
def generate(env):
|
||||||
"""
|
"""
|
||||||
Add builders and construction variables for the
|
Add builders and construction variables for the
|
||||||
|
|
@ -186,12 +229,10 @@ def generate(env):
|
||||||
)
|
)
|
||||||
|
|
||||||
doxyfile_builder = env.Builder(
|
doxyfile_builder = env.Builder(
|
||||||
action = env.Action("cd ${SOURCE.dir} && ${DOXYGEN} ${SOURCE.file}"),
|
action = DoxyAction,
|
||||||
emitter = DoxyEmitter,
|
emitter = DoxyEmitter,
|
||||||
target_factory = env.fs.Entry,
|
target_factory = env.fs.Entry,
|
||||||
single_source = True,
|
single_source = True,
|
||||||
|
|
||||||
|
|
||||||
source_scanner = doxyfile_scanner,
|
source_scanner = doxyfile_scanner,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
|
||||||
Loading…
Reference in a new issue