scons: Stop generating inc.d in the isa parser.

Generating dependency/build product information in the isa parser breaks scons's
idea of how a build is supposed to work. Arm-twisting it into working forced
a lot of false dependencies which slowed down the build.

Change-Id: Iadee8c930fd7c80136d200d69870df7672a6b3ca
Reviewed-on: https://gem5-review.googlesource.com/5081
Reviewed-by: Jason Lowe-Power <jason@lowepower.com>
Maintainer: Gabe Black <gabeblack@google.com>
This commit is contained in:
Gabe Black
2017-10-14 21:39:21 -07:00
parent 1b63b756cc
commit b8efd0e854
12 changed files with 144 additions and 251 deletions

View File

@@ -1407,41 +1407,12 @@ def switching_headers(self, headers, source):
main.AddMethod(switching_headers, 'SwitchingHeaders')
# all-isas -> all-deps -> all-environs -> all_targets
main.Alias('#all-isas', [])
main.Alias('#all-deps', '#all-isas')
# Dummy target to ensure all environments are created before telling
# SCons what to actually make (the command line arguments). We attach
# them to the dependence graph after the environments are complete.
ORIG_BUILD_TARGETS = list(BUILD_TARGETS) # force a copy; gets closure to work.
def environsComplete(target, source, env):
    """Builder action run once all build environments exist.

    Attaches every target the user originally asked for on the command
    line to the #all-targets node, deferring their dependency resolution
    until the environments are fully set up.
    """
    for orig_target in ORIG_BUILD_TARGETS:
        main.Depends('#all-targets', orig_target)
# Each build/* switching_dir attaches its *-environs target to #all-environs.
main.Append(BUILDERS = {'CompleteEnvirons' :
Builder(action=MakeAction(environsComplete, None))})
main.CompleteEnvirons('#all-environs', [])
def doNothing(**ignored): pass
main.Append(BUILDERS = {'Dummy': Builder(action=MakeAction(doNothing, None))})
# The final target to which all the original targets ultimately get attached.
main.Dummy('#all-targets', '#all-environs')
BUILD_TARGETS[:] = ['#all-targets']
###################################################
#
# Define build environments for selected configurations.
#
###################################################
def variant_name(path):
    """Return the build-variant name for *path*: the final path
    component, lower-cased, with underscores mapped to dashes."""
    base = os.path.basename(path)
    return base.lower().replace('_', '-')
main['variant_name'] = variant_name
main['VARIANT_NAME'] = '${variant_name(BUILDDIR)}'
for variant_path in variant_paths:
if not GetOption('silent'):
print "Building in", variant_path
@@ -1553,26 +1524,6 @@ for variant_path in variant_paths:
# one for each variant build (debug, opt, etc.)
SConscript('src/SConscript', variant_dir = variant_path, exports = 'env')
def pairwise(iterable):
    "s -> (s0,s1), (s1,s2), (s2, s3), ..."
    # Duplicate the iterator, advance the second copy by one element,
    # then zip the two lazily so each item is paired with its successor.
    # NOTE: Python 2 idioms (`.next()`, `itertools.izip`); Python 3.10+
    # provides itertools.pairwise for the same purpose.
    a, b = itertools.tee(iterable)
    b.next()
    return itertools.izip(a, b)
variant_names = [variant_name(path) for path in variant_paths]
# Create false dependencies so SCons will parse ISAs, establish
# dependencies, and setup the build Environments serially. Either
# SCons (likely) and/or our SConscripts (possibly) cannot cope with -j
# greater than 1. It appears to be standard race condition stuff; it
# doesn't always fail, but usually, and the behaviors are different.
# Every time I tried to remove this, builds would fail in some
# creative new way. So, don't do that. You'll want to, though, because
# tests/SConscript takes a long time to make its Environments.
for t1, t2 in pairwise(sorted(variant_names)):
main.Depends('#%s-deps' % t2, '#%s-deps' % t1)
main.Depends('#%s-environs' % t2, '#%s-environs' % t1)
# base help text
Help('''
Usage: scons [scons options] [build variables] [target(s)]

View File

@@ -945,14 +945,6 @@ for source in PySource.all:
# List of constructed environments to pass back to SConstruct
date_source = Source('base/date.cc', skip_lib=True)
# Capture this directory for the closure makeEnv, otherwise when it is
# called, it won't know what directory it should use.
variant_dir = Dir('.').path
def variant(*path):
    # Join the given path components beneath the captured variant
    # build directory.
    joined = os.path.join(variant_dir, *path)
    return joined

def variantd(*path):
    # Directory form of variant(): same path with a trailing separator.
    return '%s/' % variant(*path)
# Function to create a new build environment as clone of current
# environment 'env' with modified object suffix and optional stripped
# binary. Additional keyword arguments are appended to corresponding
@@ -960,9 +952,9 @@ def variantd(*path):
def makeEnv(env, label, objsfx, strip=False, disable_partial=False, **kwargs):
# SCons doesn't know to append a library suffix when there is a '.' in the
# name. Use '_' instead.
libname = variant('gem5_' + label)
exename = variant('gem5.' + label)
secondary_exename = variant('m5.' + label)
libname = 'gem5_' + label
exename = 'gem5.' + label
secondary_exename = 'm5.' + label
new_env = env.Clone(OBJSUFFIX=objsfx, SHOBJSUFFIX=objsfx + 's')
new_env.Label = label
@@ -1101,7 +1093,7 @@ def makeEnv(env, label, objsfx, strip=False, disable_partial=False, **kwargs):
test_objs = [ make_obj(s, static=True) for s in test_sources ]
if test.main:
test_objs += main_objs
path = variant('unittest/%s.%s' % (test.target, label))
path = 'unittest/%s.%s' % (test.target, label)
new_env.Program(path, test_objs + static_objs)
progname = exename
@@ -1125,7 +1117,7 @@ def makeEnv(env, label, objsfx, strip=False, disable_partial=False, **kwargs):
# Set up regression tests.
SConscript(os.path.join(env.root.abspath, 'tests', 'SConscript'),
variant_dir=variantd('tests', new_env.Label),
variant_dir=Dir('tests').Dir(new_env.Label).path,
exports={ 'env' : new_env }, duplicate=False)
# Start out with the compiler flags common to all compilers,
@@ -1192,67 +1184,41 @@ needed_envs = [identifyTarget(target) for target in BUILD_TARGETS]
if 'all' in needed_envs:
needed_envs += target_types
def makeEnvirons(target, source, env):
# cause any later Source() calls to be fatal, as a diagnostic.
Source.done()
# Debug binary
if 'debug' in needed_envs:
makeEnv(env, 'debug', '.do',
CCFLAGS = Split(ccflags['debug']),
CPPDEFINES = ['DEBUG', 'TRACING_ON=1'],
LINKFLAGS = Split(ldflags['debug']))
# Debug binary
if 'debug' in needed_envs:
makeEnv(env, 'debug', '.do',
CCFLAGS = Split(ccflags['debug']),
CPPDEFINES = ['DEBUG', 'TRACING_ON=1'],
LINKFLAGS = Split(ldflags['debug']))
# Optimized binary
if 'opt' in needed_envs:
makeEnv(env, 'opt', '.o',
CCFLAGS = Split(ccflags['opt']),
CPPDEFINES = ['TRACING_ON=1'],
LINKFLAGS = Split(ldflags['opt']))
# Optimized binary
if 'opt' in needed_envs:
makeEnv(env, 'opt', '.o',
CCFLAGS = Split(ccflags['opt']),
CPPDEFINES = ['TRACING_ON=1'],
LINKFLAGS = Split(ldflags['opt']))
# "Fast" binary
if 'fast' in needed_envs:
disable_partial = \
env.get('BROKEN_INCREMENTAL_LTO', False) and \
GetOption('force_lto')
makeEnv(env, 'fast', '.fo', strip = True,
CCFLAGS = Split(ccflags['fast']),
CPPDEFINES = ['NDEBUG', 'TRACING_ON=0'],
LINKFLAGS = Split(ldflags['fast']),
disable_partial=disable_partial)
# "Fast" binary
if 'fast' in needed_envs:
disable_partial = \
env.get('BROKEN_INCREMENTAL_LTO', False) and \
GetOption('force_lto')
makeEnv(env, 'fast', '.fo', strip = True,
CCFLAGS = Split(ccflags['fast']),
CPPDEFINES = ['NDEBUG', 'TRACING_ON=0'],
LINKFLAGS = Split(ldflags['fast']),
disable_partial=disable_partial)
# Profiled binary using gprof
if 'prof' in needed_envs:
makeEnv(env, 'prof', '.po',
CCFLAGS = Split(ccflags['prof']),
CPPDEFINES = ['NDEBUG', 'TRACING_ON=0'],
LINKFLAGS = Split(ldflags['prof']))
# Profiled binary using gprof
if 'prof' in needed_envs:
makeEnv(env, 'prof', '.po',
CCFLAGS = Split(ccflags['prof']),
CPPDEFINES = ['NDEBUG', 'TRACING_ON=0'],
LINKFLAGS = Split(ldflags['prof']))
# Profiled binary using google-pprof
if 'perf' in needed_envs:
makeEnv(env, 'perf', '.gpo',
CCFLAGS = Split(ccflags['perf']),
CPPDEFINES = ['NDEBUG', 'TRACING_ON=0'],
LINKFLAGS = Split(ldflags['perf']))
# The MakeEnvirons Builder defers the full dependency collection until
# after processing the ISA definition (due to dynamically generated
# source files). Add this dependency to all targets so they will wait
# until the environments are completely set up. Otherwise, a second
# process (e.g. -j2 or higher) will try to compile the requested target,
# not know how, and fail.
env.Append(BUILDERS = {'MakeEnvirons' :
Builder(action=MakeAction(makeEnvirons,
Transform("ENVIRONS", 1)))})
isa_target = '#${VARIANT_NAME}-deps'
environs = '#${VARIANT_NAME}-environs'
env.Depends('#all-deps', isa_target)
env.Depends('#all-environs', environs)
env.ScanISA(isa_target, File('arch/%s/generated/inc.d' % env['TARGET_ISA']))
envSetup = env.MakeEnvirons(environs, isa_target)
# make sure no -deps targets occur before all ISAs are complete
env.Depends(isa_target, '#all-isas')
# likewise for -environs targets and all the -deps targets
env.Depends(environs, '#all-deps')
# Profiled binary using google-pprof
if 'perf' in needed_envs:
makeEnv(env, 'perf', '.gpo',
CCFLAGS = Split(ccflags['perf']),
CPPDEFINES = ['NDEBUG', 'TRACING_ON=0'],
LINKFLAGS = Split(ldflags['perf']))

View File

@@ -97,120 +97,130 @@ if env['BUILD_GPU']:
# Build a SCons scanner for ISA files
#
import SCons.Scanner
import SCons.Tool
isa_scanner = SCons.Scanner.Classic("ISAScan",
[".isa", ".ISA"],
"SRCDIR",
r'^\s*##include\s+"([\w/.-]*)"')
scanner = SCons.Scanner.Classic("ISAScan",
[".isa", ".ISA"],
"SRCDIR",
r'^\s*##include\s+"([\w/.-]*)"')
env.Append(SCANNERS = isa_scanner)
env.Append(SCANNERS=scanner)
# Tell scons that when it sees a cc.inc file, it should scan it for includes.
SCons.Tool.SourceFileScanner.add_scanner('.cc.inc', SCons.Tool.CScanner)
#
# Now create a Builder object that uses isa_parser.py to generate C++
# output from the ISA description (*.isa) files.
#
isa_parser = File('isa_parser.py')
# The emitter patches up the sources & targets to include the
# autogenerated files as targets and isa parser itself as a source.
def isa_desc_emitter(target, source, env):
    # SCons emitter for the ISADesc builder: patches the source and
    # target lists before the build action runs.
    # List the isa parser as a source.
    source += [
        isa_parser,
        Value("ExecContext"),
    ]
    # Specify different targets depending on if we're running the ISA
    # parser for its dependency information, or for the generated files.
    # (As an optimization, the ISA parser detects the useless second run
    # and skips doing any work, if the first run was performed, since it
    # always generates all its files). The way we track this in SCons is the
    # <arch>_isa_outputs value in the environment (env). If it's unset, we
    # don't know what the dependencies are so we ask for generated/inc.d to
    # be generated so they can be acquired. If we know what they are, then
    # it's because we've already processed inc.d and then claim that our
    # outputs (targets) will be thus.
    isa = env['TARGET_ISA']
    key = '%s_isa_outputs' % isa
    if key in env:
        targets = [ os.path.join('generated', f) for f in env[key] ]
    else:
        targets = [ os.path.join('generated','inc.d') ]
    def prefix(s):
        # Anchor each generated name under the arch build directory
        # (the parent of the first target's directory).
        return os.path.join(target[0].dir.up().abspath, s)
    return [ prefix(t) for t in targets ], source
ARCH_DIR = Dir('.')
parser_py = File('isa_parser.py')
micro_asm_py = File('micro_asm.py')
# import ply here because SCons screws with sys.path when performing actions.
import ply
def isa_desc_action_func(target, source, env):
# Add the current directory to the system path so we can import files
sys.path[0:0] = [ ARCH_DIR.srcnode().abspath ]
def run_parser(target, source, env):
# Add the current directory to the system path so we can import files.
sys.path[0:0] = [ parser_py.dir.abspath ]
import isa_parser
# Skip over the ISA description itself and the parser to the CPU models.
models = [ s.get_contents() for s in source[2:] ]
parser = isa_parser.ISAParser(target[0].dir.abspath)
parser.parse_isa_desc(source[0].abspath)
isa_desc_action = MakeAction(isa_desc_action_func, Transform("ISA DESC", 1))
# Also include the CheckerCPU as one of the models if it is being
# enabled via command line.
isa_desc_builder = Builder(action=isa_desc_action, emitter=isa_desc_emitter)
desc_action = MakeAction(run_parser, Transform("ISA DESC", 1))
env.Append(BUILDERS = { 'ISADesc' : isa_desc_builder })
IsaDescBuilder = Builder(action=desc_action)
# The ISA is generated twice: the first time to find out what it generates,
# and the second time to make scons happy by telling the ISADesc builder
# what it will make before it builds it.
def scan_isa_deps(target, source, env):
# Process dependency file generated by the ISA parser --
# add the listed files to the dependency tree of the build.
source = source[0]
archbase = source.dir.up().path
try:
depfile = open(source.abspath, 'r')
except:
print "scan_isa_deps: Can't open ISA deps file '%s' in %s" % \
(source.path,os.getcwd())
raise
# ISAs should use this function to set up an IsaDescBuilder and not try to
# set one up manually.
def ISADesc(desc, decoder_splits=1, exec_splits=1):
'''Set up a builder for an ISA description.
# Scan through the lines
targets = {}
for line in depfile:
# Read the dependency line with the format
# <target file>: [ <dependent file>* ]
m = re.match(r'^\s*([^:]+\.([^\.:]+))\s*:\s*(.*)', line)
assert(m)
targ, extn = m.group(1,2)
deps = m.group(3).split()
The decoder_splits and exec_splits parameters let us determine what
files the isa parser is actually going to generate. This needs to match
what files are actually generated, and there's no specific check for that
right now.
files = [ targ ] + deps
for f in files:
targets[f] = True
# Eliminate unnecessary re-generation if we already generated it
env.Precious(os.path.join(archbase, 'generated', f))
If the parser itself is responsible for generating a list of its products
and their dependencies, then that output can be used to set up the right
dependencies. This is what we used to do. The problem is that scons
fundamentally doesn't support using a build product to affect its graph
of possible products, dependencies, builders, etc. There are a couple ways
to work around that limitation.
files = [ os.path.join(archbase, 'generated', f) for f in files ]
One option is to compute dependencies while the build phase of scons is
running. That method can be quite complicated and cumbersome, because we
have to make sure our modifications are made before scons tries to
consume them. There's also no guarantee that this mechanism will work since
it subverts scons expectations and changes things behind its back. This
was implemented previously and constrained the build's parallelism
significantly.
if extn == 'cc':
Source(os.path.join(archbase,'generated', targ))
depfile.close()
env[env['TARGET_ISA'] + '_isa_outputs'] = targets.keys()
Another option would be to recursively call scons to have it update the
list of products/dependencies during the setup phase of this invocation of
scons. The problem with that is that it would be very difficult to make
the sub-invocation of scons observe the options passed to the primary one
in all possible cases, or to even determine conclusively what the name of
the scons executable is in the first place.
isa = env.ISADesc(os.path.join(archbase,'isa','main.isa'))
for t in targets:
env.Depends('#all-isas', isa)
Possible future changes to the isa parser might make it easier to
determine what files it would generate, perhaps because there was a more
direct correspondence between input files and output files. Or, if the
parser could run quickly and determine what its output files would be
without having to actually generate those files, then it could be run
unconditionally without slowing down all builds or touching the output
files unnecessarily.
'''
generated_dir = File(desc).dir.up().Dir('generated')
def gen_file(name):
return generated_dir.File(name)
env.Append(BUILDERS = {'ScanISA' :
Builder(action=MakeAction(scan_isa_deps,
Transform("NEW DEPS", 1)))})
gen = []
def add_gen(name):
gen.append(gen_file(name))
# Tell scons about the various files the ISA parser will generate.
add_gen('decoder-g.cc.inc')
add_gen('decoder-ns.cc.inc')
add_gen('decode-method.cc.inc')
add_gen('decoder.hh')
add_gen('decoder-g.hh.inc')
add_gen('decoder-ns.hh.inc')
add_gen('exec-g.cc.inc')
add_gen('exec-ns.cc.inc')
add_gen('max_inst_regs.hh')
# These generated files are also top level sources.
def source_gen(name):
add_gen(name)
Source(gen_file(name))
source_gen('decoder.cc')
if decoder_splits == 1:
source_gen('inst-constrs.cc')
else:
for i in range(1, decoder_splits + 1):
source_gen('inst-constrs-%d.cc' % i)
if exec_splits == 1:
source_gen('generic_cpu_exec.cc')
else:
for i in range(1, exec_splits + 1):
source_gen('generic_cpu_exec_%d.cc' % i)
# Actually create the builder.
sources = [desc, parser_py, micro_asm_py]
IsaDescBuilder(target=gen, source=sources, env=env)
return gen
Export('ISADesc')
DebugFlag('IntRegs')
DebugFlag('FloatRegs')

View File

@@ -61,4 +61,4 @@ if env['TARGET_ISA'] == 'alpha':
SimObject('AlphaTLB.py')
# Add in files generated by the ISA description.
env.ISADesc('isa/main.isa')
ISADesc('isa/main.isa')

View File

@@ -98,4 +98,4 @@ if env['TARGET_ISA'] == 'arm':
DebugFlag('TLBVerbose')
# Add files generated by the ISA description.
env.ISADesc('isa/main.isa')
ISADesc('isa/main.isa', decoder_splits=3, exec_splits=6)

View File

@@ -1573,46 +1573,31 @@ class ISAParser(Grammar):
# These small files make it much clearer how this tool works, since
# you directly see the chunks emitted as files that are #include'd.
def write_top_level_files(self):
dep = self.open('inc.d', bare=True)
# decoder header - everything depends on this
file = 'decoder.hh'
with self.open(file) as f:
inc = []
fn = 'decoder-g.hh.inc'
assert(fn in self.files)
f.write('#include "%s"\n' % fn)
inc.append(fn)
fn = 'decoder-ns.hh.inc'
assert(fn in self.files)
f.write('namespace %s {\n#include "%s"\n}\n'
% (self.namespace, fn))
inc.append(fn)
print >>dep, file+':', ' '.join(inc)
# decoder method - cannot be split
file = 'decoder.cc'
with self.open(file) as f:
inc = []
fn = 'decoder-g.cc.inc'
assert(fn in self.files)
f.write('#include "%s"\n' % fn)
inc.append(fn)
fn = 'decoder.hh'
f.write('#include "%s"\n' % fn)
inc.append(fn)
fn = 'decode-method.cc.inc'
# is guaranteed to have been written for parse to complete
f.write('#include "%s"\n' % fn)
inc.append(fn)
print >>dep, file+':', ' '.join(inc)
extn = re.compile('(\.[^\.]+)$')
@@ -1625,16 +1610,12 @@ class ISAParser(Grammar):
else:
file = file_
with self.open(file) as f:
inc = []
fn = 'decoder-g.cc.inc'
assert(fn in self.files)
f.write('#include "%s"\n' % fn)
inc.append(fn)
fn = 'decoder.hh'
f.write('#include "%s"\n' % fn)
inc.append(fn)
fn = 'decoder-ns.cc.inc'
assert(fn in self.files)
@@ -1643,9 +1624,6 @@ class ISAParser(Grammar):
print >>f, '#define __SPLIT %u' % i
print >>f, '#include "%s"' % fn
print >>f, '}'
inc.append(fn)
print >>dep, file+':', ' '.join(inc)
# instruction execution per-CPU model
splits = self.splits[self.get_file('exec')]
@@ -1656,18 +1634,14 @@ class ISAParser(Grammar):
else:
file = cpu.filename
with self.open(file) as f:
inc = []
fn = 'exec-g.cc.inc'
assert(fn in self.files)
f.write('#include "%s"\n' % fn)
inc.append(fn)
f.write(cpu.includes+"\n")
fn = 'decoder.hh'
f.write('#include "%s"\n' % fn)
inc.append(fn)
fn = 'exec-ns.cc.inc'
assert(fn in self.files)
@@ -1678,10 +1652,6 @@ class ISAParser(Grammar):
print >>f, '#define __SPLIT %u' % i
print >>f, '#include "%s"' % fn
print >>f, '}'
inc.append(fn)
inc.append("decoder.hh")
print >>dep, file+':', ' '.join(inc)
# max_inst_regs.hh
self.update('max_inst_regs.hh',
@@ -1689,10 +1659,6 @@ class ISAParser(Grammar):
const int MaxInstSrcRegs = %(maxInstSrcRegs)d;
const int MaxInstDestRegs = %(maxInstDestRegs)d;
const int MaxMiscDestRegs = %(maxMiscDestRegs)d;\n}\n''' % self)
print >>dep, 'max_inst_regs.hh:'
dep.close()
scaremonger_template ='''// DO NOT EDIT
// This file was automatically generated from an ISA description:

View File

@@ -59,4 +59,4 @@ if env['TARGET_ISA'] == 'mips':
DebugFlag('MipsPRA')
env.ISADesc('isa/main.isa')
ISADesc('isa/main.isa')

View File

@@ -59,4 +59,4 @@ if env['TARGET_ISA'] == 'power':
DebugFlag('Power')
env.ISADesc('isa/main.isa')
ISADesc('isa/main.isa')

View File

@@ -70,4 +70,4 @@ if env['TARGET_ISA'] == 'riscv':
DebugFlag('RiscvTLB')
# Add in files generated by the ISA description.
env.ISADesc('isa/main.isa')
ISADesc('isa/main.isa')

View File

@@ -61,4 +61,4 @@ if env['TARGET_ISA'] == 'sparc':
DebugFlag('Sparc', "Generic SPARC ISA stuff")
DebugFlag('RegisterWindows', "Register window manipulation")
env.ISADesc('isa/main.isa')
ISADesc('isa/main.isa')

View File

@@ -307,7 +307,7 @@ if env['TARGET_ISA'] == 'x86':
# Add in files generated by the ISA description.
isa_desc_files = env.ISADesc('isa/main.isa')
isa_desc_files = ISADesc('isa/main.isa')
for f in isa_desc_files:
# Add in python file dependencies that won't be caught otherwise
for pyfile in python_files: