# blob: c1608759c229dc10efeb53c9c645e2db949370d3 [file] [log] [blame]
# Copyright 2017 The ANGLE Project Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# Runs ANGLE format table and other script code generation scripts.
import argparse
from concurrent import futures
import hashlib
import json
import os
import subprocess
import sys
import platform
# Directory containing this script (ANGLE's scripts/ directory).
script_dir = sys.path[0]
# ANGLE repository root, one level above scripts/.
root_dir = os.path.abspath(os.path.join(script_dir, '..'))
# Directory holding the per-generator JSON hash files used for dirtiness checks.
hash_dir = os.path.join(script_dir, 'code_generation_hashes')
def get_child_script_dirname(script):
    """Return the absolute directory holding `script` (a path relative to ANGLE's root)."""
    absolute_script = os.path.abspath(os.path.join(root_dir, script))
    return os.path.dirname(absolute_script)
def get_executable_name(script):
    """Read `script`'s shebang line and return the interpreter to run it with.

    Only 'python3' and 'vpython3' shebangs are allowed; on Windows the
    interpreter is invoked via its '.bat' shim.
    """
    with open(script, 'r') as f:
        shebang = f.readline().strip()
    # '#!/usr/bin/env python3' -> '#!/usr/bin/env/python3' -> basename 'python3'
    interpreter = os.path.basename(shebang.replace(' ', '/'))
    assert interpreter in ['python3', 'vpython3']
    suffix = '.bat' if platform.system() == 'Windows' else ''
    return interpreter + suffix
def paths_from_auto_script(script, param):
    """Run `script` with `param` ('inputs' or 'outputs') and return the files it
    lists, as root-relative paths with forward slashes.

    The scrape of this file dropped the `try:` line, the tail of the
    `check_output` call, and the comprehension's closing bracket; restored here.
    """
    script_dir = get_child_script_dirname(script)
    # python3 (not vpython3) to get inputs/outputs faster
    exe = 'python3'
    try:
        res = subprocess.check_output([exe, os.path.basename(script), param],
                                      cwd=script_dir).decode().strip()
    except Exception:
        print('Error with auto_script %s: %s, executable %s' % (param, script, exe))
        raise
    if res == '':
        return []
    # Scripts print a comma-separated list of paths relative to their own dir.
    return [
        os.path.relpath(os.path.join(script_dir, path), root_dir).replace("\\", "/")
        for path in res.split(',')
    ]
# auto_script is a standard way for scripts to return their inputs and outputs.
def auto_script(script):
    """Return {'inputs': [...], 'outputs': [...]} as declared by `script`."""
    info = {
        'inputs': paths_from_auto_script(script, 'inputs'),
        'outputs': paths_from_auto_script(script, 'outputs'),
    }
    return info
# Maps a human-readable generator name to the generator script, relative to
# ANGLE's root.
# NOTE(review): the script paths were dropped by the scrape of this file and
# have been restored from upstream ANGLE's run_code_generation.py — verify
# each against the repository before relying on them.
generators = {
    'ANGLE format':
        'src/libANGLE/renderer/gen_angle_format_table.py',
    'ANGLE load functions table':
        'src/libANGLE/renderer/gen_load_functions_table.py',
    'ANGLE shader preprocessor':
        'src/compiler/preprocessor/generate_parser.py',
    'ANGLE shader translator':
        'src/compiler/translator/generate_parser.py',
    'D3D11 blit shader selection':
        'src/libANGLE/renderer/d3d/d3d11/gen_blit11helper.py',
    'D3D11 format':
        'src/libANGLE/renderer/d3d/d3d11/gen_texture_format_table.py',
    'DXGI format':
        'src/libANGLE/renderer/d3d/d3d11/gen_dxgi_format_table.py',
    'DXGI format support':
        'src/libANGLE/renderer/d3d/d3d11/gen_dxgi_support_tables.py',
    'Emulated HLSL functions':
        'src/compiler/translator/gen_emulated_builtin_function_tables.py',
    'Extension files':
        'src/libANGLE/gen_extensions.py',
    'GL copy conversion table':
        'src/libANGLE/gen_copy_conversion_table.py',
    'GL CTS (dEQP) build files':
        'scripts/gen_vk_gl_cts_build.py',
    'GL/EGL/WGL loader':
        'scripts/generate_loader.py',
    'GL/EGL entry points':
        'scripts/generate_entry_points.py',
    'GLenum value to string map':
        'scripts/gen_gl_enum_utils.py',
    'GL format map':
        'src/libANGLE/gen_format_map.py',
    'interpreter utils':
        'scripts/gen_interpreter_utils.py',
    'Metal format table':
        'src/libANGLE/renderer/metal/gen_mtl_format_table.py',
    'Metal default shaders':
        'src/libANGLE/renderer/metal/shaders/gen_mtl_internal_shaders.py',
    'OpenGL dispatch table':
        'src/libANGLE/renderer/gl/generate_gl_dispatch_table.py',
    'overlay fonts':
        'src/libANGLE/gen_overlay_fonts.py',
    'overlay widgets':
        'src/libANGLE/gen_overlay_widgets.py',
    'packed enum':
        'src/common/gen_packed_gl_enums.py',
    'proc table':
        'scripts/gen_proc_table.py',
    'restricted traces':
        'src/tests/restricted_traces/gen_restricted_traces.py',
    'SPIR-V helpers':
        'src/common/spirv/gen_spirv_builder_and_parser.py',
    'Static builtins':
        'src/compiler/translator/gen_builtin_symbols.py',
    'uniform type':
        'src/common/gen_uniform_type_table.py',
    'Vulkan format':
        'src/libANGLE/renderer/vulkan/gen_vk_format_table.py',
    'Vulkan internal shader programs':
        'src/libANGLE/renderer/vulkan/gen_vk_internal_shaders.py',
    'Vulkan mandatory format support table':
        'src/libANGLE/renderer/vulkan/gen_vk_mandatory_format_support_table.py',
}
# Fast and supports --verify-only without hashes.
# NOTE(review): the scraped values ended at the directory ('include/platform/',
# 'infra/specs/'); the script file names are restored from upstream ANGLE —
# verify against the repository.
hashless_generators = {
    'ANGLE features': 'include/platform/gen_features.py',
    'Test spec JSON': 'infra/specs/generate_test_spec_json.py',
}
def md5(fname):
    """Return the MD5 hex digest of the text file `fname`, read in 4K chunks.

    The file is read in text mode (matching the original), so the chunk must
    be encoded before hashing; the scrape dropped both the read callable and
    the update call from the loop.
    """
    hash_md5 = hashlib.md5()
    with open(fname, "r") as f:
        # iter() with a sentinel stops when read() returns '' at EOF.
        for chunk in iter(lambda: f.read(4096), ""):
            hash_md5.update(chunk.encode())
    return hash_md5.hexdigest()
def get_hash_file_name(name):
    """Turn a generator's display name into its hash-file name.

    Spaces and slashes both become underscores, then '.json' is appended.
    """
    sanitized = name.translate(str.maketrans(' /', '__'))
    return sanitized + '.json'
def any_hash_dirty(name, filenames, new_hashes, old_hashes):
    """Hash every file belonging to generator `name`; return True if any file
    is missing or its hash differs from `old_hashes`. Fills `new_hashes` with
    the freshly computed digests as a side effect.

    The scrape dropped the `else:` branch marker — without it, md5() would be
    called on files that were just reported as missing; restored here.
    """
    found_dirty_hash = False
    for fname in filenames:
        if not os.path.isfile(os.path.join(root_dir, fname)):
            print('File not found: "%s". Code gen dirty for %s' % (fname, name))
            found_dirty_hash = True
        else:
            new_hashes[fname] = md5(fname)
            if (not fname in old_hashes) or (old_hashes[fname] != new_hashes[fname]):
                print('Hash for "%s" dirty for %s generator.' % (fname, name))
                found_dirty_hash = True
    return found_dirty_hash
def any_old_hash_missing(all_new_hashes, all_old_hashes):
    """Return True if any previously stored hash file or entry has no
    counterpart in the newly computed hashes.

    The scrape dropped the `else:` branch marker — without it,
    all_new_hashes[file] raises KeyError for a file just reported absent;
    restored here.
    """
    result = False
    for file, old_hashes in all_old_hashes.items():
        if file not in all_new_hashes:
            print('"%s" does not exist. Code gen dirty.' % file)
            result = True
        else:
            for name, _ in old_hashes.items():
                if name not in all_new_hashes[file]:
                    print('Hash for %s is missing from "%s". Code gen is dirty.' % (name, file))
                    result = True
    return result
def update_output_hashes(script, outputs, new_hashes):
    """Recompute hashes for a generator's outputs (e.g. after formatting).

    NOTE(review): the scrape dropped the line after the missing-output print;
    upstream aborts with exit code 1 there, and without an abort md5() would
    crash on the missing file anyway — restored as an explicit exit.
    """
    for output in outputs:
        if not os.path.isfile(output):
            print('Output is missing from %s: %s' % (script, output))
            sys.exit(1)
        new_hashes[output] = md5(output)
def load_hashes():
    """Load every stored hash file from hash_dir into {filename: hash_map}.

    The scrape dropped the `try:` line paired with the dangling
    `except ValueError:`; restored around the json.load call.
    """
    hashes = {}
    for file in os.listdir(hash_dir):
        hash_fname = os.path.join(hash_dir, file)
        with open(hash_fname) as hash_file:
            try:
                hashes[file] = json.load(hash_file)
            except ValueError:
                # Re-raise with the offending file's name for easier debugging.
                raise Exception("Could not decode JSON from %s" % file)
    return hashes
def main():
    """Run (or verify) all ANGLE code generators; return a process exit code.

    Returns 0 when clean, 1 when --verify-only finds dirty hashes or a bad
    generator selection. Several physical lines were dropped by the scrape
    (argparse call headers, subprocess invocations, `os.name`); restored from
    the surrounding fragments.
    """
    all_old_hashes = load_hashes()
    all_new_hashes = {}
    any_dirty = False

    parser = argparse.ArgumentParser(description='Generate ANGLE internal code.')
    parser.add_argument(
        '--verify-only', action='store_true', help='verify hashes are not dirty')
    parser.add_argument(
        '-g', '--generator', action='append', nargs='*', type=str, dest='specified_generators')
    args = parser.parse_args()

    ranGenerators = generators
    runningSingleGenerator = False
    if (args.specified_generators):
        ranGenerators = {k: v for k, v in generators.items() if k in args.specified_generators[0]}
        runningSingleGenerator = True

    if len(ranGenerators) == 0:
        print("No valid generators specified.")
        return 1

    # Just get 'inputs' and 'outputs' from scripts but this runs the scripts so it's a bit slow
    infos = {}
    with futures.ThreadPoolExecutor(max_workers=8) as executor:
        for _, script in sorted(ranGenerators.items()):
            infos[script] = executor.submit(auto_script, script)

    for name, script in sorted(ranGenerators.items()):
        info = infos[script].result()
        fname = get_hash_file_name(name)
        filenames = info['inputs'] + info['outputs'] + [script]
        new_hashes = {}
        if fname not in all_old_hashes:
            all_old_hashes[fname] = {}
        if any_hash_dirty(name, filenames, new_hashes, all_old_hashes[fname]):
            any_dirty = True

            if not args.verify_only:
                print('Running ' + name + ' code generator')
                exe = get_executable_name(script)
                subprocess.check_call([exe, os.path.basename(script)],
                                      cwd=get_child_script_dirname(script))

        # Update the hash dictionary.
        all_new_hashes[fname] = new_hashes

    if not runningSingleGenerator and any_old_hash_missing(all_new_hashes, all_old_hashes):
        any_dirty = True

    # Handle hashless_generators separately as these don't have hash maps.
    hashless_generators_dirty = False
    for name, script in sorted(hashless_generators.items()):
        cmd = [get_executable_name(script), os.path.basename(script)]
        rc = subprocess.call(cmd + ['--verify-only'], cwd=get_child_script_dirname(script))
        if rc != 0:
            print(name + ' generator dirty')
            # Don't set any_dirty as we don't need git cl format in this case.
            hashless_generators_dirty = True

            if not args.verify_only:
                print('Running ' + name + ' code generator')
                subprocess.check_call(cmd, cwd=get_child_script_dirname(script))

    if args.verify_only:
        return int(any_dirty or hashless_generators_dirty)

    if any_dirty:
        # Windows needs the .bat shim to find git on PATH.
        args = ['git.bat'] if os.name == 'nt' else ['git']
        args += ['cl', 'format']
        print('Calling git cl format')
        subprocess.check_call(args)

        # Update the output hashes again since they can be formatted.
        for name, script in sorted(ranGenerators.items()):
            info = auto_script(script)
            fname = get_hash_file_name(name)
            update_output_hashes(name, info['outputs'], all_new_hashes[fname])

    for fname, new_hashes in all_new_hashes.items():
        hash_fname = os.path.join(hash_dir, fname)
        with open(hash_fname, "w") as f:
            json.dump(new_hashes, f, indent=2, sort_keys=True, separators=(',', ':\n '))
            f.write('\n')  # json.dump doesn't end with newline

    return 0
if __name__ == '__main__':
    # Propagate main()'s return value as the process exit code
    # (the guard's body was dropped by the scrape; restored).
    sys.exit(main())