diff --git a/.flake8 b/.flake8
deleted file mode 100644
index eee7c33b89d262..00000000000000
--- a/.flake8
+++ /dev/null
@@ -1,3 +0,0 @@
-[flake8]
-exclude=.git,deps,lib,src,tools/gyp,tools/inspector_protocol,tools/pip,tools/v8_gypfiles/broken
-ignore=E1,E2,E3,E4,E5,E7,W5,W6
diff --git a/tools/gyp/pylib/gyp/input.py b/tools/gyp/pylib/gyp/input.py
index cdf33fd9469325..dde2823a9fa424 100644
--- a/tools/gyp/pylib/gyp/input.py
+++ b/tools/gyp/pylib/gyp/input.py
@@ -2,14 +2,10 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-from compiler.ast import Const
-from compiler.ast import Dict
-from compiler.ast import Discard
-from compiler.ast import List
-from compiler.ast import Module
-from compiler.ast import Node
-from compiler.ast import Stmt
-import compiler
+from __future__ import print_function
+
+import ast
+
 import gyp.common
 import gyp.simple_copy
 import multiprocessing
@@ -33,7 +29,6 @@
   'shared_library',
   'loadable_module',
   'mac_kernel_extension',
-  'windows_driver',
 ]
 
 # A list of sections that contain links to other targets.
@@ -183,43 +178,39 @@ def CheckedEval(file_contents):
 
   Note that this is slower than eval() is.
   """
-  ast = compiler.parse(file_contents)
-  assert isinstance(ast, Module)
-  c1 = ast.getChildren()
-  assert c1[0] is None
-  assert isinstance(c1[1], Stmt)
-  c2 = c1[1].getChildren()
-  assert isinstance(c2[0], Discard)
-  c3 = c2[0].getChildren()
-  assert len(c3) == 1
-  return CheckNode(c3[0], [])
+  syntax_tree = ast.parse(file_contents)
+  assert isinstance(syntax_tree, ast.Module)
+  c1 = syntax_tree.body
+  assert len(c1) == 1
+  c2 = c1[0]
+  assert isinstance(c2, ast.Expr)
+  return CheckNode(c2.value, [])
 
 
 def CheckNode(node, keypath):
-  if isinstance(node, Dict):
-    c = node.getChildren()
+  if isinstance(node, ast.Dict):
     dict = {}
-    for n in range(0, len(c), 2):
-      assert isinstance(c[n], Const)
-      key = c[n].getChildren()[0]
+    for key, value in zip(node.keys, node.values):
+      assert isinstance(key, ast.Str)
+      key = key.s
       if key in dict:
         raise GypError("Key '" + key + "' repeated at level " +
               repr(len(keypath) + 1) + " with key path '" +
               '.'.join(keypath) + "'")
       kp = list(keypath)  # Make a copy of the list for descending this node.
       kp.append(key)
-      dict[key] = CheckNode(c[n + 1], kp)
+      dict[key] = CheckNode(value, kp)
     return dict
-  elif isinstance(node, List):
-    c = node.getChildren()
+  elif isinstance(node, ast.List):
     children = []
-    for index, child in enumerate(c):
+    for index, child in enumerate(node.elts):
      kp = list(keypath)  # Copy list.
       kp.append(repr(index))
       children.append(CheckNode(child, kp))
     return children
-  elif isinstance(node, Const):
-    return node.getChildren()[0]
+  elif isinstance(node, ast.Str):
+    return node.s
   else:
     raise TypeError("Unknown AST node at key path '" + '.'.join(keypath) +
          "': " + repr(node))
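
The CheckedEval()/CheckNode() rewrite above is the core of the port: Python 3 removed the old compiler package, so the positional getChildren() walk becomes a walk over typed ast node fields (Module.body, Dict.keys/values, List.elts, Str.s). As a minimal standalone sketch of the same pattern (checked_eval and _check are illustrative names, not part of the patch; note that Python 3.8+ parses string literals as ast.Constant, which ast.Str still matches through a deprecated alias on the versions this patch targeted):

    import ast

    def checked_eval(source):
        # Parse the text instead of eval()'ing it, then accept literal nodes only.
        tree = ast.parse(source)
        assert isinstance(tree, ast.Module) and len(tree.body) == 1
        expr = tree.body[0]
        assert isinstance(expr, ast.Expr)
        return _check(expr.value)

    def _check(node):
        if isinstance(node, ast.Dict):
            return {_check(k): _check(v) for k, v in zip(node.keys, node.values)}
        elif isinstance(node, ast.List):
            return [_check(child) for child in node.elts]
        elif isinstance(node, ast.Str):  # string constants only, as in the patch
            return node.s
        raise TypeError('Unknown AST node: %r' % node)

    print(checked_eval("{'targets': ['a.gyp', 'b.gyp']}"))
    # {'targets': ['a.gyp', 'b.gyp']}

Because only dict, list, and string literals are accepted, anything else in a .gyp file still fails with TypeError rather than being executed, which is the whole point of CheckedEval() over a bare eval().
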
@@ -231,7 +222,15 @@ def LoadOneBuildFile(build_file_path, data, aux_data, includes,
     return data[build_file_path]
 
   if os.path.exists(build_file_path):
-    build_file_contents = open(build_file_path).read()
+    # Open the build file for read ('r') with universal-newlines mode ('U'),
+    # to make sure platform-specific newlines ('\r\n' or '\r') are converted
+    # to '\n', which would otherwise make eval() fail.
+    if sys.platform == 'zos':
+      # On z/OS, universal-newlines mode treats the file as an ASCII file.
+      # But since node-gyp produces EBCDIC files, do not use that mode.
+      build_file_contents = open(build_file_path, 'r').read()
+    else:
+      build_file_contents = open(build_file_path, 'rU').read()
   else:
     raise GypError("%s not found (cwd: %s)" % (build_file_path, os.getcwd()))
 
@@ -242,10 +241,10 @@ def LoadOneBuildFile(build_file_path, data, aux_data, includes,
     else:
       build_file_data = eval(build_file_contents, {'__builtins__': None}, None)
-  except SyntaxError, e:
+  except SyntaxError as e:
     e.filename = build_file_path
     raise
-  except Exception, e:
+  except Exception as e:
     gyp.common.ExceptionAppend(e, 'while reading ' + build_file_path)
     raise
 
@@ -265,7 +264,7 @@ def LoadOneBuildFile(build_file_path, data, aux_data, includes,
     else:
       LoadBuildFileIncludesIntoDict(build_file_data, build_file_path, data,
                                     aux_data, None, check)
-  except Exception, e:
+  except Exception as e:
     gyp.common.ExceptionAppend(e,
                                'while reading includes of ' + build_file_path)
     raise
 
@@ -302,7 +301,7 @@ def LoadBuildFileIncludesIntoDict(subdict, subdict_path, data, aux_data,
                   subdict_path, include)
 
   # Recurse into subdictionaries.
-  for k, v in subdict.iteritems():
+  for k, v in subdict.items():
     if type(v) is dict:
       LoadBuildFileIncludesIntoDict(v, subdict_path, data, aux_data,
                                     None, check)
@@ -467,7 +466,7 @@ def LoadTargetBuildFile(build_file_path, data, aux_data, variables, includes,
       try:
         LoadTargetBuildFile(dependency, data, aux_data, variables,
                             includes, depth, check, load_dependencies)
-      except Exception, e:
+      except Exception as e:
         gyp.common.ExceptionAppend(
           e, 'while loading dependencies of %s' % build_file_path)
         raise
@@ -488,7 +487,7 @@ def CallLoadTargetBuildFile(global_flags,
     signal.signal(signal.SIGINT, signal.SIG_IGN)
 
     # Apply globals so that the worker process behaves the same.
-    for key, value in global_flags.iteritems():
+    for key, value in global_flags.items():
       globals()[key] = value
 
     SetGeneratorGlobals(generator_input_info)
@@ -510,12 +509,12 @@ def CallLoadTargetBuildFile(global_flags,
 
     return (build_file_path, build_file_data, dependencies)
-  except GypError, e:
+  except GypError as e:
     sys.stderr.write("gyp: %s\n" % e)
     return None
-  except Exception, e:
-    print >>sys.stderr, 'Exception:', e
-    print >>sys.stderr, traceback.format_exc()
+  except Exception as e:
+    print('Exception:', e, file=sys.stderr)
+    print(traceback.format_exc(), file=sys.stderr)
     return None
 
@@ -605,7 +604,7 @@ def LoadTargetBuildFilesParallel(build_files, data, variables, includes, depth,
           args = (global_flags, dependency, variables,
                   includes, depth, check, generator_input_info),
           callback = parallel_state.LoadTargetBuildFileCallback)
-  except KeyboardInterrupt, e:
+  except KeyboardInterrupt as e:
     parallel_state.pool.terminate()
     raise e
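
Everything from here to the end of the file follows two mechanical rules. First, "except ExcType, e:" is a syntax error on Python 3, while "except ExcType as e:" works on Python 2.6+ and 3 alike. Second, "print >>sys.stderr, ..." only parses as the Python 2 print statement, so it becomes the print() function, enabled on Python 2 by the "from __future__ import print_function" added at the top of the file. A minimal illustration (the load_file() helper is hypothetical, not from the patch):

    from __future__ import print_function

    import sys
    import traceback

    def load_file(path):
        try:
            with open(path) as f:
                return f.read()
        except IOError as e:  # 'except IOError, e:' would not parse on Python 3
            print('Exception:', e, file=sys.stderr)
            print(traceback.format_exc(), file=sys.stderr)
            raise
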
@@ -881,7 +880,6 @@ def ExpandVariables(input, phase, variables, build_file):
         oldwd = os.getcwd()  # Python doesn't like os.open('.'): no fchdir.
         if build_file_dir:  # build_file_dir may be None (see above).
           os.chdir(build_file_dir)
-        sys.path.append(os.getcwd())
         try:
           parsed_contents = shlex.split(contents)
@@ -892,7 +890,6 @@ def ExpandVariables(input, phase, variables, build_file):
                            "module (%s): %s" % (parsed_contents[0], e))
           replacement = str(py_module.DoMain(parsed_contents[1:])).rstrip()
         finally:
-          sys.path.pop()
           os.chdir(oldwd)
 
         assert replacement != None
       elif command_string:
@@ -907,7 +904,7 @@ def ExpandVariables(input, phase, variables, build_file):
                                   stderr=subprocess.PIPE,
                                   stdin=subprocess.PIPE,
                                   cwd=build_file_dir)
-        except Exception, e:
+        except Exception as e:
          raise GypError("%s while executing command '%s' in %s" %
                          (e, contents, build_file))
 
@@ -947,8 +944,12 @@ def ExpandVariables(input, phase, variables, build_file):
       else:
         replacement = variables[contents]
 
+    if isinstance(replacement, bytes) and not isinstance(replacement, str):
+      replacement = replacement.decode("utf-8")  # done on Python 3 only
     if type(replacement) is list:
       for item in replacement:
+        if isinstance(item, bytes) and not isinstance(item, str):
+          item = item.decode("utf-8")  # done on Python 3 only
         if not contents[-1] == '/' and type(item) not in (str, int):
           raise GypError('Variable ' + contents +
                          ' must expand to a string or list of strings; ' +
@@ -1021,7 +1022,7 @@ def ExpandVariables(input, phase, variables, build_file):
   # Convert all strings that are canonically-represented integers into integers.
   if type(output) is list:
-    for index in xrange(0, len(output)):
+    for index in range(0, len(output)):
       if IsStrCanonicalInt(output[index]):
         output[index] = int(output[index])
   elif IsStrCanonicalInt(output):
@@ -1092,13 +1093,13 @@ def EvalSingleCondition(
     if eval(ast_code, {'__builtins__': None}, variables):
       return true_dict
     return false_dict
-  except SyntaxError, e:
+  except SyntaxError as e:
     syntax_error = SyntaxError('%s while evaluating condition \'%s\' in %s '
                                'at character %d.' %
                                (str(e.args[0]), e.text, build_file, e.offset),
                                e.filename, e.lineno, e.offset, e.text)
     raise syntax_error
-  except NameError, e:
+  except NameError as e:
     gyp.common.ExceptionAppend(e, 'while evaluating condition \'%s\' in %s' %
                                (cond_expr_expanded, build_file))
     raise GypError(e)
@@ -1153,7 +1154,7 @@ def ProcessConditionsInDict(the_dict, phase, variables, build_file):
 def LoadAutomaticVariablesFromDict(variables, the_dict):
   # Any keys with plain string values in the_dict become automatic variables.
   # The variable name is the key name with a "_" character prepended.
-  for key, value in the_dict.iteritems():
+  for key, value in the_dict.items():
     if type(value) in (str, int, list):
       variables['_' + key] = value
 
@@ -1166,7 +1167,7 @@ def LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key):
   # the_dict in the_dict's parent dict.  If the_dict's parent is not a dict
   # (it could be a list or it could be parentless because it is a root dict),
   # the_dict_key will be None.
-  for key, value in the_dict.get('variables', {}).iteritems():
+  for key, value in the_dict.get('variables', {}).items():
     if type(value) not in (str, int, list):
       continue
 
@@ -1175,7 +1176,7 @@ def LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key):
     if variable_name in variables:
      # If the variable is already set, don't set it.
       continue
-    if the_dict_key is 'variables' and variable_name in the_dict:
+    if the_dict_key == 'variables' and variable_name in the_dict:
       # If the variable is set without a % in the_dict, and the_dict is a
       # variables dict (making |variables| a variables sub-dict of a
       # variables dict), use the_dict's definition.
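
Two separate Python 3 problems are handled in the ExpandVariables() hunks above. Command output and some expansions arrive as bytes on Python 3 while gyp's string handling assumes str, so the new isinstance(..., bytes) guards decode only in that case; on Python 2 they are no-ops, because bytes is just an alias for str there. And "the_dict_key is 'variables'" compared string identity, which only passed thanks to CPython's string interning; "==" is the correct comparison. A standalone sketch of the decode guard (illustrative, not gyp code):

    import subprocess

    out = subprocess.check_output(['echo', 'hello'])  # bytes on Python 3
    if isinstance(out, bytes) and not isinstance(out, str):
        out = out.decode('utf-8')  # runs on Python 3 only; skipped on Python 2
    print(out.strip())
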
@@ -1205,7 +1206,7 @@ def ProcessVariablesAndConditionsInDict(the_dict, phase, variables_in,
     # list before we process them so that you can reference one
     # variable from another.  They will be fully expanded by recursion
     # in ExpandVariables.
-    for key, value in the_dict['variables'].iteritems():
+    for key, value in the_dict['variables'].items():
       variables[key] = value
 
     # Handle the associated variables dict first, so that any variable
@@ -1218,7 +1219,7 @@ def ProcessVariablesAndConditionsInDict(the_dict, phase, variables_in,
 
   LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key)
 
-  for key, value in the_dict.iteritems():
+  for key, value in the_dict.items():
     # Skip "variables", which was already processed if present.
     if key != 'variables' and type(value) is str:
       expanded = ExpandVariables(value, phase, variables, build_file)
@@ -1276,7 +1277,7 @@ def ProcessVariablesAndConditionsInDict(the_dict, phase, variables_in,
 
   # Recurse into child dicts, or process child lists which may result in
   # further recursion into descendant dicts.
-  for key, value in the_dict.iteritems():
+  for key, value in the_dict.items():
     # Skip "variables" and string values, which were already processed if
     # present.
     if key == 'variables' or type(value) is str:
@@ -1373,12 +1374,12 @@ def QualifyDependencies(targets):
                              for dep in dependency_sections
                              for op in ('', '!', '/')]
 
-  for target, target_dict in targets.iteritems():
+  for target, target_dict in targets.items():
     target_build_file = gyp.common.BuildFile(target)
     toolset = target_dict['toolset']
     for dependency_key in all_dependency_sections:
       dependencies = target_dict.get(dependency_key, [])
-      for index in xrange(0, len(dependencies)):
+      for index in range(0, len(dependencies)):
         dep_file, dep_target, dep_toolset = gyp.common.ResolveTarget(
             target_build_file, dependencies[index], toolset)
         if not multiple_toolsets:
@@ -1413,13 +1414,13 @@ def ExpandWildcardDependencies(targets, data):
   dependency list, must be qualified when this function is called.
   """
 
-  for target, target_dict in targets.iteritems():
+  for target, target_dict in targets.items():
     toolset = target_dict['toolset']
     target_build_file = gyp.common.BuildFile(target)
     for dependency_key in dependency_sections:
       dependencies = target_dict.get(dependency_key, [])
 
-      # Loop this way instead of "for dependency in" or "for index in xrange"
+      # Loop this way instead of "for dependency in" or "for index in range"
       # because the dependencies list will be modified within the loop body.
       index = 0
       while index < len(dependencies):
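
The loop that the rewritten comment describes mutates the dependencies list while scanning it, which is why it uses an explicit index and a while loop rather than a for loop. A toy version of the pattern (the wildcard expansion here is invented for illustration; gyp's real expansion resolves targets from the loaded .gyp data):

    dependencies = ['a.gyp:*', 'b.gyp:main']
    index = 0
    while index < len(dependencies):
        dep = dependencies[index]
        if dep.endswith(':*'):
            # Splice the expansion in place of the wildcard entry and do not
            # advance, so the newly inserted items are examined as well.
            dependencies[index:index + 1] = [dep[:-1] + t for t in ('lib', 'app')]
        else:
            index += 1
    print(dependencies)  # ['a.gyp:lib', 'a.gyp:app', 'b.gyp:main']
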
@@ -1475,7 +1476,7 @@ def Unify(l):
 def RemoveDuplicateDependencies(targets):
   """Makes sure every dependency appears only once in all targets'
   dependency lists."""
-  for target_name, target_dict in targets.iteritems():
+  for target_name, target_dict in targets.items():
     for dependency_key in dependency_sections:
       dependencies = target_dict.get(dependency_key, [])
       if dependencies:
@@ -1491,7 +1492,7 @@ def Filter(l, item):
 def RemoveSelfDependencies(targets):
   """Remove self dependencies from targets that have the prune_self_dependency
   variable set."""
-  for target_name, target_dict in targets.iteritems():
+  for target_name, target_dict in targets.items():
     for dependency_key in dependency_sections:
       dependencies = target_dict.get(dependency_key, [])
       if dependencies:
@@ -1504,7 +1505,7 @@ def RemoveSelfDependencies(targets):
 def RemoveLinkDependenciesFromNoneTargets(targets):
   """Remove dependencies having the 'link_dependency' attribute from the 'none'
   targets."""
-  for target_name, target_dict in targets.iteritems():
+  for target_name, target_dict in targets.items():
     for dependency_key in dependency_sections:
       dependencies = target_dict.get(dependency_key, [])
       if dependencies:
@@ -1542,15 +1543,11 @@ def FlattenToList(self):
     # dependents.
     flat_list = OrderedSet()
 
-    def ExtractNodeRef(node):
-      """Extracts the object that the node represents from the given node."""
-      return node.ref
-
     # in_degree_zeros is the list of DependencyGraphNodes that have no
     # dependencies not in flat_list.  Initially, it is a copy of the children
     # of this node, because when the graph was built, nodes with no
     # dependencies were made implicit dependents of the root node.
-    in_degree_zeros = sorted(self.dependents[:], key=ExtractNodeRef)
+    in_degree_zeros = set(self.dependents[:])
 
     while in_degree_zeros:
       # Nodes in in_degree_zeros have no dependencies not in flat_list, so they
@@ -1562,13 +1559,12 @@ def FlattenToList(self):
 
       # Look at dependents of the node just added to flat_list.  Some of them
       # may now belong in in_degree_zeros.
-      for node_dependent in sorted(node.dependents, key=ExtractNodeRef):
+      for node_dependent in node.dependents:
         is_in_degree_zero = True
         # TODO: We want to check through the
         # node_dependent.dependencies list but if it's long and we
         # always start at the beginning, then we get O(n^2) behaviour.
-        for node_dependent_dependency in (sorted(node_dependent.dependencies,
-                                                 key=ExtractNodeRef)):
+        for node_dependent_dependency in node_dependent.dependencies:
          if not node_dependent_dependency.ref in flat_list:
             # The dependent has one or more dependencies not in flat_list.
             # There will be more chances to add it to flat_list when examining
@@ -1581,7 +1577,7 @@ def FlattenToList(self):
           # All of the dependent's dependencies are already in flat_list.  Add
           # it to in_degree_zeros where it will be processed in a future
           # iteration of the outer loop.
-          in_degree_zeros += [node_dependent]
+          in_degree_zeros.add(node_dependent)
 
     return list(flat_list)
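
The FlattenToList() change is a Kahn-style topological sort with the zero-in-degree work list kept in a set: dropping the sorted(..., key=ExtractNodeRef) calls trades a deterministic visit order for O(1) membership and insertion, and the result is still a valid topological order since flat_list remains an OrderedSet. A compact self-contained version of the algorithm, using a plain dict mapping node to its set of dependencies rather than the DependencyGraphNode API:

    def flatten(graph):
        remaining = {node: set(deps) for node, deps in graph.items()}
        in_degree_zeros = {node for node, deps in remaining.items() if not deps}
        flat_list = []
        while in_degree_zeros:
            node = in_degree_zeros.pop()
            flat_list.append(node)
            # A node whose last unfinished dependency was just emitted becomes
            # eligible, mirroring the is_in_degree_zero bookkeeping above.
            for dependent, deps in remaining.items():
                deps.discard(node)
                if not deps and dependent not in flat_list:
                    in_degree_zeros.add(dependent)
        return flat_list

    print(flatten({'app': {'lib'}, 'lib': {'base'}, 'base': set()}))
    # ['base', 'lib', 'app']
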
@@ -1737,13 +1733,12 @@ def _LinkDependenciesInternal(self, targets, include_shared_libraries,
       dependencies.add(self.ref)
       return dependencies
 
-    # Executables, mac kernel extensions, windows drivers and loadable modules
-    # are already fully and finally linked. Nothing else can be a link
-    # dependency of them, there can only be dependencies in the sense that a
-    # dependent target might run an executable or load the loadable_module.
+    # Executables, mac kernel extensions and loadable modules are already fully
+    # and finally linked. Nothing else can be a link dependency of them, there
+    # can only be dependencies in the sense that a dependent target might run
+    # an executable or load the loadable_module.
     if not initial and target_type in ('executable', 'loadable_module',
-                                       'mac_kernel_extension',
-                                       'windows_driver'):
+                                       'mac_kernel_extension'):
       return dependencies
 
     # Shared libraries are already fully linked.  They should only be included
@@ -1796,14 +1791,14 @@ def BuildDependencyList(targets):
   # Create a DependencyGraphNode for each target.  Put it into a dict for easy
   # access.
   dependency_nodes = {}
-  for target, spec in targets.iteritems():
+  for target, spec in targets.items():
     if target not in dependency_nodes:
       dependency_nodes[target] = DependencyGraphNode(target)
 
   # Set up the dependency links.  Targets that have no dependencies are treated
   # as dependent on root_node.
   root_node = DependencyGraphNode(None)
-  for target, spec in targets.iteritems():
+  for target, spec in targets.items():
     target_node = dependency_nodes[target]
     target_build_file = gyp.common.BuildFile(target)
     dependencies = spec.get('dependencies')
@@ -1846,20 +1841,20 @@ def VerifyNoGYPFileCircularDependencies(targets):
   # Create a DependencyGraphNode for each gyp file containing a target.  Put
   # it into a dict for easy access.
   dependency_nodes = {}
-  for target in targets.iterkeys():
+  for target in targets:
     build_file = gyp.common.BuildFile(target)
     if not build_file in dependency_nodes:
       dependency_nodes[build_file] = DependencyGraphNode(build_file)
 
   # Set up the dependency links.
-  for target, spec in targets.iteritems():
+  for target, spec in targets.items():
     build_file = gyp.common.BuildFile(target)
     build_file_node = dependency_nodes[build_file]
     target_dependencies = spec.get('dependencies', [])
     for dependency in target_dependencies:
       try:
         dependency_build_file = gyp.common.BuildFile(dependency)
-      except GypError, e:
+      except GypError as e:
         gyp.common.ExceptionAppend(
             e, 'while computing dependencies of .gyp file %s' % build_file)
         raise
@@ -1877,7 +1872,7 @@ def VerifyNoGYPFileCircularDependencies(targets):
 
   # Files that have no dependencies are treated as dependent on root_node.
   root_node = DependencyGraphNode(None)
-  for build_file_node in dependency_nodes.itervalues():
+  for build_file_node in dependency_nodes.values():
     if len(build_file_node.dependencies) == 0:
       build_file_node.dependencies.append(root_node)
       root_node.dependents.append(build_file_node)
@@ -2039,7 +2034,7 @@ def MakePathRelative(to_file, fro_file, item):
                     gyp.common.RelativePath(os.path.dirname(fro_file),
                                             os.path.dirname(to_file)),
                     item)).replace('\\', '/')
-    if item[-1] == '/':
+    if item[-1:] == '/':
       ret += '/'
     return ret
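
Besides the comment rewrite, this stretch applies the remaining dictionary-protocol changes: dict.iteritems(), iterkeys(), and itervalues() do not exist on Python 3, so the patch uses items()/values() or iterates the dict directly, all of which behave on both versions. The MakePathRelative() tweak is subtler: indexing an empty string raises, while slicing does not. Both in miniature:

    d = {'a': 1, 'b': 2}
    for k, v in d.items():   # replaces d.iteritems()
        print(k, v)
    for k in d:              # replaces d.iterkeys()
        print(k)

    item = ''
    print(item[-1:] == '/')  # False; item[-1] would raise IndexError
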
@@ -2117,7 +2112,7 @@ def is_in_set_or_list(x, s, l):
 
 def MergeDicts(to, fro, to_file, fro_file):
   # I wanted to name the parameter "from" but it's a Python keyword...
-  for k, v in fro.iteritems():
+  for k, v in fro.items():
     # It would be nice to do "if not k in to: to[k] = v" but that wouldn't give
     # copy semantics.  Something else may want to merge from the |fro| dict
     # later, and having the same dict ref pointed to twice in the tree isn't
@@ -2252,13 +2247,13 @@ def SetUpConfigurations(target, target_dict):
   if not 'configurations' in target_dict:
     target_dict['configurations'] = {'Default': {}}
   if not 'default_configuration' in target_dict:
-    concrete = [i for (i, config) in target_dict['configurations'].iteritems()
+    concrete = [i for (i, config) in target_dict['configurations'].items()
                 if not config.get('abstract')]
     target_dict['default_configuration'] = sorted(concrete)[0]
 
   merged_configurations = {}
   configs = target_dict['configurations']
-  for (configuration, old_configuration_dict) in configs.iteritems():
+  for (configuration, old_configuration_dict) in configs.items():
     # Skip abstract configurations (saves work only).
     if old_configuration_dict.get('abstract'):
       continue
@@ -2266,7 +2261,7 @@ def SetUpConfigurations(target, target_dict):
     # Get the inheritance relationship right by making a copy of the target
     # dict.
     new_configuration_dict = {}
-    for (key, target_val) in target_dict.iteritems():
+    for (key, target_val) in target_dict.items():
       key_ext = key[-1:]
       if key_ext in key_suffixes:
         key_base = key[:-1]
@@ -2350,7 +2345,7 @@ def ProcessListFiltersInDict(name, the_dict):
   lists = []
   del_lists = []
-  for key, value in the_dict.iteritems():
+  for key, value in the_dict.items():
     operation = key[-1]
     if operation != '!' and operation != '/':
       continue
@@ -2398,7 +2393,7 @@ def ProcessListFiltersInDict(name, the_dict):
     exclude_key = list_key + '!'
     if exclude_key in the_dict:
       for exclude_item in the_dict[exclude_key]:
-        for index in xrange(0, len(the_list)):
+        for index in range(0, len(the_list)):
          if exclude_item == the_list[index]:
             # This item matches the exclude_item, so set its action to 0
             # (exclude).
@@ -2424,7 +2419,7 @@ def ProcessListFiltersInDict(name, the_dict):
         raise ValueError('Unrecognized action ' + action + ' in ' + name + \
                          ' key ' + regex_key)
 
-      for index in xrange(0, len(the_list)):
+      for index in range(0, len(the_list)):
         list_item = the_list[index]
         if list_actions[index] == action_value:
           # Even if the regex matches, nothing will change so continue (regex
@@ -2455,7 +2450,7 @@ def ProcessListFiltersInDict(name, the_dict):
   # the indices of items that haven't been seen yet don't shift.  That means
   # that things need to be prepended to excluded_list to maintain them in the
   # same order that they existed in the_list.
-  for index in xrange(len(list_actions) - 1, -1, -1):
+  for index in range(len(list_actions) - 1, -1, -1):
     if list_actions[index] == 0:
       # Dump anything with action 0 (exclude).  Keep anything with action 1
       # (include) or -1 (no include or exclude seen for the item).
@@ -2468,7 +2463,7 @@ def ProcessListFiltersInDict(name, the_dict):
     the_dict[excluded_key] = excluded_list
 
   # Now recurse into subdicts and lists that may contain dicts.
-  for key, value in the_dict.iteritems():
+  for key, value in the_dict.items():
     if type(value) is dict:
       ProcessListFiltersInDict(key, value)
     elif type(value) is list:
@@ -2494,7 +2489,7 @@ def ValidateTargetType(target, target_dict):
   """
   VALID_TARGET_TYPES = ('executable', 'loadable_module',
                         'static_library', 'shared_library',
-                        'mac_kernel_extension', 'none', 'windows_driver')
+                        'mac_kernel_extension', 'none')
   target_type = target_dict.get('type', None)
   if target_type not in VALID_TARGET_TYPES:
     raise GypError("Target %s has an invalid target type '%s'.  "
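
The reverse scan rewritten above (xrange to range) is what lets ProcessListFiltersInDict() delete excluded items in place: walking from the end means deletions never shift the indices of entries not yet visited, and prepending to excluded_list preserves the original order. Distilled to its essentials (sample data invented):

    the_list = ['keep0', 'drop1', 'keep2', 'drop3']
    list_actions = [1, 0, 1, 0]   # 0 = exclude, 1 = include, as in gyp
    excluded_list = []
    for index in range(len(list_actions) - 1, -1, -1):
        if list_actions[index] == 0:
            excluded_list.insert(0, the_list[index])  # prepend to keep order
            del the_list[index]
    print(the_list)       # ['keep0', 'keep2']
    print(excluded_list)  # ['drop1', 'drop3']
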
" @@ -2525,13 +2520,13 @@ def ValidateSourcesInTarget(target, target_dict, build_file, basenames.setdefault(basename, []).append(source) error = '' - for basename, files in basenames.iteritems(): + for basename, files in basenames.items(): if len(files) > 1: error += ' %s: %s\n' % (basename, ' '.join(files)) if error: - print('static library %s has several files with the same basename:\n' % - target + error + 'libtool on Mac cannot handle that. Use ' + print('static library %s has several files with the same basename:\n' % target + + error + 'libtool on Mac cannot handle that. Use ' '--no-duplicate-basename-check to disable this validation.') raise GypError('Duplicate basenames in sources section, see list above') @@ -2645,7 +2640,7 @@ def ValidateActionsInTarget(target, target_dict, build_file): def TurnIntIntoStrInDict(the_dict): """Given dict the_dict, recursively converts all integers into strings. """ - # Use items instead of iteritems because there's no need to try to look at + # Use items instead of items because there's no need to try to look at # reinserted keys and their associated values. for k, v in the_dict.items(): if type(v) is int: @@ -2664,7 +2659,7 @@ def TurnIntIntoStrInDict(the_dict): def TurnIntIntoStrInList(the_list): """Given list the_list, recursively converts all integers into strings. """ - for index in xrange(0, len(the_list)): + for index in range(0, len(the_list)): item = the_list[index] if type(item) is int: the_list[index] = str(item) @@ -2782,7 +2777,7 @@ def Load(build_files, variables, includes, depth, generator_input_info, check, try: LoadTargetBuildFile(build_file, data, aux_data, variables, includes, depth, check, True) - except Exception, e: + except Exception as e: gyp.common.ExceptionAppend(e, 'while trying to load %s' % build_file) raise @@ -2804,7 +2799,7 @@ def Load(build_files, variables, includes, depth, generator_input_info, check, RemoveLinkDependenciesFromNoneTargets(targets) # Apply exclude (!) and regex (/) list filters only for dependency_sections. - for target_name, target_dict in targets.iteritems(): + for target_name, target_dict in targets.items(): tmp_dict = {} for key_base in dependency_sections: for op in ('', '!', '/'):