from __future__ import annotations
import contextlib
import copy
import dataclasses
import json
import logging
import os
import pathlib
import re
import shutil
import subprocess
import sys
from typing import Dict, Iterator, List, Optional, Tuple
# Absolute path to the source root (two directories above this file).
_SRC_PATH = pathlib.Path(__file__).resolve().parents[2]

_BUILD_ANDROID_GYP_PATH = _SRC_PATH / 'build/android/gyp'

# Make //build/android/gyp importable so `from util import build_utils` works.
if str(_BUILD_ANDROID_GYP_PATH) not in sys.path:
    sys.path.append(str(_BUILD_ANDROID_GYP_PATH))

from util import build_utils

# Dictionary keys used by the JSON parse tree emitted by
# `gn format --dump-tree=json`.
NODE_CHILD = 'child'
NODE_TYPE = 'type'
NODE_VALUE = 'value'
BEFORE_COMMENT = 'before_comment'
SUFFIX_COMMENT = 'suffix_comment'
AFTER_COMMENT = 'after_comment'
@contextlib.contextmanager
def _backup_and_restore_file_contents(path: str):
with open(path) as f:
contents = f.read()
try:
yield
finally:
with open(path, 'w') as f:
f.write(contents)
def _build_targets_output(
        out_dir: str,
        targets: List[str],
        should_print: Optional[bool] = None) -> Optional[str]:
    """Runs `autoninja -C out_dir targets` and returns its combined output.

    Args:
        out_dir: The build output directory passed to autoninja's -C.
        targets: Ninja target names to build.
        should_print: Echo progress to the terminal. Defaults to whether
            DEBUG logging is enabled.

    Returns:
        The full stdout+stderr text on success, or None when the build
        fails (non-zero exit code).
    """
    env = os.environ.copy()
    if should_print is None:
        should_print = logging.getLogger().isEnabledFor(logging.DEBUG)
    # The build summary would pollute the captured output; drop the flag if
    # present (pop with default avoids the membership check).
    env.pop('NINJA_SUMMARIZE_BUILD', None)
    proc = subprocess.Popen(['autoninja', '-C', out_dir] + targets,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT,
                            env=env,
                            text=True)
    lines = []
    prev_line = ''
    width = shutil.get_terminal_size().columns
    # Iterate the pipe to EOF. The previous pattern of
    # `while proc.poll() is None: proc.stdout.readline()` could drop output
    # still buffered in the pipe when the process exited between the poll
    # and the read, and could append a trailing empty string.
    for line in proc.stdout:
        lines.append(line)
        if should_print:
            if prev_line.startswith('[') and line.startswith('['):
                # Consecutive ninja status lines ([N/M] ...) overwrite each
                # other in place via \r; truncate to the terminal width.
                msg = line.rstrip()
                if len(msg) > width:
                    length_to_show = width - 5
                    msg = f'{msg[:2]}...{msg[-length_to_show:]}'
                msg = f'\r{msg}\033[K'
            elif prev_line.startswith('['):
                # Leaving the status-line run: move to a fresh line first.
                msg = f'\n{line}'
            else:
                msg = line
            print(msg, end='')
        prev_line = line
    # The pipe is at EOF here; wait() populates returncode.
    proc.wait()
    if proc.returncode != 0:
        return None
    return ''.join(lines)
def _generate_project_json_content(out_dir: str) -> str:
    """Regenerates project.json via `gn gen --ide=json` and returns its text."""
    build_utils.CheckOutput(['gn', 'gen', '--ide=json', out_dir])
    project_json_path = pathlib.Path(out_dir) / 'project.json'
    with project_json_path.open() as project_json_file:
        return project_json_file.read()
@dataclasses.dataclass
class DepList:
    """One deps-style list assignment found in a BUILD.gn parse tree."""
    # Name of the enclosing GN target, or None when the assignment is not
    # inside a recognizable target.
    target_name: Optional[str]
    # The variable being assigned, e.g. 'deps' or 'public_deps'.
    variable_name: str
    # The list's child parse-tree nodes; mutated in place by callers.
    child_nodes: List[dict]
    # The assignment operator token, e.g. '=' or '+='.
    operation: str
class BuildFile:
    """An editable view of one BUILD.gn file.

    The file is parsed into a JSON tree with `gn format --dump-tree=json`
    on __enter__ and, unless constructed with dryrun=True, written back via
    `gn format --read-tree=json` on __exit__. All edits mutate the parse
    tree in place.
    """

    def __init__(self,
                 build_gn_path: str,
                 root_gn_path: pathlib.Path,
                 *,
                 dryrun: bool = False):
        self._root = root_gn_path
        # Path of the BUILD.gn file relative to the GN source root.
        self._rel_path = os.path.relpath(build_gn_path, root_gn_path)
        # GN-style absolute label for this file's directory, e.g. '//a/b'.
        self._gn_rel_path = '//' + os.path.dirname(self._rel_path)
        self._full_path = os.path.abspath(build_gn_path)
        self._skip_write_content = dryrun

    def __enter__(self):
        output = build_utils.CheckOutput(
            ['gn', 'format', '--dump-tree=json', self._full_path])
        self._content = json.loads(output)
        # Serialized snapshot used to skip the write-back when unchanged.
        self._original_content = json.dumps(self._content)
        return self

    def __exit__(self, exc, value, tb):
        if not self._skip_write_content:
            self.write_content_to_file()

    def _find_all(self, match_fn):
        """Walks the parse tree collecting (target_name, match) pairs.

        |match_fn| is applied to every node; when it returns a non-None
        value, the pair (enclosing target name or None, value) is recorded
        and that subtree is not descended into further.
        """
        results = []

        def get_target_name(node) -> Optional[str]:
            # A target definition looks like:
            #   FUNCTION -> LIST -> LITERAL("name")
            if node.get(NODE_TYPE) != 'FUNCTION':
                return None
            children = node.get(NODE_CHILD)
            if not children:
                return None
            first_child = children[0]
            if first_child.get(NODE_TYPE) != 'LIST':
                return None
            grand_children = first_child.get(NODE_CHILD)
            if not grand_children:
                return None
            grand_child = grand_children[0]
            if grand_child.get(NODE_TYPE) != 'LITERAL':
                return None
            name = grand_child.get(NODE_VALUE)
            # Literal values keep their surrounding quotes; strip them.
            if name.startswith('"'):
                return name[1:-1]
            return name

        def recursive_find(root, last_known_target=None):
            # Track the innermost target name seen on the way down.
            target_name = get_target_name(root) or last_known_target
            matched = match_fn(root)
            if matched is not None:
                results.append((target_name, matched))
                return
            children = root.get(NODE_CHILD)
            if children:
                for child in children:
                    recursive_find(child, last_known_target=target_name)

        recursive_find(self._content)
        return results

    def _normalize(self,
                   name: Optional[str],
                   abs_path: bool = True,
                   allow_relative: bool = False):
        """Canonicalizes a GN dep label.

        Strips surrounding quotes; for non-'$' labels, optionally prefixes
        this file's directory label (when |abs_path| and the label is not
        already absolute) and appends the implicit ':basename' target when
        no ':' is present. With |allow_relative|, labels that point into
        this same file are returned in short ':name' form. Returns '' for
        a falsy |name|.
        """
        if not name:
            return ''
        if name.startswith('"'):
            name = name[1:-1]
        if not name.startswith('$'):
            if not name.startswith('//') and abs_path:
                name = self._gn_rel_path + name
            if not ':' in name:
                name += ':' + os.path.basename(name)
            if allow_relative:
                base_path, target_name = name.split(':')
                if base_path == self._gn_rel_path:
                    return ':' + target_name
        return name

    def _find_all_list_assignments(self):
        """Returns (target_name, (var_name, list_nodes, operation)) tuples
        for every assignment of a list literal to an identifier."""

        def match_list_assignments(node):
            if node.get(NODE_TYPE) != 'BINARY':
                return None
            # For BINARY nodes the value is the operator ('=', '+=', ...).
            operation = node.get(NODE_VALUE)
            children = node.get(NODE_CHILD)
            assert len(children) == 2, (
                'Binary nodes should have two child nodes, but the node is: '
                f'{node}')
            left_child, right_child = children
            if left_child.get(NODE_TYPE) != 'IDENTIFIER':
                return None
            name = left_child.get(NODE_VALUE)
            if right_child.get(NODE_TYPE) != 'LIST':
                return None
            list_of_nodes = right_child.get(NODE_CHILD)
            return name, list_of_nodes, operation

        return self._find_all(match_list_assignments)

    def _find_all_deps_lists(self) -> Iterator[DepList]:
        """Yields a DepList for every deps-like list assignment in the file."""
        list_tuples = self._find_all_list_assignments()
        for target_name, (var_name, node_list, operation) in list_tuples:
            # Matches 'deps', 'deps_*', '*_deps' and '*_deps_*' variables.
            if (var_name == 'deps' or var_name.startswith('deps_')
                    or var_name.endswith('_deps') or '_deps_' in var_name):
                yield DepList(target_name=target_name,
                              variable_name=var_name,
                              child_nodes=node_list,
                              operation=operation)

    def _new_literal_node(self, value: str, begin_line: int = 1):
        """Builds a minimal LITERAL parse-tree node wrapping |value|."""
        return {
            'location': {
                'begin_column': 1,
                'begin_line': begin_line,
                'end_column': 2,
                'end_line': begin_line,
            },
            'type': 'LITERAL',
            'value': f'"{value}"'
        }

    def _clone_replacing_value(self, node_to_copy: Dict, new_dep_name: str):
        """Deep-copies |node_to_copy|, drops its attached comments, and
        swaps its value for |new_dep_name| (quoted)."""
        new_dep = copy.deepcopy(node_to_copy)
        for comment_key in (BEFORE_COMMENT, AFTER_COMMENT, SUFFIX_COMMENT):
            new_dep.pop(comment_key, None)
        new_dep[NODE_VALUE] = f'"{new_dep_name}"'
        return new_dep

    def add_deps(self, target: str, deps: List[str]) -> bool:
        """Adds |deps| to |target|'s plain `deps = [...]` list.

        Deps already present are skipped with an info log. Returns True
        when at least one new dep was appended; prints a message when the
        target was not found at all.
        """
        added_new_dep = False
        normalized_target = self._normalize(target)
        for dep_list in self._find_all_deps_lists():
            if dep_list.target_name is None:
                continue
            # Only direct '=' assignments named exactly 'deps' are edited;
            # '+=' lists are left alone.
            if dep_list.operation != '=':
                continue
            full_target_name = f'{self._gn_rel_path}:{dep_list.target_name}'
            if (target != dep_list.target_name
                    and normalized_target != full_target_name):
                continue
            if dep_list.variable_name != 'deps':
                continue
            existing_dep_names = set(
                self._normalize(child.get(NODE_VALUE), abs_path=False)
                for child in dep_list.child_nodes)
            for new_dep_name in deps:
                new_dep_name = self._normalize(new_dep_name,
                                               allow_relative=True)
                if new_dep_name in existing_dep_names:
                    logging.info(
                        f'Skipping existing {new_dep_name} in {target}.deps')
                    continue
                logging.info(f'Adding {new_dep_name} to {target}.deps')
                if not dep_list.child_nodes:
                    new_dep = self._new_literal_node(new_dep_name)
                else:
                    # Clone an existing entry so location metadata stays
                    # consistent with the rest of the list.
                    new_dep = self._clone_replacing_value(
                        dep_list.child_nodes[0], new_dep_name)
                dep_list.child_nodes.append(new_dep)
                added_new_dep = True
        if not added_new_dep:
            print(f'Unable to find {target}')
        return added_new_dep

    def search_deps(self, name_query: Optional[str],
                    path_query: Optional[str]) -> bool:
        """Searches this file for the given regex queries and returns
        whether anything matched.

        |path_query| is matched against the file's relative path (a
        non-match short-circuits to False). With a path query but no name
        query, the matching path is printed. Otherwise each normalized dep
        name is matched against |name_query| and the first hit is printed.
        """
        if path_query:
            if not re.search(path_query, self._rel_path):
                return False
            elif not name_query:
                print(self._rel_path)
                return True
        for dep_list in self._find_all_deps_lists():
            for child in dep_list.child_nodes:
                dep_name = self._normalize(child.get(NODE_VALUE),
                                           abs_path=False)
                if name_query and re.search(name_query, dep_name):
                    print(f'{self._rel_path}: {dep_name} in '
                          f'{dep_list.target_name}.{dep_list.variable_name}')
                    return True
        return False

    def split_deps(self, original_dep_name: str,
                   new_dep_names: List[str]) -> bool:
        """Adds each of |new_dep_names| next to every occurrence of
        |original_dep_name|. Returns True if any list was modified."""
        split = False
        for new_dep_name in new_dep_names:
            if self._split_dep(original_dep_name, new_dep_name):
                split = True
        return split

    def _split_dep(self, original_dep_name: str, new_dep_name: str) -> bool:
        """Inserts |new_dep_name| immediately after |original_dep_name| in
        every dep list containing it, unless already present.

        Both names must be absolute GN labels starting with '//'. Returns
        True when at least one insertion happened.
        """
        for dep_name in (original_dep_name, new_dep_name):
            assert dep_name.startswith('//'), (
                f'Absolute GN path required, starting with //: {dep_name}')
        added_new_dep = False
        normalized_original_dep_name = self._normalize(original_dep_name)
        normalized_new_dep_name = self._normalize(new_dep_name)
        for dep_list in self._find_all_deps_lists():
            original_dep_idx = None
            new_dep_already_exists = False
            for idx, child in enumerate(dep_list.child_nodes):
                dep_name = self._normalize(child.get(NODE_VALUE))
                if dep_name == normalized_original_dep_name:
                    original_dep_idx = idx
                if dep_name == normalized_new_dep_name:
                    new_dep_already_exists = True
            if original_dep_idx is not None and not new_dep_already_exists:
                if dep_list.target_name is None:
                    target_str = self._gn_rel_path
                else:
                    target_str = f'{self._gn_rel_path}:{dep_list.target_name}'
                location = f"{target_str}'s {dep_list.variable_name} variable"
                logging.info(f'Adding {new_dep_name} to {location}')
                # Clone the original entry so formatting metadata matches.
                new_dep = self._clone_replacing_value(
                    dep_list.child_nodes[original_dep_idx], new_dep_name)
                dep_list.child_nodes.insert(original_dep_idx + 1, new_dep)
                added_new_dep = True
        return added_new_dep

    def remove_deps(self,
                    dep_names: List[str],
                    out_dir: str,
                    targets: List[str],
                    target_name_filter: Optional[str],
                    inline_mode: bool = False) -> bool:
        """Removes each dep in |dep_names| wherever the build still works.

        In |inline_mode| the first entry must be removable before the rest
        are attempted; otherwise False is returned immediately. Returns
        True if any dep was removed.
        """
        # NOTE: both code paths return a bool (the original Tuple[bool, str]
        # annotation did not match the returned values).
        if not inline_mode:
            deps_to_remove = dep_names
        else:
            first_dep = dep_names[0]
            if not self._remove_deps([first_dep], out_dir, targets,
                                     target_name_filter):
                return False
            deps_to_remove = dep_names[1:]
        return self._remove_deps(deps_to_remove, out_dir, targets,
                                 target_name_filter)

    def _remove_deps(self, dep_names: List[str], out_dir: str,
                     targets: List[str],
                     target_name_filter: Optional[str]) -> bool:
        """Attempts to remove each dep from each matching dep list, keeping
        a removal only when |targets| still build.

        For every candidate, the file is temporarily written to disk
        without the dep (restored afterwards by the backup context
        manager). A dep list whose removal leaves project.json unchanged is
        skipped entirely; otherwise the removal is kept when the build
        still succeeds. All names must be absolute '//' GN labels. Returns
        True if anything was removed from the in-memory tree.
        """
        normalized_dep_names = set()
        for dep_name in dep_names:
            assert dep_name.startswith('//'), (
                f'Absolute GN path required, starting with //: {dep_name}')
            normalized_dep_names.add(self._normalize(dep_name))
        removed_dep = False
        for dep_list in self._find_all_deps_lists():
            child_deps_to_remove = [
                c for c in dep_list.child_nodes
                if self._normalize(c.get(NODE_VALUE)) in normalized_dep_names
            ]
            if not child_deps_to_remove:
                continue
            if dep_list.target_name is None:
                target_name_str = self._gn_rel_path
            else:
                target_name_str = f'{self._gn_rel_path}:{dep_list.target_name}'
            if (target_name_filter is not None and
                    re.search(target_name_filter, target_name_str) is None):
                logging.info(f'Skip: Since re.search("{target_name_filter}", '
                             f'"{target_name_str}") is None.')
                continue
            location = f"{target_name_str}'s {dep_list.variable_name} variable"
            # Baseline GN project description, compared below to detect
            # whether this dep list affects the build graph at all.
            expected_json = _generate_project_json_content(out_dir)
            num_to_remove = len(child_deps_to_remove)
            for remove_idx, child_dep in enumerate(child_deps_to_remove):
                child_dep_name = self._normalize(child_dep.get(NODE_VALUE))
                idx_to_remove = dep_list.child_nodes.index(child_dep)
                logging.info(f'({remove_idx + 1}/{num_to_remove}) Found '
                             f'{child_dep_name} in {location}.')
                child_to_remove = dep_list.child_nodes[idx_to_remove]
                can_remove_dep = False
                with _backup_and_restore_file_contents(self._full_path):
                    # Write the file without the dep, test the build, then
                    # re-insert the node; the context manager restores the
                    # on-disk file afterwards either way.
                    dep_list.child_nodes.remove(child_to_remove)
                    self.write_content_to_file()
                    dep_list.child_nodes.insert(idx_to_remove, child_to_remove)
                    if expected_json is not None:
                        after_json = _generate_project_json_content(out_dir)
                        if expected_json == after_json:
                            # Removing a dep here changed nothing in the GN
                            # project description, so skip this whole list.
                            logging.info('Skip: No changes to project.json.')
                            break
                        # Only perform the project.json comparison once per
                        # dep list.
                        expected_json = None
                    if self._can_still_build_everything(out_dir, targets):
                        can_remove_dep = True
                if not can_remove_dep:
                    continue
                dep_list.child_nodes.remove(child_to_remove)
                # Preserve comments that preceded the removed dep by
                # re-attaching them to the following element, if any.
                if (BEFORE_COMMENT in child_to_remove
                        and idx_to_remove < len(dep_list.child_nodes)):
                    child_after = dep_list.child_nodes[idx_to_remove]
                    if BEFORE_COMMENT not in child_after:
                        child_after[BEFORE_COMMENT] = []
                    child_after[BEFORE_COMMENT][:] = (
                        child_to_remove[BEFORE_COMMENT] +
                        child_after[BEFORE_COMMENT])
                removed_dep = True
                logging.info(f'Removed {child_dep_name} from {location}.')
        return removed_dep

    def _can_still_build_everything(self, out_dir: str,
                                    targets: List[str]) -> bool:
        """Returns whether an autoninja build of |targets| succeeds and
        actually performed work."""
        output = _build_targets_output(out_dir, targets)
        if output is None:
            logging.info('Ninja failed to build all targets')
            return False
        # A no-op build proves nothing about the edit, so it is also
        # treated as a failure.
        if 'ninja: no work to do.' in output:
            logging.info('Ninja did not find any targets to build')
            return False
        return True

    def write_content_to_file(self) -> None:
        """Writes the (possibly modified) parse tree back to the BUILD.gn
        file via `gn format --read-tree=json`, skipping the subprocess when
        nothing changed since __enter__."""
        current_content = json.dumps(self._content)
        if current_content != self._original_content:
            subprocess.run(
                ['gn', 'format', '--read-tree=json', self._full_path],
                text=True,
                check=True,
                input=current_content)