Commit 9601f078 authored by Roberto Carrillo, committed by Commit Bot

Use posixpath instead of os.path in checkteamtags code.

The issue is that os.path uses backslashes when run on Windows, which
makes the unit tests fail.

This change makes the code always use forward slashes.

R=sajjadm,martiniss,joenotcharles

Bug: 1020240
Change-Id: Ie8637a23f6799d2d353971b7ce31c306edfaffa3
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/1894328
Reviewed-by: Nodir Turakulov <nodir@chromium.org>
Commit-Queue: Roberto Carrillo <robertocn@chromium.org>
Cr-Commit-Position: refs/heads/master@{#711924}
parent 55a9f014
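For context, the mismatch is easy to reproduce on any platform with ntpath, the Windows flavor of os.path in the standard library (a minimal sketch, not part of the change; the joined path is only an example):

    import ntpath
    import posixpath

    rel = ntpath.join('tools', 'checkteamtags', 'OWNERS')
    print(rel)                                     # tools\checkteamtags\OWNERS
    print(rel.replace(ntpath.sep, posixpath.sep))  # tools/checkteamtags/OWNERS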
@@ -11,13 +11,14 @@ import json
 import logging
 import optparse
 import os
+import posixpath
 import re
 import sys
 import urllib2

 from collections import defaultdict

-from owners_file_tags import parse
+from owners_file_tags import parse, uniform_path_format

 DEFAULT_MAPPING_URL = \
@@ -105,7 +106,9 @@ def validate_mappings(options, args):
       deleted.append(os.path.dirname(rel))

   # Update component mapping with current changes.
-  for rel_path, tags in affected.iteritems():
+  for rel_path_native, tags in affected.iteritems():
+    # Make the path use forward slashes always.
+    rel_path = uniform_path_format(rel_path_native)
     component = tags.get('component')
     team = tags.get('team')
     os_tag = tags.get('os')
@@ -183,7 +183,7 @@ class ExtractComponentsTest(unittest.TestCase):
       with mock.patch('sys.stdout', saved_output):
         extract_components.main(['%prog', '-v', 'src'])
       output = saved_output.getvalue()
-      self.assertIn('./OWNERS has no COMPONENT tag', output)
+      self.assertIn('OWNERS has no COMPONENT tag', output)

   def testCoverage(self):
     with mock.patch('extract_components.scrape_owners', return_value={
@@ -253,4 +253,4 @@ class ExtractComponentsTest(unittest.TestCase):
       self.assertIn('OWNERS files that have missing team and component '
                     'by depth:', output)
       self.assertIn('at depth 0', output)
-      self.assertIn('[\'./OWNERS\']', output)
+      self.assertIn('[\'OWNERS\']', output)
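The expected strings lose their './' prefix because posixpath.normpath collapses the leading './' that os.path.join produces for the root directory (a small sketch of that assumption, runnable on any platform):

    import os
    import posixpath

    joined = os.path.join('.', 'OWNERS')  # './OWNERS' on POSIX, '.\OWNERS' on Windows
    print(posixpath.normpath(joined.replace(os.sep, posixpath.sep)))  # OWNERS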
@@ -3,11 +3,16 @@
 # found in the LICENSE file.

 import os
+import posixpath
 import re

 from collections import defaultdict


+def uniform_path_format(native_path):
+  """Alters the path if needed to be separated by forward slashes."""
+  return posixpath.normpath(native_path.replace(os.sep, posixpath.sep))
+
+
 def parse(filename):
   """Searches the file for lines that start with `# TEAM:` or `# COMPONENT:`.
@@ -77,10 +82,10 @@ def aggregate_components_from_owners(all_owners_data, root):
   dir_missing_info_by_depth = defaultdict(list)
   dir_to_team = {}
   for rel_dirname, owners_data in all_owners_data.iteritems():
-    # We apply relpath to remove any possible `.` and `..` chunks and make
-    # counting separators work correctly as a means of obtaining the file_depth.
-    rel_path = os.path.relpath(rel_dirname, root)
-    file_depth = 0 if rel_path == '.' else rel_path.count(os.path.sep) + 1
+    # Normalize this relative path to posix-style to make counting separators
+    # work correctly as a means of obtaining the file_depth.
+    rel_path = uniform_path_format(os.path.relpath(rel_dirname, root))
+    file_depth = 0 if rel_path == '.' else rel_path.count(posixpath.sep) + 1
     num_total += 1
     num_total_by_depth[file_depth] += 1
     component = owners_data.get('component')
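For illustration, the depth computed from a normalized path looks like this (the directory name is made up):

    import posixpath

    rel_path = 'base/allocator'  # hypothetical relative OWNERS directory
    file_depth = 0 if rel_path == '.' else rel_path.count(posixpath.sep) + 1
    # file_depth == 2; normalizing first guarantees the counted separator is
    # always '/', regardless of the platform the script runs on.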
@@ -102,7 +107,7 @@ def aggregate_components_from_owners(all_owners_data, root):
                                            component]['depth']:
         topmost_team[component] = {'depth': file_depth, 'team': team}
     else:
-      rel_owners_path = os.path.join(rel_dirname, 'OWNERS')
+      rel_owners_path = uniform_path_format(os.path.join(rel_dirname, 'OWNERS'))
       warnings.append('%s has no COMPONENT tag' % rel_owners_path)
       if not team and not os_tag:
         dir_missing_info_by_depth[file_depth].append(rel_owners_path)
@@ -175,7 +180,7 @@ def scrape_owners(root, include_subdirs):
     """ Find the value of tag in the nearest ancestor that defines it."""
     ancestor = os.path.dirname(dirname)
     while ancestor:
-      rel_ancestor = os.path.relpath(ancestor, root)
+      rel_ancestor = uniform_path_format(os.path.relpath(ancestor, root))
       if rel_ancestor in data and data[rel_ancestor].get(tag):
         return data[rel_ancestor][tag]
       if rel_ancestor == '.':
@@ -186,7 +191,7 @@ def scrape_owners(root, include_subdirs):
   for dirname, _, files in os.walk(root):
     # Proofing against windows casing oddities.
     owners_file_names = [f for f in files if f.upper() == 'OWNERS']
-    rel_dirname = os.path.relpath(dirname, root)
+    rel_dirname = uniform_path_format(os.path.relpath(dirname, root))
     if owners_file_names or include_subdirs:
       if owners_file_names:
         owners_full_path = os.path.join(dirname, owners_file_names[0])