Mirror of https://gerrit.googlesource.com/git-repo (synced 2025-06-26 20:17:52 +00:00)
Format codebase with black and check formatting in CQ
Apply rules set by https://gerrit-review.googlesource.com/c/git-repo/+/362954/ across the codebase and fix any lingering errors caught by flake8. Also check black formatting in run_tests (and CQ).

Bug: b/267675342
Change-Id: I972d77649dac351150dcfeb1cd1ad0ea2efc1956
Reviewed-on: https://gerrit-review.googlesource.com/c/git-repo/+/363474
Reviewed-by: Mike Frysinger <vapier@google.com>
Tested-by: Gavin Mak <gavinmak@google.com>
Commit-Queue: Gavin Mak <gavinmak@google.com>
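The change also wires a formatting check into run_tests and the CQ. As a rough illustration only (this is not the repo's actual run_tests code, and the checked paths are placeholders), such a gate can shell out to `black --check`, which exits non-zero whenever any file would be reformatted:

# Hypothetical sketch of a CQ-style formatting gate; the real run_tests
# integration may differ.  `black --check` exits non-zero if any file
# would be reformatted, so its return code can drive a pass/fail result.
import subprocess
import sys


def check_black_formatting(paths=(".",)):
    """Return True if everything under `paths` is already black-formatted."""
    result = subprocess.run(
        [sys.executable, "-m", "black", "--check", *paths],
        capture_output=True,
        text=True,
    )
    if result.returncode != 0:
        # Surface black's report of which files would change.
        sys.stderr.write(result.stdout + result.stderr)
    return result.returncode == 0


if __name__ == "__main__":
    sys.exit(0 if check_black_formatting() else 1)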
@@ -21,5 +21,5 @@ import repo_trace

 @pytest.fixture(autouse=True)
 def disable_repo_trace(tmp_path):
-  """Set an environment marker to relax certain strict checks for test code."""
-  repo_trace._TRACE_FILE = str(tmp_path / 'TRACE_FILE_from_test')
+    """Set an environment marker to relax certain strict checks for test code."""  # noqa: E501
+    repo_trace._TRACE_FILE = str(tmp_path / "TRACE_FILE_from_test")
@@ -20,37 +20,37 @@ from editor import Editor


 class EditorTestCase(unittest.TestCase):
-  """Take care of resetting Editor state across tests."""
+    """Take care of resetting Editor state across tests."""

-  def setUp(self):
-    self.setEditor(None)
+    def setUp(self):
+        self.setEditor(None)

-  def tearDown(self):
-    self.setEditor(None)
+    def tearDown(self):
+        self.setEditor(None)

-  @staticmethod
-  def setEditor(editor):
-    Editor._editor = editor
+    @staticmethod
+    def setEditor(editor):
+        Editor._editor = editor


 class GetEditor(EditorTestCase):
-  """Check GetEditor behavior."""
+    """Check GetEditor behavior."""

-  def test_basic(self):
-    """Basic checking of _GetEditor."""
-    self.setEditor(':')
-    self.assertEqual(':', Editor._GetEditor())
+    def test_basic(self):
+        """Basic checking of _GetEditor."""
+        self.setEditor(":")
+        self.assertEqual(":", Editor._GetEditor())


 class EditString(EditorTestCase):
-  """Check EditString behavior."""
+    """Check EditString behavior."""

-  def test_no_editor(self):
-    """Check behavior when no editor is available."""
-    self.setEditor(':')
-    self.assertEqual('foo', Editor.EditString('foo'))
+    def test_no_editor(self):
+        """Check behavior when no editor is available."""
+        self.setEditor(":")
+        self.assertEqual("foo", Editor.EditString("foo"))

-  def test_cat_editor(self):
-    """Check behavior when editor is `cat`."""
-    self.setEditor('cat')
-    self.assertEqual('foo', Editor.EditString('foo'))
+    def test_cat_editor(self):
+        """Check behavior when editor is `cat`."""
+        self.setEditor("cat")
+        self.assertEqual("foo", Editor.EditString("foo"))
@@ -22,32 +22,34 @@ import error


 class PickleTests(unittest.TestCase):
-  """Make sure all our custom exceptions can be pickled."""
+    """Make sure all our custom exceptions can be pickled."""

-  def getExceptions(self):
-    """Return all our custom exceptions."""
-    for name in dir(error):
-      cls = getattr(error, name)
-      if isinstance(cls, type) and issubclass(cls, Exception):
-        yield cls
+    def getExceptions(self):
+        """Return all our custom exceptions."""
+        for name in dir(error):
+            cls = getattr(error, name)
+            if isinstance(cls, type) and issubclass(cls, Exception):
+                yield cls

-  def testExceptionLookup(self):
-    """Make sure our introspection logic works."""
-    classes = list(self.getExceptions())
-    self.assertIn(error.HookError, classes)
-    # Don't assert the exact number to avoid being a change-detector test.
-    self.assertGreater(len(classes), 10)
+    def testExceptionLookup(self):
+        """Make sure our introspection logic works."""
+        classes = list(self.getExceptions())
+        self.assertIn(error.HookError, classes)
+        # Don't assert the exact number to avoid being a change-detector test.
+        self.assertGreater(len(classes), 10)

-  def testPickle(self):
-    """Try to pickle all the exceptions."""
-    for cls in self.getExceptions():
-      args = inspect.getfullargspec(cls.__init__).args[1:]
-      obj = cls(*args)
-      p = pickle.dumps(obj)
-      try:
-        newobj = pickle.loads(p)
-      except Exception as e:  # pylint: disable=broad-except
-        self.fail('Class %s is unable to be pickled: %s\n'
-                  'Incomplete super().__init__(...) call?' % (cls, e))
-      self.assertIsInstance(newobj, cls)
-      self.assertEqual(str(obj), str(newobj))
+    def testPickle(self):
+        """Try to pickle all the exceptions."""
+        for cls in self.getExceptions():
+            args = inspect.getfullargspec(cls.__init__).args[1:]
+            obj = cls(*args)
+            p = pickle.dumps(obj)
+            try:
+                newobj = pickle.loads(p)
+            except Exception as e:  # pylint: disable=broad-except
+                self.fail(
+                    "Class %s is unable to be pickled: %s\n"
+                    "Incomplete super().__init__(...) call?" % (cls, e)
+                )
+            self.assertIsInstance(newobj, cls)
+            self.assertEqual(str(obj), str(newobj))
@@ -19,138 +19,146 @@ import os
 import unittest

 try:
-  from unittest import mock
+    from unittest import mock
 except ImportError:
-  import mock
+    import mock

 import git_command
 import wrapper


 class GitCommandTest(unittest.TestCase):
-  """Tests the GitCommand class (via git_command.git)."""
+    """Tests the GitCommand class (via git_command.git)."""

-  def setUp(self):
-
-    def realpath_mock(val):
-      return val
-
-    mock.patch.object(os.path, 'realpath', side_effect=realpath_mock).start()
+    def setUp(self):
+        def realpath_mock(val):
+            return val
+
+        mock.patch.object(
+            os.path, "realpath", side_effect=realpath_mock
+        ).start()

-  def tearDown(self):
-    mock.patch.stopall()
+    def tearDown(self):
+        mock.patch.stopall()

-  def test_alternative_setting_when_matching(self):
-    r = git_command._build_env(
-        objdir = os.path.join('zap', 'objects'),
-        gitdir = 'zap'
-    )
-
-    self.assertIsNone(r.get('GIT_ALTERNATE_OBJECT_DIRECTORIES'))
-    self.assertEqual(r.get('GIT_OBJECT_DIRECTORY'), os.path.join('zap', 'objects'))
+    def test_alternative_setting_when_matching(self):
+        r = git_command._build_env(
+            objdir=os.path.join("zap", "objects"), gitdir="zap"
+        )
+
+        self.assertIsNone(r.get("GIT_ALTERNATE_OBJECT_DIRECTORIES"))
+        self.assertEqual(
+            r.get("GIT_OBJECT_DIRECTORY"), os.path.join("zap", "objects")
+        )

-  def test_alternative_setting_when_different(self):
-    r = git_command._build_env(
-        objdir = os.path.join('wow', 'objects'),
-        gitdir = 'zap'
-    )
-
-    self.assertEqual(r.get('GIT_ALTERNATE_OBJECT_DIRECTORIES'), os.path.join('zap', 'objects'))
-    self.assertEqual(r.get('GIT_OBJECT_DIRECTORY'), os.path.join('wow', 'objects'))
+    def test_alternative_setting_when_different(self):
+        r = git_command._build_env(
+            objdir=os.path.join("wow", "objects"), gitdir="zap"
+        )
+
+        self.assertEqual(
+            r.get("GIT_ALTERNATE_OBJECT_DIRECTORIES"),
+            os.path.join("zap", "objects"),
+        )
+        self.assertEqual(
+            r.get("GIT_OBJECT_DIRECTORY"), os.path.join("wow", "objects")
+        )


 class GitCallUnitTest(unittest.TestCase):
-  """Tests the _GitCall class (via git_command.git)."""
+    """Tests the _GitCall class (via git_command.git)."""

-  def test_version_tuple(self):
-    """Check git.version_tuple() handling."""
-    ver = git_command.git.version_tuple()
-    self.assertIsNotNone(ver)
-
-    # We don't dive too deep into the values here to avoid having to update
-    # whenever git versions change. We do check relative to this min version
-    # as this is what `repo` itself requires via MIN_GIT_VERSION.
-    MIN_GIT_VERSION = (2, 10, 2)
-    self.assertTrue(isinstance(ver.major, int))
-    self.assertTrue(isinstance(ver.minor, int))
-    self.assertTrue(isinstance(ver.micro, int))
-
-    self.assertGreater(ver.major, MIN_GIT_VERSION[0] - 1)
-    self.assertGreaterEqual(ver.micro, 0)
-    self.assertGreaterEqual(ver.major, 0)
-
-    self.assertGreaterEqual(ver, MIN_GIT_VERSION)
-    self.assertLess(ver, (9999, 9999, 9999))
-
-    self.assertNotEqual('', ver.full)
+    def test_version_tuple(self):
+        """Check git.version_tuple() handling."""
+        ver = git_command.git.version_tuple()
+        self.assertIsNotNone(ver)
+
+        # We don't dive too deep into the values here to avoid having to update
+        # whenever git versions change. We do check relative to this min
+        # version as this is what `repo` itself requires via MIN_GIT_VERSION.
+        MIN_GIT_VERSION = (2, 10, 2)
+        self.assertTrue(isinstance(ver.major, int))
+        self.assertTrue(isinstance(ver.minor, int))
+        self.assertTrue(isinstance(ver.micro, int))
+
+        self.assertGreater(ver.major, MIN_GIT_VERSION[0] - 1)
+        self.assertGreaterEqual(ver.micro, 0)
+        self.assertGreaterEqual(ver.major, 0)
+
+        self.assertGreaterEqual(ver, MIN_GIT_VERSION)
+        self.assertLess(ver, (9999, 9999, 9999))
+
+        self.assertNotEqual("", ver.full)


 class UserAgentUnitTest(unittest.TestCase):
-  """Tests the UserAgent function."""
+    """Tests the UserAgent function."""

-  def test_smoke_os(self):
-    """Make sure UA OS setting returns something useful."""
-    os_name = git_command.user_agent.os
-    # We can't dive too deep because of OS/tool differences, but we can check
-    # the general form.
-    m = re.match(r'^[^ ]+$', os_name)
-    self.assertIsNotNone(m)
+    def test_smoke_os(self):
+        """Make sure UA OS setting returns something useful."""
+        os_name = git_command.user_agent.os
+        # We can't dive too deep because of OS/tool differences, but we can
+        # check the general form.
+        m = re.match(r"^[^ ]+$", os_name)
+        self.assertIsNotNone(m)

-  def test_smoke_repo(self):
-    """Make sure repo UA returns something useful."""
-    ua = git_command.user_agent.repo
-    # We can't dive too deep because of OS/tool differences, but we can check
-    # the general form.
-    m = re.match(r'^git-repo/[^ ]+ ([^ ]+) git/[^ ]+ Python/[0-9.]+', ua)
-    self.assertIsNotNone(m)
+    def test_smoke_repo(self):
+        """Make sure repo UA returns something useful."""
+        ua = git_command.user_agent.repo
+        # We can't dive too deep because of OS/tool differences, but we can
+        # check the general form.
+        m = re.match(r"^git-repo/[^ ]+ ([^ ]+) git/[^ ]+ Python/[0-9.]+", ua)
+        self.assertIsNotNone(m)

-  def test_smoke_git(self):
-    """Make sure git UA returns something useful."""
-    ua = git_command.user_agent.git
-    # We can't dive too deep because of OS/tool differences, but we can check
-    # the general form.
-    m = re.match(r'^git/[^ ]+ ([^ ]+) git-repo/[^ ]+', ua)
-    self.assertIsNotNone(m)
+    def test_smoke_git(self):
+        """Make sure git UA returns something useful."""
+        ua = git_command.user_agent.git
+        # We can't dive too deep because of OS/tool differences, but we can
+        # check the general form.
+        m = re.match(r"^git/[^ ]+ ([^ ]+) git-repo/[^ ]+", ua)
+        self.assertIsNotNone(m)


 class GitRequireTests(unittest.TestCase):
-  """Test the git_require helper."""
+    """Test the git_require helper."""

-  def setUp(self):
-    self.wrapper = wrapper.Wrapper()
-    ver = self.wrapper.GitVersion(1, 2, 3, 4)
-    mock.patch.object(git_command.git, 'version_tuple', return_value=ver).start()
+    def setUp(self):
+        self.wrapper = wrapper.Wrapper()
+        ver = self.wrapper.GitVersion(1, 2, 3, 4)
+        mock.patch.object(
+            git_command.git, "version_tuple", return_value=ver
+        ).start()

-  def tearDown(self):
-    mock.patch.stopall()
+    def tearDown(self):
+        mock.patch.stopall()

-  def test_older_nonfatal(self):
-    """Test non-fatal require calls with old versions."""
-    self.assertFalse(git_command.git_require((2,)))
-    self.assertFalse(git_command.git_require((1, 3)))
-    self.assertFalse(git_command.git_require((1, 2, 4)))
-    self.assertFalse(git_command.git_require((1, 2, 3, 5)))
+    def test_older_nonfatal(self):
+        """Test non-fatal require calls with old versions."""
+        self.assertFalse(git_command.git_require((2,)))
+        self.assertFalse(git_command.git_require((1, 3)))
+        self.assertFalse(git_command.git_require((1, 2, 4)))
+        self.assertFalse(git_command.git_require((1, 2, 3, 5)))

-  def test_newer_nonfatal(self):
-    """Test non-fatal require calls with newer versions."""
-    self.assertTrue(git_command.git_require((0,)))
-    self.assertTrue(git_command.git_require((1, 0)))
-    self.assertTrue(git_command.git_require((1, 2, 0)))
-    self.assertTrue(git_command.git_require((1, 2, 3, 0)))
+    def test_newer_nonfatal(self):
+        """Test non-fatal require calls with newer versions."""
+        self.assertTrue(git_command.git_require((0,)))
+        self.assertTrue(git_command.git_require((1, 0)))
+        self.assertTrue(git_command.git_require((1, 2, 0)))
+        self.assertTrue(git_command.git_require((1, 2, 3, 0)))

-  def test_equal_nonfatal(self):
-    """Test require calls with equal values."""
-    self.assertTrue(git_command.git_require((1, 2, 3, 4), fail=False))
-    self.assertTrue(git_command.git_require((1, 2, 3, 4), fail=True))
+    def test_equal_nonfatal(self):
+        """Test require calls with equal values."""
+        self.assertTrue(git_command.git_require((1, 2, 3, 4), fail=False))
+        self.assertTrue(git_command.git_require((1, 2, 3, 4), fail=True))

-  def test_older_fatal(self):
-    """Test fatal require calls with old versions."""
-    with self.assertRaises(SystemExit) as e:
-      git_command.git_require((2,), fail=True)
-      self.assertNotEqual(0, e.code)
+    def test_older_fatal(self):
+        """Test fatal require calls with old versions."""
+        with self.assertRaises(SystemExit) as e:
+            git_command.git_require((2,), fail=True)
+            self.assertNotEqual(0, e.code)

-  def test_older_fatal_msg(self):
-    """Test fatal require calls with old versions and message."""
-    with self.assertRaises(SystemExit) as e:
-      git_command.git_require((2,), fail=True, msg='so sad')
-      self.assertNotEqual(0, e.code)
+    def test_older_fatal_msg(self):
+        """Test fatal require calls with old versions and message."""
+        with self.assertRaises(SystemExit) as e:
+            git_command.git_require((2,), fail=True, msg="so sad")
+            self.assertNotEqual(0, e.code)
@ -22,167 +22,169 @@ import git_config
|
||||
|
||||
|
||||
def fixture(*paths):
|
||||
"""Return a path relative to test/fixtures.
|
||||
"""
|
||||
return os.path.join(os.path.dirname(__file__), 'fixtures', *paths)
|
||||
"""Return a path relative to test/fixtures."""
|
||||
return os.path.join(os.path.dirname(__file__), "fixtures", *paths)
|
||||
|
||||
|
||||
class GitConfigReadOnlyTests(unittest.TestCase):
|
||||
"""Read-only tests of the GitConfig class."""
|
||||
"""Read-only tests of the GitConfig class."""
|
||||
|
||||
def setUp(self):
|
||||
"""Create a GitConfig object using the test.gitconfig fixture.
|
||||
"""
|
||||
config_fixture = fixture('test.gitconfig')
|
||||
self.config = git_config.GitConfig(config_fixture)
|
||||
def setUp(self):
|
||||
"""Create a GitConfig object using the test.gitconfig fixture."""
|
||||
config_fixture = fixture("test.gitconfig")
|
||||
self.config = git_config.GitConfig(config_fixture)
|
||||
|
||||
def test_GetString_with_empty_config_values(self):
|
||||
"""
|
||||
Test config entries with no value.
|
||||
def test_GetString_with_empty_config_values(self):
|
||||
"""
|
||||
Test config entries with no value.
|
||||
|
||||
[section]
|
||||
empty
|
||||
[section]
|
||||
empty
|
||||
|
||||
"""
|
||||
val = self.config.GetString('section.empty')
|
||||
self.assertEqual(val, None)
|
||||
"""
|
||||
val = self.config.GetString("section.empty")
|
||||
self.assertEqual(val, None)
|
||||
|
||||
def test_GetString_with_true_value(self):
|
||||
"""
|
||||
Test config entries with a string value.
|
||||
def test_GetString_with_true_value(self):
|
||||
"""
|
||||
Test config entries with a string value.
|
||||
|
||||
[section]
|
||||
nonempty = true
|
||||
[section]
|
||||
nonempty = true
|
||||
|
||||
"""
|
||||
val = self.config.GetString('section.nonempty')
|
||||
self.assertEqual(val, 'true')
|
||||
"""
|
||||
val = self.config.GetString("section.nonempty")
|
||||
self.assertEqual(val, "true")
|
||||
|
||||
def test_GetString_from_missing_file(self):
|
||||
"""
|
||||
Test missing config file
|
||||
"""
|
||||
config_fixture = fixture('not.present.gitconfig')
|
||||
config = git_config.GitConfig(config_fixture)
|
||||
val = config.GetString('empty')
|
||||
self.assertEqual(val, None)
|
||||
def test_GetString_from_missing_file(self):
|
||||
"""
|
||||
Test missing config file
|
||||
"""
|
||||
config_fixture = fixture("not.present.gitconfig")
|
||||
config = git_config.GitConfig(config_fixture)
|
||||
val = config.GetString("empty")
|
||||
self.assertEqual(val, None)
|
||||
|
||||
def test_GetBoolean_undefined(self):
|
||||
"""Test GetBoolean on key that doesn't exist."""
|
||||
self.assertIsNone(self.config.GetBoolean('section.missing'))
|
||||
def test_GetBoolean_undefined(self):
|
||||
"""Test GetBoolean on key that doesn't exist."""
|
||||
self.assertIsNone(self.config.GetBoolean("section.missing"))
|
||||
|
||||
def test_GetBoolean_invalid(self):
|
||||
"""Test GetBoolean on invalid boolean value."""
|
||||
self.assertIsNone(self.config.GetBoolean('section.boolinvalid'))
|
||||
def test_GetBoolean_invalid(self):
|
||||
"""Test GetBoolean on invalid boolean value."""
|
||||
self.assertIsNone(self.config.GetBoolean("section.boolinvalid"))
|
||||
|
||||
def test_GetBoolean_true(self):
|
||||
"""Test GetBoolean on valid true boolean."""
|
||||
self.assertTrue(self.config.GetBoolean('section.booltrue'))
|
||||
def test_GetBoolean_true(self):
|
||||
"""Test GetBoolean on valid true boolean."""
|
||||
self.assertTrue(self.config.GetBoolean("section.booltrue"))
|
||||
|
||||
def test_GetBoolean_false(self):
|
||||
"""Test GetBoolean on valid false boolean."""
|
||||
self.assertFalse(self.config.GetBoolean('section.boolfalse'))
|
||||
def test_GetBoolean_false(self):
|
||||
"""Test GetBoolean on valid false boolean."""
|
||||
self.assertFalse(self.config.GetBoolean("section.boolfalse"))
|
||||
|
||||
def test_GetInt_undefined(self):
|
||||
"""Test GetInt on key that doesn't exist."""
|
||||
self.assertIsNone(self.config.GetInt('section.missing'))
|
||||
def test_GetInt_undefined(self):
|
||||
"""Test GetInt on key that doesn't exist."""
|
||||
self.assertIsNone(self.config.GetInt("section.missing"))
|
||||
|
||||
def test_GetInt_invalid(self):
|
||||
"""Test GetInt on invalid integer value."""
|
||||
self.assertIsNone(self.config.GetBoolean('section.intinvalid'))
|
||||
def test_GetInt_invalid(self):
|
||||
"""Test GetInt on invalid integer value."""
|
||||
self.assertIsNone(self.config.GetBoolean("section.intinvalid"))
|
||||
|
||||
def test_GetInt_valid(self):
|
||||
"""Test GetInt on valid integers."""
|
||||
TESTS = (
|
||||
('inthex', 16),
|
||||
('inthexk', 16384),
|
||||
('int', 10),
|
||||
('intk', 10240),
|
||||
('intm', 10485760),
|
||||
('intg', 10737418240),
|
||||
)
|
||||
for key, value in TESTS:
|
||||
self.assertEqual(value, self.config.GetInt('section.%s' % (key,)))
|
||||
def test_GetInt_valid(self):
|
||||
"""Test GetInt on valid integers."""
|
||||
TESTS = (
|
||||
("inthex", 16),
|
||||
("inthexk", 16384),
|
||||
("int", 10),
|
||||
("intk", 10240),
|
||||
("intm", 10485760),
|
||||
("intg", 10737418240),
|
||||
)
|
||||
for key, value in TESTS:
|
||||
self.assertEqual(value, self.config.GetInt("section.%s" % (key,)))
|
||||
|
||||
|
||||
class GitConfigReadWriteTests(unittest.TestCase):
|
||||
"""Read/write tests of the GitConfig class."""
|
||||
"""Read/write tests of the GitConfig class."""
|
||||
|
||||
def setUp(self):
|
||||
self.tmpfile = tempfile.NamedTemporaryFile()
|
||||
self.config = self.get_config()
|
||||
def setUp(self):
|
||||
self.tmpfile = tempfile.NamedTemporaryFile()
|
||||
self.config = self.get_config()
|
||||
|
||||
def get_config(self):
|
||||
"""Get a new GitConfig instance."""
|
||||
return git_config.GitConfig(self.tmpfile.name)
|
||||
def get_config(self):
|
||||
"""Get a new GitConfig instance."""
|
||||
return git_config.GitConfig(self.tmpfile.name)
|
||||
|
||||
def test_SetString(self):
|
||||
"""Test SetString behavior."""
|
||||
# Set a value.
|
||||
self.assertIsNone(self.config.GetString('foo.bar'))
|
||||
self.config.SetString('foo.bar', 'val')
|
||||
self.assertEqual('val', self.config.GetString('foo.bar'))
|
||||
def test_SetString(self):
|
||||
"""Test SetString behavior."""
|
||||
# Set a value.
|
||||
self.assertIsNone(self.config.GetString("foo.bar"))
|
||||
self.config.SetString("foo.bar", "val")
|
||||
self.assertEqual("val", self.config.GetString("foo.bar"))
|
||||
|
||||
# Make sure the value was actually written out.
|
||||
config = self.get_config()
|
||||
self.assertEqual('val', config.GetString('foo.bar'))
|
||||
# Make sure the value was actually written out.
|
||||
config = self.get_config()
|
||||
self.assertEqual("val", config.GetString("foo.bar"))
|
||||
|
||||
# Update the value.
|
||||
self.config.SetString('foo.bar', 'valll')
|
||||
self.assertEqual('valll', self.config.GetString('foo.bar'))
|
||||
config = self.get_config()
|
||||
self.assertEqual('valll', config.GetString('foo.bar'))
|
||||
# Update the value.
|
||||
self.config.SetString("foo.bar", "valll")
|
||||
self.assertEqual("valll", self.config.GetString("foo.bar"))
|
||||
config = self.get_config()
|
||||
self.assertEqual("valll", config.GetString("foo.bar"))
|
||||
|
||||
# Delete the value.
|
||||
self.config.SetString('foo.bar', None)
|
||||
self.assertIsNone(self.config.GetString('foo.bar'))
|
||||
config = self.get_config()
|
||||
self.assertIsNone(config.GetString('foo.bar'))
|
||||
# Delete the value.
|
||||
self.config.SetString("foo.bar", None)
|
||||
self.assertIsNone(self.config.GetString("foo.bar"))
|
||||
config = self.get_config()
|
||||
self.assertIsNone(config.GetString("foo.bar"))
|
||||
|
||||
def test_SetBoolean(self):
|
||||
"""Test SetBoolean behavior."""
|
||||
# Set a true value.
|
||||
self.assertIsNone(self.config.GetBoolean('foo.bar'))
|
||||
for val in (True, 1):
|
||||
self.config.SetBoolean('foo.bar', val)
|
||||
self.assertTrue(self.config.GetBoolean('foo.bar'))
|
||||
def test_SetBoolean(self):
|
||||
"""Test SetBoolean behavior."""
|
||||
# Set a true value.
|
||||
self.assertIsNone(self.config.GetBoolean("foo.bar"))
|
||||
for val in (True, 1):
|
||||
self.config.SetBoolean("foo.bar", val)
|
||||
self.assertTrue(self.config.GetBoolean("foo.bar"))
|
||||
|
||||
# Make sure the value was actually written out.
|
||||
config = self.get_config()
|
||||
self.assertTrue(config.GetBoolean('foo.bar'))
|
||||
self.assertEqual('true', config.GetString('foo.bar'))
|
||||
# Make sure the value was actually written out.
|
||||
config = self.get_config()
|
||||
self.assertTrue(config.GetBoolean("foo.bar"))
|
||||
self.assertEqual("true", config.GetString("foo.bar"))
|
||||
|
||||
# Set a false value.
|
||||
for val in (False, 0):
|
||||
self.config.SetBoolean('foo.bar', val)
|
||||
self.assertFalse(self.config.GetBoolean('foo.bar'))
|
||||
# Set a false value.
|
||||
for val in (False, 0):
|
||||
self.config.SetBoolean("foo.bar", val)
|
||||
self.assertFalse(self.config.GetBoolean("foo.bar"))
|
||||
|
||||
# Make sure the value was actually written out.
|
||||
config = self.get_config()
|
||||
self.assertFalse(config.GetBoolean('foo.bar'))
|
||||
self.assertEqual('false', config.GetString('foo.bar'))
|
||||
# Make sure the value was actually written out.
|
||||
config = self.get_config()
|
||||
self.assertFalse(config.GetBoolean("foo.bar"))
|
||||
self.assertEqual("false", config.GetString("foo.bar"))
|
||||
|
||||
# Delete the value.
|
||||
self.config.SetBoolean('foo.bar', None)
|
||||
self.assertIsNone(self.config.GetBoolean('foo.bar'))
|
||||
config = self.get_config()
|
||||
self.assertIsNone(config.GetBoolean('foo.bar'))
|
||||
# Delete the value.
|
||||
self.config.SetBoolean("foo.bar", None)
|
||||
self.assertIsNone(self.config.GetBoolean("foo.bar"))
|
||||
config = self.get_config()
|
||||
self.assertIsNone(config.GetBoolean("foo.bar"))
|
||||
|
||||
def test_GetSyncAnalysisStateData(self):
|
||||
"""Test config entries with a sync state analysis data."""
|
||||
superproject_logging_data = {}
|
||||
superproject_logging_data['test'] = False
|
||||
options = type('options', (object,), {})()
|
||||
options.verbose = 'true'
|
||||
options.mp_update = 'false'
|
||||
TESTS = (
|
||||
('superproject.test', 'false'),
|
||||
('options.verbose', 'true'),
|
||||
('options.mpupdate', 'false'),
|
||||
('main.version', '1'),
|
||||
)
|
||||
self.config.UpdateSyncAnalysisState(options, superproject_logging_data)
|
||||
sync_data = self.config.GetSyncAnalysisStateData()
|
||||
for key, value in TESTS:
|
||||
self.assertEqual(sync_data[f'{git_config.SYNC_STATE_PREFIX}{key}'], value)
|
||||
self.assertTrue(sync_data[f'{git_config.SYNC_STATE_PREFIX}main.synctime'])
|
||||
def test_GetSyncAnalysisStateData(self):
|
||||
"""Test config entries with a sync state analysis data."""
|
||||
superproject_logging_data = {}
|
||||
superproject_logging_data["test"] = False
|
||||
options = type("options", (object,), {})()
|
||||
options.verbose = "true"
|
||||
options.mp_update = "false"
|
||||
TESTS = (
|
||||
("superproject.test", "false"),
|
||||
("options.verbose", "true"),
|
||||
("options.mpupdate", "false"),
|
||||
("main.version", "1"),
|
||||
)
|
||||
self.config.UpdateSyncAnalysisState(options, superproject_logging_data)
|
||||
sync_data = self.config.GetSyncAnalysisStateData()
|
||||
for key, value in TESTS:
|
||||
self.assertEqual(
|
||||
sync_data[f"{git_config.SYNC_STATE_PREFIX}{key}"], value
|
||||
)
|
||||
self.assertTrue(
|
||||
sync_data[f"{git_config.SYNC_STATE_PREFIX}main.synctime"]
|
||||
)
|
||||
|
@ -28,297 +28,369 @@ from test_manifest_xml import sort_attributes
|
||||
|
||||
|
||||
class SuperprojectTestCase(unittest.TestCase):
|
||||
"""TestCase for the Superproject module."""
|
||||
"""TestCase for the Superproject module."""
|
||||
|
||||
PARENT_SID_KEY = 'GIT_TRACE2_PARENT_SID'
|
||||
PARENT_SID_VALUE = 'parent_sid'
|
||||
SELF_SID_REGEX = r'repo-\d+T\d+Z-.*'
|
||||
FULL_SID_REGEX = r'^%s/%s' % (PARENT_SID_VALUE, SELF_SID_REGEX)
|
||||
PARENT_SID_KEY = "GIT_TRACE2_PARENT_SID"
|
||||
PARENT_SID_VALUE = "parent_sid"
|
||||
SELF_SID_REGEX = r"repo-\d+T\d+Z-.*"
|
||||
FULL_SID_REGEX = r"^%s/%s" % (PARENT_SID_VALUE, SELF_SID_REGEX)
|
||||
|
||||
def setUp(self):
|
||||
"""Set up superproject every time."""
|
||||
self.tempdirobj = tempfile.TemporaryDirectory(prefix='repo_tests')
|
||||
self.tempdir = self.tempdirobj.name
|
||||
self.repodir = os.path.join(self.tempdir, '.repo')
|
||||
self.manifest_file = os.path.join(
|
||||
self.repodir, manifest_xml.MANIFEST_FILE_NAME)
|
||||
os.mkdir(self.repodir)
|
||||
self.platform = platform.system().lower()
|
||||
def setUp(self):
|
||||
"""Set up superproject every time."""
|
||||
self.tempdirobj = tempfile.TemporaryDirectory(prefix="repo_tests")
|
||||
self.tempdir = self.tempdirobj.name
|
||||
self.repodir = os.path.join(self.tempdir, ".repo")
|
||||
self.manifest_file = os.path.join(
|
||||
self.repodir, manifest_xml.MANIFEST_FILE_NAME
|
||||
)
|
||||
os.mkdir(self.repodir)
|
||||
self.platform = platform.system().lower()
|
||||
|
||||
# By default we initialize with the expected case where
|
||||
# repo launches us (so GIT_TRACE2_PARENT_SID is set).
|
||||
env = {
|
||||
self.PARENT_SID_KEY: self.PARENT_SID_VALUE,
|
||||
}
|
||||
self.git_event_log = git_trace2_event_log.EventLog(env=env)
|
||||
# By default we initialize with the expected case where
|
||||
# repo launches us (so GIT_TRACE2_PARENT_SID is set).
|
||||
env = {
|
||||
self.PARENT_SID_KEY: self.PARENT_SID_VALUE,
|
||||
}
|
||||
self.git_event_log = git_trace2_event_log.EventLog(env=env)
|
||||
|
||||
# The manifest parsing really wants a git repo currently.
|
||||
gitdir = os.path.join(self.repodir, 'manifests.git')
|
||||
os.mkdir(gitdir)
|
||||
with open(os.path.join(gitdir, 'config'), 'w') as fp:
|
||||
fp.write("""[remote "origin"]
|
||||
# The manifest parsing really wants a git repo currently.
|
||||
gitdir = os.path.join(self.repodir, "manifests.git")
|
||||
os.mkdir(gitdir)
|
||||
with open(os.path.join(gitdir, "config"), "w") as fp:
|
||||
fp.write(
|
||||
"""[remote "origin"]
|
||||
url = https://localhost:0/manifest
|
||||
""")
|
||||
"""
|
||||
)
|
||||
|
||||
manifest = self.getXmlManifest("""
|
||||
manifest = self.getXmlManifest(
|
||||
"""
|
||||
<manifest>
|
||||
<remote name="default-remote" fetch="http://localhost" />
|
||||
<default remote="default-remote" revision="refs/heads/main" />
|
||||
<superproject name="superproject"/>
|
||||
<project path="art" name="platform/art" groups="notdefault,platform-""" + self.platform + """
|
||||
<project path="art" name="platform/art" groups="notdefault,platform-"""
|
||||
+ self.platform
|
||||
+ """
|
||||
" /></manifest>
|
||||
""")
|
||||
self._superproject = git_superproject.Superproject(
|
||||
manifest, name='superproject',
|
||||
remote=manifest.remotes.get('default-remote').ToRemoteSpec('superproject'),
|
||||
revision='refs/heads/main')
|
||||
"""
|
||||
)
|
||||
self._superproject = git_superproject.Superproject(
|
||||
manifest,
|
||||
name="superproject",
|
||||
remote=manifest.remotes.get("default-remote").ToRemoteSpec(
|
||||
"superproject"
|
||||
),
|
||||
revision="refs/heads/main",
|
||||
)
|
||||
|
||||
def tearDown(self):
|
||||
"""Tear down superproject every time."""
|
||||
self.tempdirobj.cleanup()
|
||||
def tearDown(self):
|
||||
"""Tear down superproject every time."""
|
||||
self.tempdirobj.cleanup()
|
||||
|
||||
def getXmlManifest(self, data):
|
||||
"""Helper to initialize a manifest for testing."""
|
||||
with open(self.manifest_file, 'w') as fp:
|
||||
fp.write(data)
|
||||
return manifest_xml.XmlManifest(self.repodir, self.manifest_file)
|
||||
def getXmlManifest(self, data):
|
||||
"""Helper to initialize a manifest for testing."""
|
||||
with open(self.manifest_file, "w") as fp:
|
||||
fp.write(data)
|
||||
return manifest_xml.XmlManifest(self.repodir, self.manifest_file)
|
||||
|
||||
def verifyCommonKeys(self, log_entry, expected_event_name, full_sid=True):
|
||||
"""Helper function to verify common event log keys."""
|
||||
self.assertIn('event', log_entry)
|
||||
self.assertIn('sid', log_entry)
|
||||
self.assertIn('thread', log_entry)
|
||||
self.assertIn('time', log_entry)
|
||||
def verifyCommonKeys(self, log_entry, expected_event_name, full_sid=True):
|
||||
"""Helper function to verify common event log keys."""
|
||||
self.assertIn("event", log_entry)
|
||||
self.assertIn("sid", log_entry)
|
||||
self.assertIn("thread", log_entry)
|
||||
self.assertIn("time", log_entry)
|
||||
|
||||
# Do basic data format validation.
|
||||
self.assertEqual(expected_event_name, log_entry['event'])
|
||||
if full_sid:
|
||||
self.assertRegex(log_entry['sid'], self.FULL_SID_REGEX)
|
||||
else:
|
||||
self.assertRegex(log_entry['sid'], self.SELF_SID_REGEX)
|
||||
self.assertRegex(log_entry['time'], r'^\d+-\d+-\d+T\d+:\d+:\d+\.\d+Z$')
|
||||
# Do basic data format validation.
|
||||
self.assertEqual(expected_event_name, log_entry["event"])
|
||||
if full_sid:
|
||||
self.assertRegex(log_entry["sid"], self.FULL_SID_REGEX)
|
||||
else:
|
||||
self.assertRegex(log_entry["sid"], self.SELF_SID_REGEX)
|
||||
self.assertRegex(log_entry["time"], r"^\d+-\d+-\d+T\d+:\d+:\d+\.\d+Z$")
|
||||
|
||||
def readLog(self, log_path):
|
||||
"""Helper function to read log data into a list."""
|
||||
log_data = []
|
||||
with open(log_path, mode='rb') as f:
|
||||
for line in f:
|
||||
log_data.append(json.loads(line))
|
||||
return log_data
|
||||
def readLog(self, log_path):
|
||||
"""Helper function to read log data into a list."""
|
||||
log_data = []
|
||||
with open(log_path, mode="rb") as f:
|
||||
for line in f:
|
||||
log_data.append(json.loads(line))
|
||||
return log_data
|
||||
|
||||
def verifyErrorEvent(self):
|
||||
"""Helper to verify that error event is written."""
|
||||
def verifyErrorEvent(self):
|
||||
"""Helper to verify that error event is written."""
|
||||
|
||||
with tempfile.TemporaryDirectory(prefix='event_log_tests') as tempdir:
|
||||
log_path = self.git_event_log.Write(path=tempdir)
|
||||
self.log_data = self.readLog(log_path)
|
||||
with tempfile.TemporaryDirectory(prefix="event_log_tests") as tempdir:
|
||||
log_path = self.git_event_log.Write(path=tempdir)
|
||||
self.log_data = self.readLog(log_path)
|
||||
|
||||
self.assertEqual(len(self.log_data), 2)
|
||||
error_event = self.log_data[1]
|
||||
self.verifyCommonKeys(self.log_data[0], expected_event_name='version')
|
||||
self.verifyCommonKeys(error_event, expected_event_name='error')
|
||||
# Check for 'error' event specific fields.
|
||||
self.assertIn('msg', error_event)
|
||||
self.assertIn('fmt', error_event)
|
||||
self.assertEqual(len(self.log_data), 2)
|
||||
error_event = self.log_data[1]
|
||||
self.verifyCommonKeys(self.log_data[0], expected_event_name="version")
|
||||
self.verifyCommonKeys(error_event, expected_event_name="error")
|
||||
# Check for 'error' event specific fields.
|
||||
self.assertIn("msg", error_event)
|
||||
self.assertIn("fmt", error_event)
|
||||
|
||||
def test_superproject_get_superproject_no_superproject(self):
|
||||
"""Test with no url."""
|
||||
manifest = self.getXmlManifest("""
|
||||
def test_superproject_get_superproject_no_superproject(self):
|
||||
"""Test with no url."""
|
||||
manifest = self.getXmlManifest(
|
||||
"""
|
||||
<manifest>
|
||||
</manifest>
|
||||
""")
|
||||
self.assertIsNone(manifest.superproject)
|
||||
"""
|
||||
)
|
||||
self.assertIsNone(manifest.superproject)
|
||||
|
||||
def test_superproject_get_superproject_invalid_url(self):
|
||||
"""Test with an invalid url."""
|
||||
manifest = self.getXmlManifest("""
|
||||
def test_superproject_get_superproject_invalid_url(self):
|
||||
"""Test with an invalid url."""
|
||||
manifest = self.getXmlManifest(
|
||||
"""
|
||||
<manifest>
|
||||
<remote name="test-remote" fetch="localhost" />
|
||||
<default remote="test-remote" revision="refs/heads/main" />
|
||||
<superproject name="superproject"/>
|
||||
</manifest>
|
||||
""")
|
||||
superproject = git_superproject.Superproject(
|
||||
manifest, name='superproject',
|
||||
remote=manifest.remotes.get('test-remote').ToRemoteSpec('superproject'),
|
||||
revision='refs/heads/main')
|
||||
sync_result = superproject.Sync(self.git_event_log)
|
||||
self.assertFalse(sync_result.success)
|
||||
self.assertTrue(sync_result.fatal)
|
||||
|
||||
def test_superproject_get_superproject_invalid_branch(self):
|
||||
"""Test with an invalid branch."""
|
||||
manifest = self.getXmlManifest("""
|
||||
<manifest>
|
||||
<remote name="test-remote" fetch="localhost" />
|
||||
<default remote="test-remote" revision="refs/heads/main" />
|
||||
<superproject name="superproject"/>
|
||||
</manifest>
|
||||
""")
|
||||
self._superproject = git_superproject.Superproject(
|
||||
manifest, name='superproject',
|
||||
remote=manifest.remotes.get('test-remote').ToRemoteSpec('superproject'),
|
||||
revision='refs/heads/main')
|
||||
with mock.patch.object(self._superproject, '_branch', 'junk'):
|
||||
sync_result = self._superproject.Sync(self.git_event_log)
|
||||
self.assertFalse(sync_result.success)
|
||||
self.assertTrue(sync_result.fatal)
|
||||
self.verifyErrorEvent()
|
||||
|
||||
def test_superproject_get_superproject_mock_init(self):
|
||||
"""Test with _Init failing."""
|
||||
with mock.patch.object(self._superproject, '_Init', return_value=False):
|
||||
sync_result = self._superproject.Sync(self.git_event_log)
|
||||
self.assertFalse(sync_result.success)
|
||||
self.assertTrue(sync_result.fatal)
|
||||
|
||||
def test_superproject_get_superproject_mock_fetch(self):
|
||||
"""Test with _Fetch failing."""
|
||||
with mock.patch.object(self._superproject, '_Init', return_value=True):
|
||||
os.mkdir(self._superproject._superproject_path)
|
||||
with mock.patch.object(self._superproject, '_Fetch', return_value=False):
|
||||
sync_result = self._superproject.Sync(self.git_event_log)
|
||||
"""
|
||||
)
|
||||
superproject = git_superproject.Superproject(
|
||||
manifest,
|
||||
name="superproject",
|
||||
remote=manifest.remotes.get("test-remote").ToRemoteSpec(
|
||||
"superproject"
|
||||
),
|
||||
revision="refs/heads/main",
|
||||
)
|
||||
sync_result = superproject.Sync(self.git_event_log)
|
||||
self.assertFalse(sync_result.success)
|
||||
self.assertTrue(sync_result.fatal)
|
||||
|
||||
def test_superproject_get_all_project_commit_ids_mock_ls_tree(self):
|
||||
"""Test with LsTree being a mock."""
|
||||
data = ('120000 blob 158258bdf146f159218e2b90f8b699c4d85b5804\tAndroid.bp\x00'
|
||||
'160000 commit 2c2724cb36cd5a9cec6c852c681efc3b7c6b86ea\tart\x00'
|
||||
'160000 commit e9d25da64d8d365dbba7c8ee00fe8c4473fe9a06\tbootable/recovery\x00'
|
||||
'120000 blob acc2cbdf438f9d2141f0ae424cec1d8fc4b5d97f\tbootstrap.bash\x00'
|
||||
'160000 commit ade9b7a0d874e25fff4bf2552488825c6f111928\tbuild/bazel\x00')
|
||||
with mock.patch.object(self._superproject, '_Init', return_value=True):
|
||||
with mock.patch.object(self._superproject, '_Fetch', return_value=True):
|
||||
with mock.patch.object(self._superproject, '_LsTree', return_value=data):
|
||||
commit_ids_result = self._superproject._GetAllProjectsCommitIds()
|
||||
self.assertEqual(commit_ids_result.commit_ids, {
|
||||
'art': '2c2724cb36cd5a9cec6c852c681efc3b7c6b86ea',
|
||||
'bootable/recovery': 'e9d25da64d8d365dbba7c8ee00fe8c4473fe9a06',
|
||||
'build/bazel': 'ade9b7a0d874e25fff4bf2552488825c6f111928'
|
||||
})
|
||||
self.assertFalse(commit_ids_result.fatal)
|
||||
def test_superproject_get_superproject_invalid_branch(self):
|
||||
"""Test with an invalid branch."""
|
||||
manifest = self.getXmlManifest(
|
||||
"""
|
||||
<manifest>
|
||||
<remote name="test-remote" fetch="localhost" />
|
||||
<default remote="test-remote" revision="refs/heads/main" />
|
||||
<superproject name="superproject"/>
|
||||
</manifest>
|
||||
"""
|
||||
)
|
||||
self._superproject = git_superproject.Superproject(
|
||||
manifest,
|
||||
name="superproject",
|
||||
remote=manifest.remotes.get("test-remote").ToRemoteSpec(
|
||||
"superproject"
|
||||
),
|
||||
revision="refs/heads/main",
|
||||
)
|
||||
with mock.patch.object(self._superproject, "_branch", "junk"):
|
||||
sync_result = self._superproject.Sync(self.git_event_log)
|
||||
self.assertFalse(sync_result.success)
|
||||
self.assertTrue(sync_result.fatal)
|
||||
self.verifyErrorEvent()
|
||||
|
||||
def test_superproject_write_manifest_file(self):
|
||||
"""Test with writing manifest to a file after setting revisionId."""
|
||||
self.assertEqual(len(self._superproject._manifest.projects), 1)
|
||||
project = self._superproject._manifest.projects[0]
|
||||
project.SetRevisionId('ABCDEF')
|
||||
# Create temporary directory so that it can write the file.
|
||||
os.mkdir(self._superproject._superproject_path)
|
||||
manifest_path = self._superproject._WriteManifestFile()
|
||||
self.assertIsNotNone(manifest_path)
|
||||
with open(manifest_path, 'r') as fp:
|
||||
manifest_xml_data = fp.read()
|
||||
self.assertEqual(
|
||||
sort_attributes(manifest_xml_data),
|
||||
'<?xml version="1.0" ?><manifest>'
|
||||
'<remote fetch="http://localhost" name="default-remote"/>'
|
||||
'<default remote="default-remote" revision="refs/heads/main"/>'
|
||||
'<project groups="notdefault,platform-' + self.platform + '" '
|
||||
'name="platform/art" path="art" revision="ABCDEF" upstream="refs/heads/main"/>'
|
||||
'<superproject name="superproject"/>'
|
||||
'</manifest>')
|
||||
def test_superproject_get_superproject_mock_init(self):
|
||||
"""Test with _Init failing."""
|
||||
with mock.patch.object(self._superproject, "_Init", return_value=False):
|
||||
sync_result = self._superproject.Sync(self.git_event_log)
|
||||
self.assertFalse(sync_result.success)
|
||||
self.assertTrue(sync_result.fatal)
|
||||
|
||||
def test_superproject_update_project_revision_id(self):
|
||||
"""Test with LsTree being a mock."""
|
||||
self.assertEqual(len(self._superproject._manifest.projects), 1)
|
||||
projects = self._superproject._manifest.projects
|
||||
data = ('160000 commit 2c2724cb36cd5a9cec6c852c681efc3b7c6b86ea\tart\x00'
|
||||
'160000 commit e9d25da64d8d365dbba7c8ee00fe8c4473fe9a06\tbootable/recovery\x00')
|
||||
with mock.patch.object(self._superproject, '_Init', return_value=True):
|
||||
with mock.patch.object(self._superproject, '_Fetch', return_value=True):
|
||||
with mock.patch.object(self._superproject,
|
||||
'_LsTree',
|
||||
return_value=data):
|
||||
# Create temporary directory so that it can write the file.
|
||||
os.mkdir(self._superproject._superproject_path)
|
||||
update_result = self._superproject.UpdateProjectsRevisionId(projects, self.git_event_log)
|
||||
self.assertIsNotNone(update_result.manifest_path)
|
||||
self.assertFalse(update_result.fatal)
|
||||
with open(update_result.manifest_path, 'r') as fp:
|
||||
def test_superproject_get_superproject_mock_fetch(self):
|
||||
"""Test with _Fetch failing."""
|
||||
with mock.patch.object(self._superproject, "_Init", return_value=True):
|
||||
os.mkdir(self._superproject._superproject_path)
|
||||
with mock.patch.object(
|
||||
self._superproject, "_Fetch", return_value=False
|
||||
):
|
||||
sync_result = self._superproject.Sync(self.git_event_log)
|
||||
self.assertFalse(sync_result.success)
|
||||
self.assertTrue(sync_result.fatal)
|
||||
|
||||
def test_superproject_get_all_project_commit_ids_mock_ls_tree(self):
|
||||
"""Test with LsTree being a mock."""
|
||||
data = (
|
||||
"120000 blob 158258bdf146f159218e2b90f8b699c4d85b5804\tAndroid.bp\x00"
|
||||
"160000 commit 2c2724cb36cd5a9cec6c852c681efc3b7c6b86ea\tart\x00"
|
||||
"160000 commit e9d25da64d8d365dbba7c8ee00fe8c4473fe9a06\tbootable/recovery\x00"
|
||||
"120000 blob acc2cbdf438f9d2141f0ae424cec1d8fc4b5d97f\tbootstrap.bash\x00"
|
||||
"160000 commit ade9b7a0d874e25fff4bf2552488825c6f111928\tbuild/bazel\x00"
|
||||
)
|
||||
with mock.patch.object(self._superproject, "_Init", return_value=True):
|
||||
with mock.patch.object(
|
||||
self._superproject, "_Fetch", return_value=True
|
||||
):
|
||||
with mock.patch.object(
|
||||
self._superproject, "_LsTree", return_value=data
|
||||
):
|
||||
commit_ids_result = (
|
||||
self._superproject._GetAllProjectsCommitIds()
|
||||
)
|
||||
self.assertEqual(
|
||||
commit_ids_result.commit_ids,
|
||||
{
|
||||
"art": "2c2724cb36cd5a9cec6c852c681efc3b7c6b86ea",
|
||||
"bootable/recovery": "e9d25da64d8d365dbba7c8ee00fe8c4473fe9a06",
|
||||
"build/bazel": "ade9b7a0d874e25fff4bf2552488825c6f111928",
|
||||
},
|
||||
)
|
||||
self.assertFalse(commit_ids_result.fatal)
|
||||
|
||||
def test_superproject_write_manifest_file(self):
|
||||
"""Test with writing manifest to a file after setting revisionId."""
|
||||
self.assertEqual(len(self._superproject._manifest.projects), 1)
|
||||
project = self._superproject._manifest.projects[0]
|
||||
project.SetRevisionId("ABCDEF")
|
||||
# Create temporary directory so that it can write the file.
|
||||
os.mkdir(self._superproject._superproject_path)
|
||||
manifest_path = self._superproject._WriteManifestFile()
|
||||
self.assertIsNotNone(manifest_path)
|
||||
with open(manifest_path, "r") as fp:
|
||||
manifest_xml_data = fp.read()
|
||||
self.assertEqual(
|
||||
sort_attributes(manifest_xml_data),
|
||||
'<?xml version="1.0" ?><manifest>'
|
||||
'<remote fetch="http://localhost" name="default-remote"/>'
|
||||
'<default remote="default-remote" revision="refs/heads/main"/>'
|
||||
'<project groups="notdefault,platform-' + self.platform + '" '
|
||||
'name="platform/art" path="art" '
|
||||
'revision="2c2724cb36cd5a9cec6c852c681efc3b7c6b86ea" upstream="refs/heads/main"/>'
|
||||
'<superproject name="superproject"/>'
|
||||
'</manifest>')
|
||||
self.assertEqual(
|
||||
sort_attributes(manifest_xml_data),
|
||||
'<?xml version="1.0" ?><manifest>'
|
||||
'<remote fetch="http://localhost" name="default-remote"/>'
|
||||
'<default remote="default-remote" revision="refs/heads/main"/>'
|
||||
'<project groups="notdefault,platform-' + self.platform + '" '
|
||||
'name="platform/art" path="art" revision="ABCDEF" upstream="refs/heads/main"/>'
|
||||
'<superproject name="superproject"/>'
|
||||
"</manifest>",
|
||||
)
|
||||
|
||||
def test_superproject_update_project_revision_id_no_superproject_tag(self):
|
||||
"""Test update of commit ids of a manifest without superproject tag."""
|
||||
manifest = self.getXmlManifest("""
|
||||
def test_superproject_update_project_revision_id(self):
|
||||
"""Test with LsTree being a mock."""
|
||||
self.assertEqual(len(self._superproject._manifest.projects), 1)
|
||||
projects = self._superproject._manifest.projects
|
||||
data = (
|
||||
"160000 commit 2c2724cb36cd5a9cec6c852c681efc3b7c6b86ea\tart\x00"
|
||||
"160000 commit e9d25da64d8d365dbba7c8ee00fe8c4473fe9a06\tbootable/recovery\x00"
|
||||
)
|
||||
with mock.patch.object(self._superproject, "_Init", return_value=True):
|
||||
with mock.patch.object(
|
||||
self._superproject, "_Fetch", return_value=True
|
||||
):
|
||||
with mock.patch.object(
|
||||
self._superproject, "_LsTree", return_value=data
|
||||
):
|
||||
# Create temporary directory so that it can write the file.
|
||||
os.mkdir(self._superproject._superproject_path)
|
||||
update_result = self._superproject.UpdateProjectsRevisionId(
|
||||
projects, self.git_event_log
|
||||
)
|
||||
self.assertIsNotNone(update_result.manifest_path)
|
||||
self.assertFalse(update_result.fatal)
|
||||
with open(update_result.manifest_path, "r") as fp:
|
||||
manifest_xml_data = fp.read()
|
||||
self.assertEqual(
|
||||
sort_attributes(manifest_xml_data),
|
||||
'<?xml version="1.0" ?><manifest>'
|
||||
'<remote fetch="http://localhost" name="default-remote"/>'
|
||||
'<default remote="default-remote" revision="refs/heads/main"/>'
|
||||
'<project groups="notdefault,platform-'
|
||||
+ self.platform
|
||||
+ '" '
|
||||
'name="platform/art" path="art" '
|
||||
'revision="2c2724cb36cd5a9cec6c852c681efc3b7c6b86ea" upstream="refs/heads/main"/>'
|
||||
'<superproject name="superproject"/>'
|
||||
"</manifest>",
|
||||
)
|
||||
|
||||
def test_superproject_update_project_revision_id_no_superproject_tag(self):
|
||||
"""Test update of commit ids of a manifest without superproject tag."""
|
||||
manifest = self.getXmlManifest(
|
||||
"""
|
||||
<manifest>
|
||||
<remote name="default-remote" fetch="http://localhost" />
|
||||
<default remote="default-remote" revision="refs/heads/main" />
|
||||
<project name="test-name"/>
|
||||
</manifest>
|
||||
""")
|
||||
self.maxDiff = None
|
||||
self.assertIsNone(manifest.superproject)
|
||||
self.assertEqual(
|
||||
sort_attributes(manifest.ToXml().toxml()),
|
||||
'<?xml version="1.0" ?><manifest>'
|
||||
'<remote fetch="http://localhost" name="default-remote"/>'
|
||||
'<default remote="default-remote" revision="refs/heads/main"/>'
|
||||
'<project name="test-name"/>'
|
||||
'</manifest>')
|
||||
"""
|
||||
)
|
||||
self.maxDiff = None
|
||||
self.assertIsNone(manifest.superproject)
|
||||
self.assertEqual(
|
||||
sort_attributes(manifest.ToXml().toxml()),
|
||||
'<?xml version="1.0" ?><manifest>'
|
||||
'<remote fetch="http://localhost" name="default-remote"/>'
|
||||
'<default remote="default-remote" revision="refs/heads/main"/>'
|
||||
'<project name="test-name"/>'
|
||||
"</manifest>",
|
||||
)
|
||||
|
||||
def test_superproject_update_project_revision_id_from_local_manifest_group(self):
|
||||
"""Test update of commit ids of a manifest that have local manifest no superproject group."""
|
||||
local_group = manifest_xml.LOCAL_MANIFEST_GROUP_PREFIX + ':local'
|
||||
manifest = self.getXmlManifest("""
|
||||
def test_superproject_update_project_revision_id_from_local_manifest_group(
|
||||
self,
|
||||
):
|
||||
"""Test update of commit ids of a manifest that have local manifest no superproject group."""
|
||||
local_group = manifest_xml.LOCAL_MANIFEST_GROUP_PREFIX + ":local"
|
||||
manifest = self.getXmlManifest(
|
||||
"""
|
||||
<manifest>
|
||||
<remote name="default-remote" fetch="http://localhost" />
|
||||
<remote name="goog" fetch="http://localhost2" />
|
||||
<default remote="default-remote" revision="refs/heads/main" />
|
||||
<superproject name="superproject"/>
|
||||
<project path="vendor/x" name="platform/vendor/x" remote="goog"
|
||||
groups=\"""" + local_group + """
|
||||
groups=\""""
|
||||
+ local_group
|
||||
+ """
|
||||
" revision="master-with-vendor" clone-depth="1" />
|
||||
<project path="art" name="platform/art" groups="notdefault,platform-""" + self.platform + """
|
||||
<project path="art" name="platform/art" groups="notdefault,platform-"""
|
||||
+ self.platform
|
||||
+ """
|
||||
" /></manifest>
|
||||
""")
|
||||
self.maxDiff = None
|
||||
self._superproject = git_superproject.Superproject(
|
||||
manifest, name='superproject',
|
||||
remote=manifest.remotes.get('default-remote').ToRemoteSpec('superproject'),
|
||||
revision='refs/heads/main')
|
||||
self.assertEqual(len(self._superproject._manifest.projects), 2)
|
||||
projects = self._superproject._manifest.projects
|
||||
data = ('160000 commit 2c2724cb36cd5a9cec6c852c681efc3b7c6b86ea\tart\x00')
|
||||
with mock.patch.object(self._superproject, '_Init', return_value=True):
|
||||
with mock.patch.object(self._superproject, '_Fetch', return_value=True):
|
||||
with mock.patch.object(self._superproject,
|
||||
'_LsTree',
|
||||
return_value=data):
|
||||
# Create temporary directory so that it can write the file.
|
||||
os.mkdir(self._superproject._superproject_path)
|
||||
update_result = self._superproject.UpdateProjectsRevisionId(projects, self.git_event_log)
|
||||
self.assertIsNotNone(update_result.manifest_path)
|
||||
self.assertFalse(update_result.fatal)
|
||||
with open(update_result.manifest_path, 'r') as fp:
|
||||
manifest_xml_data = fp.read()
|
||||
# Verify platform/vendor/x's project revision hasn't changed.
|
||||
self.assertEqual(
|
||||
sort_attributes(manifest_xml_data),
|
||||
'<?xml version="1.0" ?><manifest>'
|
||||
'<remote fetch="http://localhost" name="default-remote"/>'
|
||||
'<remote fetch="http://localhost2" name="goog"/>'
|
||||
'<default remote="default-remote" revision="refs/heads/main"/>'
|
||||
'<project groups="notdefault,platform-' + self.platform + '" '
|
||||
'name="platform/art" path="art" '
|
||||
'revision="2c2724cb36cd5a9cec6c852c681efc3b7c6b86ea" upstream="refs/heads/main"/>'
|
||||
'<superproject name="superproject"/>'
|
||||
'</manifest>')
|
||||
"""
|
||||
)
|
||||
self.maxDiff = None
|
||||
self._superproject = git_superproject.Superproject(
|
||||
manifest,
|
||||
name="superproject",
|
||||
remote=manifest.remotes.get("default-remote").ToRemoteSpec(
|
||||
"superproject"
|
||||
),
|
||||
revision="refs/heads/main",
|
||||
)
|
||||
self.assertEqual(len(self._superproject._manifest.projects), 2)
|
||||
projects = self._superproject._manifest.projects
|
||||
data = "160000 commit 2c2724cb36cd5a9cec6c852c681efc3b7c6b86ea\tart\x00"
|
||||
with mock.patch.object(self._superproject, "_Init", return_value=True):
|
||||
with mock.patch.object(
|
||||
self._superproject, "_Fetch", return_value=True
|
||||
):
|
||||
with mock.patch.object(
|
||||
self._superproject, "_LsTree", return_value=data
|
||||
):
|
||||
# Create temporary directory so that it can write the file.
|
||||
os.mkdir(self._superproject._superproject_path)
|
||||
update_result = self._superproject.UpdateProjectsRevisionId(
|
||||
projects, self.git_event_log
|
||||
)
|
||||
self.assertIsNotNone(update_result.manifest_path)
|
||||
self.assertFalse(update_result.fatal)
|
||||
with open(update_result.manifest_path, "r") as fp:
|
||||
manifest_xml_data = fp.read()
|
||||
# Verify platform/vendor/x's project revision hasn't
|
||||
# changed.
|
||||
self.assertEqual(
|
||||
sort_attributes(manifest_xml_data),
|
||||
'<?xml version="1.0" ?><manifest>'
|
||||
'<remote fetch="http://localhost" name="default-remote"/>'
|
||||
'<remote fetch="http://localhost2" name="goog"/>'
|
||||
'<default remote="default-remote" revision="refs/heads/main"/>'
|
||||
'<project groups="notdefault,platform-'
|
||||
+ self.platform
|
||||
+ '" '
|
||||
'name="platform/art" path="art" '
|
||||
'revision="2c2724cb36cd5a9cec6c852c681efc3b7c6b86ea" upstream="refs/heads/main"/>'
|
||||
'<superproject name="superproject"/>'
|
||||
"</manifest>",
|
||||
)
|
||||
|
||||
def test_superproject_update_project_revision_id_with_pinned_manifest(self):
|
||||
"""Test update of commit ids of a pinned manifest."""
|
||||
manifest = self.getXmlManifest("""
|
||||
def test_superproject_update_project_revision_id_with_pinned_manifest(self):
|
||||
"""Test update of commit ids of a pinned manifest."""
|
||||
manifest = self.getXmlManifest(
|
||||
"""
|
||||
<manifest>
|
||||
<remote name="default-remote" fetch="http://localhost" />
|
||||
<default remote="default-remote" revision="refs/heads/main" />
|
||||
@ -326,80 +398,132 @@ class SuperprojectTestCase(unittest.TestCase):
|
||||
<project path="vendor/x" name="platform/vendor/x" revision="" />
|
||||
<project path="vendor/y" name="platform/vendor/y"
|
||||
revision="52d3c9f7c107839ece2319d077de0cd922aa9d8f" />
|
||||
<project path="art" name="platform/art" groups="notdefault,platform-""" + self.platform + """
|
||||
<project path="art" name="platform/art" groups="notdefault,platform-"""
|
||||
+ self.platform
|
||||
+ """
|
||||
" /></manifest>
|
||||
""")
|
||||
self.maxDiff = None
|
||||
self._superproject = git_superproject.Superproject(
|
||||
manifest, name='superproject',
|
||||
remote=manifest.remotes.get('default-remote').ToRemoteSpec('superproject'),
|
||||
revision='refs/heads/main')
|
||||
self.assertEqual(len(self._superproject._manifest.projects), 3)
|
||||
projects = self._superproject._manifest.projects
|
||||
data = ('160000 commit 2c2724cb36cd5a9cec6c852c681efc3b7c6b86ea\tart\x00'
|
||||
'160000 commit e9d25da64d8d365dbba7c8ee00fe8c4473fe9a06\tvendor/x\x00')
|
||||
with mock.patch.object(self._superproject, '_Init', return_value=True):
|
||||
with mock.patch.object(self._superproject, '_Fetch', return_value=True):
|
||||
with mock.patch.object(self._superproject,
|
||||
'_LsTree',
|
||||
return_value=data):
|
||||
# Create temporary directory so that it can write the file.
|
||||
os.mkdir(self._superproject._superproject_path)
|
||||
update_result = self._superproject.UpdateProjectsRevisionId(projects, self.git_event_log)
|
||||
self.assertIsNotNone(update_result.manifest_path)
|
||||
self.assertFalse(update_result.fatal)
|
||||
with open(update_result.manifest_path, 'r') as fp:
|
||||
manifest_xml_data = fp.read()
|
||||
# Verify platform/vendor/x's project revision hasn't changed.
|
||||
self.assertEqual(
|
||||
sort_attributes(manifest_xml_data),
|
||||
'<?xml version="1.0" ?><manifest>'
|
||||
'<remote fetch="http://localhost" name="default-remote"/>'
|
||||
'<default remote="default-remote" revision="refs/heads/main"/>'
|
||||
'<project groups="notdefault,platform-' + self.platform + '" '
|
||||
'name="platform/art" path="art" '
|
||||
'revision="2c2724cb36cd5a9cec6c852c681efc3b7c6b86ea" upstream="refs/heads/main"/>'
|
||||
'<project name="platform/vendor/x" path="vendor/x" '
|
||||
'revision="e9d25da64d8d365dbba7c8ee00fe8c4473fe9a06" upstream="refs/heads/main"/>'
|
||||
'<project name="platform/vendor/y" path="vendor/y" '
|
||||
'revision="52d3c9f7c107839ece2319d077de0cd922aa9d8f"/>'
|
||||
'<superproject name="superproject"/>'
|
||||
'</manifest>')
|
||||
"""
|
||||
)
|
||||
self.maxDiff = None
|
||||
self._superproject = git_superproject.Superproject(
|
||||
manifest,
|
||||
name="superproject",
|
||||
remote=manifest.remotes.get("default-remote").ToRemoteSpec(
|
||||
"superproject"
|
||||
),
|
||||
revision="refs/heads/main",
|
||||
)
|
||||
self.assertEqual(len(self._superproject._manifest.projects), 3)
|
||||
projects = self._superproject._manifest.projects
|
||||
data = (
|
||||
"160000 commit 2c2724cb36cd5a9cec6c852c681efc3b7c6b86ea\tart\x00"
|
||||
"160000 commit e9d25da64d8d365dbba7c8ee00fe8c4473fe9a06\tvendor/x\x00"
|
||||
)
|
||||
with mock.patch.object(self._superproject, "_Init", return_value=True):
|
||||
with mock.patch.object(
|
||||
self._superproject, "_Fetch", return_value=True
|
||||
):
|
||||
with mock.patch.object(
|
||||
self._superproject, "_LsTree", return_value=data
|
||||
):
|
||||
# Create temporary directory so that it can write the file.
|
||||
os.mkdir(self._superproject._superproject_path)
|
||||
update_result = self._superproject.UpdateProjectsRevisionId(
|
||||
projects, self.git_event_log
|
||||
)
|
||||
self.assertIsNotNone(update_result.manifest_path)
|
||||
self.assertFalse(update_result.fatal)
|
||||
with open(update_result.manifest_path, "r") as fp:
|
||||
manifest_xml_data = fp.read()
|
||||
# Verify platform/vendor/x's project revision hasn't
|
||||
# changed.
|
||||
self.assertEqual(
|
||||
sort_attributes(manifest_xml_data),
|
||||
'<?xml version="1.0" ?><manifest>'
|
||||
'<remote fetch="http://localhost" name="default-remote"/>'
|
||||
'<default remote="default-remote" revision="refs/heads/main"/>'
|
||||
'<project groups="notdefault,platform-'
|
||||
+ self.platform
|
||||
+ '" '
|
||||
'name="platform/art" path="art" '
|
||||
'revision="2c2724cb36cd5a9cec6c852c681efc3b7c6b86ea" upstream="refs/heads/main"/>'
|
||||
'<project name="platform/vendor/x" path="vendor/x" '
|
||||
'revision="e9d25da64d8d365dbba7c8ee00fe8c4473fe9a06" upstream="refs/heads/main"/>'
|
||||
'<project name="platform/vendor/y" path="vendor/y" '
|
||||
'revision="52d3c9f7c107839ece2319d077de0cd922aa9d8f"/>'
|
||||
'<superproject name="superproject"/>'
|
||||
"</manifest>",
|
||||
)
|
||||
|
||||
def test_Fetch(self):
|
||||
manifest = self.getXmlManifest("""
|
||||
def test_Fetch(self):
|
||||
manifest = self.getXmlManifest(
|
||||
"""
|
||||
<manifest>
|
||||
<remote name="default-remote" fetch="http://localhost" />
|
||||
<default remote="default-remote" revision="refs/heads/main" />
|
||||
<superproject name="superproject"/>
|
||||
" /></manifest>
|
||||
""")
|
||||
self.maxDiff = None
|
||||
self._superproject = git_superproject.Superproject(
|
||||
manifest, name='superproject',
|
||||
remote=manifest.remotes.get('default-remote').ToRemoteSpec('superproject'),
|
||||
revision='refs/heads/main')
|
||||
os.mkdir(self._superproject._superproject_path)
|
||||
os.mkdir(self._superproject._work_git)
|
||||
with mock.patch.object(self._superproject, '_Init', return_value=True):
|
||||
with mock.patch('git_superproject.GitCommand', autospec=True) as mock_git_command:
|
||||
with mock.patch('git_superproject.GitRefs.get', autospec=True) as mock_git_refs:
|
||||
instance = mock_git_command.return_value
|
||||
instance.Wait.return_value = 0
|
||||
mock_git_refs.side_effect = ['', '1234']
|
||||
"""
|
||||
)
|
||||
self.maxDiff = None
|
||||
self._superproject = git_superproject.Superproject(
|
||||
manifest,
|
||||
name="superproject",
|
||||
remote=manifest.remotes.get("default-remote").ToRemoteSpec(
|
||||
"superproject"
|
||||
),
|
||||
revision="refs/heads/main",
|
||||
)
|
||||
os.mkdir(self._superproject._superproject_path)
|
||||
os.mkdir(self._superproject._work_git)
|
||||
with mock.patch.object(self._superproject, "_Init", return_value=True):
|
||||
with mock.patch(
|
||||
"git_superproject.GitCommand", autospec=True
|
||||
) as mock_git_command:
|
||||
with mock.patch(
|
||||
"git_superproject.GitRefs.get", autospec=True
|
||||
) as mock_git_refs:
|
||||
instance = mock_git_command.return_value
|
||||
instance.Wait.return_value = 0
|
||||
mock_git_refs.side_effect = ["", "1234"]
|
||||
|
||||
self.assertTrue(self._superproject._Fetch())
|
||||
self.assertEqual(mock_git_command.call_args.args,(None, [
|
||||
'fetch', 'http://localhost/superproject', '--depth', '1',
|
||||
'--force', '--no-tags', '--filter', 'blob:none',
|
||||
'refs/heads/main:refs/heads/main'
|
||||
]))
|
||||
self.assertTrue(self._superproject._Fetch())
|
||||
self.assertEqual(
|
||||
mock_git_command.call_args.args,
|
||||
(
|
||||
None,
|
||||
[
|
||||
"fetch",
|
||||
"http://localhost/superproject",
|
||||
"--depth",
|
||||
"1",
|
||||
"--force",
|
||||
"--no-tags",
|
||||
"--filter",
|
||||
"blob:none",
|
||||
"refs/heads/main:refs/heads/main",
|
||||
],
|
||||
),
|
||||
)
|
||||
|
||||
# If branch for revision exists, set as --negotiation-tip.
|
||||
self.assertTrue(self._superproject._Fetch())
|
||||
self.assertEqual(mock_git_command.call_args.args,(None, [
|
||||
'fetch', 'http://localhost/superproject', '--depth', '1',
|
||||
'--force', '--no-tags', '--filter', 'blob:none',
|
||||
'--negotiation-tip', '1234',
|
||||
'refs/heads/main:refs/heads/main'
|
||||
]))
|
||||
# If branch for revision exists, set as --negotiation-tip.
|
||||
self.assertTrue(self._superproject._Fetch())
|
||||
self.assertEqual(
|
||||
mock_git_command.call_args.args,
|
||||
(
|
||||
None,
|
||||
[
|
||||
"fetch",
|
||||
"http://localhost/superproject",
|
||||
"--depth",
|
||||
"1",
|
||||
"--force",
|
||||
"--no-tags",
|
||||
"--filter",
|
||||
"blob:none",
|
||||
"--negotiation-tip",
|
||||
"1234",
|
||||
"refs/heads/main:refs/heads/main",
|
||||
],
|
||||
),
|
||||
)
|
||||
|
@@ -27,361 +27,382 @@ import platform_utils
|
||||
|
||||
|
||||
def serverLoggingThread(socket_path, server_ready, received_traces):
|
||||
"""Helper function to receive logs over a Unix domain socket.
|
||||
"""Helper function to receive logs over a Unix domain socket.
|
||||
|
||||
Reads messages from the provided socket and appends them to received_traces.
|
||||
Reads messages from the provided socket and appends them to
|
||||
received_traces.
|
||||
|
||||
Args:
|
||||
socket_path: path to a Unix domain socket on which to listen for traces
|
||||
server_ready: a threading.Condition used to signal to the caller that this thread is ready to
|
||||
accept connections
|
||||
received_traces: a list to which received traces will be appended (after decoding to a utf-8
|
||||
string).
|
||||
"""
|
||||
platform_utils.remove(socket_path, missing_ok=True)
|
||||
data = b''
|
||||
with socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) as sock:
|
||||
sock.bind(socket_path)
|
||||
sock.listen(0)
|
||||
with server_ready:
|
||||
server_ready.notify()
|
||||
with sock.accept()[0] as conn:
|
||||
while True:
|
||||
recved = conn.recv(4096)
|
||||
if not recved:
|
||||
break
|
||||
data += recved
|
||||
received_traces.extend(data.decode('utf-8').splitlines())
|
||||
Args:
|
||||
socket_path: path to a Unix domain socket on which to listen for traces
|
||||
server_ready: a threading.Condition used to signal to the caller that
|
||||
this thread is ready to accept connections
|
||||
received_traces: a list to which received traces will be appended (after
|
||||
decoding to a utf-8 string).
|
||||
"""
|
||||
platform_utils.remove(socket_path, missing_ok=True)
|
||||
data = b""
|
||||
with socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) as sock:
|
||||
sock.bind(socket_path)
|
||||
sock.listen(0)
|
||||
with server_ready:
|
||||
server_ready.notify()
|
||||
with sock.accept()[0] as conn:
|
||||
while True:
|
||||
recved = conn.recv(4096)
|
||||
if not recved:
|
||||
break
|
||||
data += recved
|
||||
received_traces.extend(data.decode("utf-8").splitlines())
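For orientation, here is a minimal sketch of the client side that such a listener would receive data from; it is not part of this change, and the socket path and event payload are made up. Each JSON event is written as one newline-terminated line, which is what the received_traces.extend(...splitlines()) call above relies on.

# Illustrative sketch only (hypothetical socket path and payload); not part of the change.
import json
import socket

def send_trace(socket_path, event):
    """Send one JSON-encoded trace event, newline-terminated, over AF_UNIX."""
    with socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) as sock:
        sock.connect(socket_path)
        sock.sendall((json.dumps(event) + "\n").encode("utf-8"))

# Example: send_trace("/tmp/server.sock", {"event": "start", "argv": ["repo", "sync"]})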
|
||||
|
||||
|
||||
class EventLogTestCase(unittest.TestCase):
|
||||
"""TestCase for the EventLog module."""
|
||||
"""TestCase for the EventLog module."""
|
||||
|
||||
PARENT_SID_KEY = 'GIT_TRACE2_PARENT_SID'
|
||||
PARENT_SID_VALUE = 'parent_sid'
|
||||
SELF_SID_REGEX = r'repo-\d+T\d+Z-.*'
|
||||
FULL_SID_REGEX = r'^%s/%s' % (PARENT_SID_VALUE, SELF_SID_REGEX)
|
||||
PARENT_SID_KEY = "GIT_TRACE2_PARENT_SID"
|
||||
PARENT_SID_VALUE = "parent_sid"
|
||||
SELF_SID_REGEX = r"repo-\d+T\d+Z-.*"
|
||||
FULL_SID_REGEX = r"^%s/%s" % (PARENT_SID_VALUE, SELF_SID_REGEX)
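For context, a quick illustration of the session-id shape these regexes describe; the timestamp and suffix below are invented rather than taken from a real run.

# Illustrative sketch only; the sample sid is hypothetical.
import re
sample_sid = "parent_sid/repo-20230213T013412Z-Pabc123"
assert re.match(r"^parent_sid/repo-\d+T\d+Z-.*", sample_sid)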
|
||||
|
||||
def setUp(self):
|
||||
"""Load the event_log module every time."""
|
||||
self._event_log_module = None
|
||||
# By default we initialize with the expected case where
|
||||
# repo launches us (so GIT_TRACE2_PARENT_SID is set).
|
||||
env = {
|
||||
self.PARENT_SID_KEY: self.PARENT_SID_VALUE,
|
||||
}
|
||||
self._event_log_module = git_trace2_event_log.EventLog(env=env)
|
||||
self._log_data = None
|
||||
def setUp(self):
|
||||
"""Load the event_log module every time."""
|
||||
self._event_log_module = None
|
||||
# By default we initialize with the expected case where
|
||||
# repo launches us (so GIT_TRACE2_PARENT_SID is set).
|
||||
env = {
|
||||
self.PARENT_SID_KEY: self.PARENT_SID_VALUE,
|
||||
}
|
||||
self._event_log_module = git_trace2_event_log.EventLog(env=env)
|
||||
self._log_data = None
|
||||
|
||||
def verifyCommonKeys(self, log_entry, expected_event_name=None, full_sid=True):
|
||||
"""Helper function to verify common event log keys."""
|
||||
self.assertIn('event', log_entry)
|
||||
self.assertIn('sid', log_entry)
|
||||
self.assertIn('thread', log_entry)
|
||||
self.assertIn('time', log_entry)
|
||||
def verifyCommonKeys(
|
||||
self, log_entry, expected_event_name=None, full_sid=True
|
||||
):
|
||||
"""Helper function to verify common event log keys."""
|
||||
self.assertIn("event", log_entry)
|
||||
self.assertIn("sid", log_entry)
|
||||
self.assertIn("thread", log_entry)
|
||||
self.assertIn("time", log_entry)
|
||||
|
||||
# Do basic data format validation.
|
||||
if expected_event_name:
|
||||
self.assertEqual(expected_event_name, log_entry['event'])
|
||||
if full_sid:
|
||||
self.assertRegex(log_entry['sid'], self.FULL_SID_REGEX)
|
||||
else:
|
||||
self.assertRegex(log_entry['sid'], self.SELF_SID_REGEX)
|
||||
self.assertRegex(log_entry['time'], r'^\d+-\d+-\d+T\d+:\d+:\d+\.\d+Z$')
|
||||
# Do basic data format validation.
|
||||
if expected_event_name:
|
||||
self.assertEqual(expected_event_name, log_entry["event"])
|
||||
if full_sid:
|
||||
self.assertRegex(log_entry["sid"], self.FULL_SID_REGEX)
|
||||
else:
|
||||
self.assertRegex(log_entry["sid"], self.SELF_SID_REGEX)
|
||||
self.assertRegex(log_entry["time"], r"^\d+-\d+-\d+T\d+:\d+:\d+\.\d+Z$")
|
||||
|
||||
def readLog(self, log_path):
|
||||
"""Helper function to read log data into a list."""
|
||||
log_data = []
|
||||
with open(log_path, mode='rb') as f:
|
||||
for line in f:
|
||||
log_data.append(json.loads(line))
|
||||
return log_data
|
||||
def readLog(self, log_path):
|
||||
"""Helper function to read log data into a list."""
|
||||
log_data = []
|
||||
with open(log_path, mode="rb") as f:
|
||||
for line in f:
|
||||
log_data.append(json.loads(line))
|
||||
return log_data
|
||||
|
||||
def remove_prefix(self, s, prefix):
|
||||
"""Return a copy string after removing |prefix| from |s|, if present or the original string."""
|
||||
if s.startswith(prefix):
|
||||
return s[len(prefix):]
|
||||
else:
|
||||
return s
|
||||
def remove_prefix(self, s, prefix):
|
||||
"""Return a copy string after removing |prefix| from |s|, if present or
|
||||
the original string."""
|
||||
if s.startswith(prefix):
|
||||
return s[len(prefix) :]
|
||||
else:
|
||||
return s
|
||||
|
||||
def test_initial_state_with_parent_sid(self):
|
||||
"""Test initial state when 'GIT_TRACE2_PARENT_SID' is set by parent."""
|
||||
self.assertRegex(self._event_log_module.full_sid, self.FULL_SID_REGEX)
|
||||
def test_initial_state_with_parent_sid(self):
|
||||
"""Test initial state when 'GIT_TRACE2_PARENT_SID' is set by parent."""
|
||||
self.assertRegex(self._event_log_module.full_sid, self.FULL_SID_REGEX)
|
||||
|
||||
def test_initial_state_no_parent_sid(self):
|
||||
"""Test initial state when 'GIT_TRACE2_PARENT_SID' is not set."""
|
||||
# Setup an empty environment dict (no parent sid).
|
||||
self._event_log_module = git_trace2_event_log.EventLog(env={})
|
||||
self.assertRegex(self._event_log_module.full_sid, self.SELF_SID_REGEX)
|
||||
def test_initial_state_no_parent_sid(self):
|
||||
"""Test initial state when 'GIT_TRACE2_PARENT_SID' is not set."""
|
||||
# Setup an empty environment dict (no parent sid).
|
||||
self._event_log_module = git_trace2_event_log.EventLog(env={})
|
||||
self.assertRegex(self._event_log_module.full_sid, self.SELF_SID_REGEX)
|
||||
|
||||
def test_version_event(self):
|
||||
"""Test 'version' event data is valid.
|
||||
def test_version_event(self):
|
||||
"""Test 'version' event data is valid.
|
||||
|
||||
Verify that the 'version' event is written even when no other
|
||||
events are added.
|
||||
Verify that the 'version' event is written even when no other
|
||||
events are added.
|
||||
|
||||
Expected event log:
|
||||
<version event>
|
||||
"""
|
||||
with tempfile.TemporaryDirectory(prefix='event_log_tests') as tempdir:
|
||||
log_path = self._event_log_module.Write(path=tempdir)
|
||||
self._log_data = self.readLog(log_path)
|
||||
Expected event log:
|
||||
<version event>
|
||||
"""
|
||||
with tempfile.TemporaryDirectory(prefix="event_log_tests") as tempdir:
|
||||
log_path = self._event_log_module.Write(path=tempdir)
|
||||
self._log_data = self.readLog(log_path)
|
||||
|
||||
# A log with no added events should only have the version entry.
|
||||
self.assertEqual(len(self._log_data), 1)
|
||||
version_event = self._log_data[0]
|
||||
self.verifyCommonKeys(version_event, expected_event_name='version')
|
||||
# Check for 'version' event specific fields.
|
||||
self.assertIn('evt', version_event)
|
||||
self.assertIn('exe', version_event)
|
||||
# Verify "evt" version field is a string.
|
||||
self.assertIsInstance(version_event['evt'], str)
|
||||
# A log with no added events should only have the version entry.
|
||||
self.assertEqual(len(self._log_data), 1)
|
||||
version_event = self._log_data[0]
|
||||
self.verifyCommonKeys(version_event, expected_event_name="version")
|
||||
# Check for 'version' event specific fields.
|
||||
self.assertIn("evt", version_event)
|
||||
self.assertIn("exe", version_event)
|
||||
# Verify "evt" version field is a string.
|
||||
self.assertIsInstance(version_event["evt"], str)
|
||||
|
||||
def test_start_event(self):
|
||||
"""Test and validate 'start' event data is valid.
|
||||
|
||||
Expected event log:
|
||||
<version event>
|
||||
<start event>
|
||||
"""
|
||||
self._event_log_module.StartEvent()
|
||||
with tempfile.TemporaryDirectory(prefix='event_log_tests') as tempdir:
|
||||
log_path = self._event_log_module.Write(path=tempdir)
|
||||
self._log_data = self.readLog(log_path)
|
||||
|
||||
self.assertEqual(len(self._log_data), 2)
|
||||
start_event = self._log_data[1]
|
||||
self.verifyCommonKeys(self._log_data[0], expected_event_name='version')
|
||||
self.verifyCommonKeys(start_event, expected_event_name='start')
|
||||
# Check for 'start' event specific fields.
|
||||
self.assertIn('argv', start_event)
|
||||
self.assertTrue(isinstance(start_event['argv'], list))
|
||||
|
||||
def test_exit_event_result_none(self):
|
||||
"""Test 'exit' event data is valid when result is None.
|
||||
|
||||
We expect None result to be converted to 0 in the exit event data.
|
||||
|
||||
Expected event log:
|
||||
<version event>
|
||||
<exit event>
|
||||
"""
|
||||
self._event_log_module.ExitEvent(None)
|
||||
with tempfile.TemporaryDirectory(prefix='event_log_tests') as tempdir:
|
||||
log_path = self._event_log_module.Write(path=tempdir)
|
||||
self._log_data = self.readLog(log_path)
|
||||
|
||||
self.assertEqual(len(self._log_data), 2)
|
||||
exit_event = self._log_data[1]
|
||||
self.verifyCommonKeys(self._log_data[0], expected_event_name='version')
|
||||
self.verifyCommonKeys(exit_event, expected_event_name='exit')
|
||||
# Check for 'exit' event specific fields.
|
||||
self.assertIn('code', exit_event)
|
||||
# 'None' result should convert to 0 (successful) return code.
|
||||
self.assertEqual(exit_event['code'], 0)
|
||||
|
||||
def test_exit_event_result_integer(self):
|
||||
"""Test 'exit' event data is valid when result is an integer.
|
||||
|
||||
Expected event log:
|
||||
<version event>
|
||||
<exit event>
|
||||
"""
|
||||
self._event_log_module.ExitEvent(2)
|
||||
with tempfile.TemporaryDirectory(prefix='event_log_tests') as tempdir:
|
||||
log_path = self._event_log_module.Write(path=tempdir)
|
||||
self._log_data = self.readLog(log_path)
|
||||
|
||||
self.assertEqual(len(self._log_data), 2)
|
||||
exit_event = self._log_data[1]
|
||||
self.verifyCommonKeys(self._log_data[0], expected_event_name='version')
|
||||
self.verifyCommonKeys(exit_event, expected_event_name='exit')
|
||||
# Check for 'exit' event specific fields.
|
||||
self.assertIn('code', exit_event)
|
||||
self.assertEqual(exit_event['code'], 2)
|
||||
|
||||
def test_command_event(self):
|
||||
"""Test and validate 'command' event data is valid.
|
||||
|
||||
Expected event log:
|
||||
<version event>
|
||||
<command event>
|
||||
"""
|
||||
name = 'repo'
|
||||
subcommands = ['init' 'this']
|
||||
self._event_log_module.CommandEvent(name='repo', subcommands=subcommands)
|
||||
with tempfile.TemporaryDirectory(prefix='event_log_tests') as tempdir:
|
||||
log_path = self._event_log_module.Write(path=tempdir)
|
||||
self._log_data = self.readLog(log_path)
|
||||
|
||||
self.assertEqual(len(self._log_data), 2)
|
||||
command_event = self._log_data[1]
|
||||
self.verifyCommonKeys(self._log_data[0], expected_event_name='version')
|
||||
self.verifyCommonKeys(command_event, expected_event_name='command')
|
||||
# Check for 'command' event specific fields.
|
||||
self.assertIn('name', command_event)
|
||||
self.assertIn('subcommands', command_event)
|
||||
self.assertEqual(command_event['name'], name)
|
||||
self.assertEqual(command_event['subcommands'], subcommands)
|
||||
|
||||
def test_def_params_event_repo_config(self):
|
||||
"""Test 'def_params' event data outputs only repo config keys.
|
||||
|
||||
Expected event log:
|
||||
<version event>
|
||||
<def_param event>
|
||||
<def_param event>
|
||||
"""
|
||||
config = {
|
||||
'git.foo': 'bar',
|
||||
'repo.partialclone': 'true',
|
||||
'repo.partialclonefilter': 'blob:none',
|
||||
}
|
||||
self._event_log_module.DefParamRepoEvents(config)
|
||||
|
||||
with tempfile.TemporaryDirectory(prefix='event_log_tests') as tempdir:
|
||||
log_path = self._event_log_module.Write(path=tempdir)
|
||||
self._log_data = self.readLog(log_path)
|
||||
|
||||
self.assertEqual(len(self._log_data), 3)
|
||||
def_param_events = self._log_data[1:]
|
||||
self.verifyCommonKeys(self._log_data[0], expected_event_name='version')
|
||||
|
||||
for event in def_param_events:
|
||||
self.verifyCommonKeys(event, expected_event_name='def_param')
|
||||
# Check for 'def_param' event specific fields.
|
||||
self.assertIn('param', event)
|
||||
self.assertIn('value', event)
|
||||
self.assertTrue(event['param'].startswith('repo.'))
|
||||
|
||||
def test_def_params_event_no_repo_config(self):
|
||||
"""Test 'def_params' event data won't output non-repo config keys.
|
||||
|
||||
Expected event log:
|
||||
<version event>
|
||||
"""
|
||||
config = {
|
||||
'git.foo': 'bar',
|
||||
'git.core.foo2': 'baz',
|
||||
}
|
||||
self._event_log_module.DefParamRepoEvents(config)
|
||||
|
||||
with tempfile.TemporaryDirectory(prefix='event_log_tests') as tempdir:
|
||||
log_path = self._event_log_module.Write(path=tempdir)
|
||||
self._log_data = self.readLog(log_path)
|
||||
|
||||
self.assertEqual(len(self._log_data), 1)
|
||||
self.verifyCommonKeys(self._log_data[0], expected_event_name='version')
|
||||
|
||||
def test_data_event_config(self):
|
||||
"""Test 'data' event data outputs all config keys.
|
||||
|
||||
Expected event log:
|
||||
<version event>
|
||||
<data event>
|
||||
<data event>
|
||||
"""
|
||||
config = {
|
||||
'git.foo': 'bar',
|
||||
'repo.partialclone': 'false',
|
||||
'repo.syncstate.superproject.hassuperprojecttag': 'true',
|
||||
'repo.syncstate.superproject.sys.argv': ['--', 'sync', 'protobuf'],
|
||||
}
|
||||
prefix_value = 'prefix'
|
||||
self._event_log_module.LogDataConfigEvents(config, prefix_value)
|
||||
|
||||
with tempfile.TemporaryDirectory(prefix='event_log_tests') as tempdir:
|
||||
log_path = self._event_log_module.Write(path=tempdir)
|
||||
self._log_data = self.readLog(log_path)
|
||||
|
||||
self.assertEqual(len(self._log_data), 5)
|
||||
data_events = self._log_data[1:]
|
||||
self.verifyCommonKeys(self._log_data[0], expected_event_name='version')
|
||||
|
||||
for event in data_events:
|
||||
self.verifyCommonKeys(event)
|
||||
# Check for 'data' event specific fields.
|
||||
self.assertIn('key', event)
|
||||
self.assertIn('value', event)
|
||||
key = event['key']
|
||||
key = self.remove_prefix(key, f'{prefix_value}/')
|
||||
value = event['value']
|
||||
self.assertEqual(self._event_log_module.GetDataEventName(value), event['event'])
|
||||
self.assertTrue(key in config and value == config[key])
|
||||
|
||||
def test_error_event(self):
|
||||
"""Test and validate 'error' event data is valid.
|
||||
|
||||
Expected event log:
|
||||
<version event>
|
||||
<error event>
|
||||
"""
|
||||
msg = 'invalid option: --cahced'
|
||||
fmt = 'invalid option: %s'
|
||||
self._event_log_module.ErrorEvent(msg, fmt)
|
||||
with tempfile.TemporaryDirectory(prefix='event_log_tests') as tempdir:
|
||||
log_path = self._event_log_module.Write(path=tempdir)
|
||||
self._log_data = self.readLog(log_path)
|
||||
|
||||
self.assertEqual(len(self._log_data), 2)
|
||||
error_event = self._log_data[1]
|
||||
self.verifyCommonKeys(self._log_data[0], expected_event_name='version')
|
||||
self.verifyCommonKeys(error_event, expected_event_name='error')
|
||||
# Check for 'error' event specific fields.
|
||||
self.assertIn('msg', error_event)
|
||||
self.assertIn('fmt', error_event)
|
||||
self.assertEqual(error_event['msg'], msg)
|
||||
self.assertEqual(error_event['fmt'], fmt)
|
||||
|
||||
def test_write_with_filename(self):
|
||||
"""Test Write() with a path to a file exits with None."""
|
||||
self.assertIsNone(self._event_log_module.Write(path='path/to/file'))
|
||||
|
||||
def test_write_with_git_config(self):
|
||||
"""Test Write() uses the git config path when 'git config' call succeeds."""
|
||||
with tempfile.TemporaryDirectory(prefix='event_log_tests') as tempdir:
|
||||
with mock.patch.object(self._event_log_module,
|
||||
'_GetEventTargetPath', return_value=tempdir):
|
||||
self.assertEqual(os.path.dirname(self._event_log_module.Write()), tempdir)
|
||||
|
||||
def test_write_no_git_config(self):
|
||||
"""Test Write() with no git config variable present exits with None."""
|
||||
with mock.patch.object(self._event_log_module,
|
||||
'_GetEventTargetPath', return_value=None):
|
||||
self.assertIsNone(self._event_log_module.Write())
|
||||
|
||||
def test_write_non_string(self):
|
||||
"""Test Write() with non-string type for |path| throws TypeError."""
|
||||
with self.assertRaises(TypeError):
|
||||
self._event_log_module.Write(path=1234)
|
||||
|
||||
def test_write_socket(self):
|
||||
"""Test Write() with Unix domain socket for |path| and validate received traces."""
|
||||
received_traces = []
|
||||
with tempfile.TemporaryDirectory(prefix='test_server_sockets') as tempdir:
|
||||
socket_path = os.path.join(tempdir, "server.sock")
|
||||
server_ready = threading.Condition()
|
||||
# Start "server" listening on Unix domain socket at socket_path.
|
||||
try:
|
||||
server_thread = threading.Thread(
|
||||
target=serverLoggingThread,
|
||||
args=(socket_path, server_ready, received_traces))
|
||||
server_thread.start()
|
||||
|
||||
with server_ready:
|
||||
server_ready.wait(timeout=120)
|
||||
def test_start_event(self):
|
||||
"""Test and validate 'start' event data is valid.
|
||||
|
||||
Expected event log:
|
||||
<version event>
|
||||
<start event>
|
||||
"""
|
||||
self._event_log_module.StartEvent()
|
||||
path = self._event_log_module.Write(path=f'af_unix:{socket_path}')
|
||||
finally:
|
||||
server_thread.join(timeout=5)
|
||||
with tempfile.TemporaryDirectory(prefix="event_log_tests") as tempdir:
|
||||
log_path = self._event_log_module.Write(path=tempdir)
|
||||
self._log_data = self.readLog(log_path)
|
||||
|
||||
self.assertEqual(path, f'af_unix:stream:{socket_path}')
|
||||
self.assertEqual(len(received_traces), 2)
|
||||
version_event = json.loads(received_traces[0])
|
||||
start_event = json.loads(received_traces[1])
|
||||
self.verifyCommonKeys(version_event, expected_event_name='version')
|
||||
self.verifyCommonKeys(start_event, expected_event_name='start')
|
||||
# Check for 'start' event specific fields.
|
||||
self.assertIn('argv', start_event)
|
||||
self.assertIsInstance(start_event['argv'], list)
|
||||
self.assertEqual(len(self._log_data), 2)
|
||||
start_event = self._log_data[1]
|
||||
self.verifyCommonKeys(self._log_data[0], expected_event_name="version")
|
||||
self.verifyCommonKeys(start_event, expected_event_name="start")
|
||||
# Check for 'start' event specific fields.
|
||||
self.assertIn("argv", start_event)
|
||||
self.assertTrue(isinstance(start_event["argv"], list))
|
||||
|
||||
def test_exit_event_result_none(self):
|
||||
"""Test 'exit' event data is valid when result is None.
|
||||
|
||||
We expect None result to be converted to 0 in the exit event data.
|
||||
|
||||
Expected event log:
|
||||
<version event>
|
||||
<exit event>
|
||||
"""
|
||||
self._event_log_module.ExitEvent(None)
|
||||
with tempfile.TemporaryDirectory(prefix="event_log_tests") as tempdir:
|
||||
log_path = self._event_log_module.Write(path=tempdir)
|
||||
self._log_data = self.readLog(log_path)
|
||||
|
||||
self.assertEqual(len(self._log_data), 2)
|
||||
exit_event = self._log_data[1]
|
||||
self.verifyCommonKeys(self._log_data[0], expected_event_name="version")
|
||||
self.verifyCommonKeys(exit_event, expected_event_name="exit")
|
||||
# Check for 'exit' event specific fields.
|
||||
self.assertIn("code", exit_event)
|
||||
# 'None' result should convert to 0 (successful) return code.
|
||||
self.assertEqual(exit_event["code"], 0)
|
||||
|
||||
def test_exit_event_result_integer(self):
|
||||
"""Test 'exit' event data is valid when result is an integer.
|
||||
|
||||
Expected event log:
|
||||
<version event>
|
||||
<exit event>
|
||||
"""
|
||||
self._event_log_module.ExitEvent(2)
|
||||
with tempfile.TemporaryDirectory(prefix="event_log_tests") as tempdir:
|
||||
log_path = self._event_log_module.Write(path=tempdir)
|
||||
self._log_data = self.readLog(log_path)
|
||||
|
||||
self.assertEqual(len(self._log_data), 2)
|
||||
exit_event = self._log_data[1]
|
||||
self.verifyCommonKeys(self._log_data[0], expected_event_name="version")
|
||||
self.verifyCommonKeys(exit_event, expected_event_name="exit")
|
||||
# Check for 'exit' event specific fields.
|
||||
self.assertIn("code", exit_event)
|
||||
self.assertEqual(exit_event["code"], 2)
|
||||
|
||||
def test_command_event(self):
|
||||
"""Test and validate 'command' event data is valid.
|
||||
|
||||
Expected event log:
|
||||
<version event>
|
||||
<command event>
|
||||
"""
|
||||
name = "repo"
|
||||
subcommands = ["init" "this"]
|
||||
self._event_log_module.CommandEvent(
|
||||
name="repo", subcommands=subcommands
|
||||
)
|
||||
with tempfile.TemporaryDirectory(prefix="event_log_tests") as tempdir:
|
||||
log_path = self._event_log_module.Write(path=tempdir)
|
||||
self._log_data = self.readLog(log_path)
|
||||
|
||||
self.assertEqual(len(self._log_data), 2)
|
||||
command_event = self._log_data[1]
|
||||
self.verifyCommonKeys(self._log_data[0], expected_event_name="version")
|
||||
self.verifyCommonKeys(command_event, expected_event_name="command")
|
||||
# Check for 'command' event specific fields.
|
||||
self.assertIn("name", command_event)
|
||||
self.assertIn("subcommands", command_event)
|
||||
self.assertEqual(command_event["name"], name)
|
||||
self.assertEqual(command_event["subcommands"], subcommands)
|
||||
|
||||
def test_def_params_event_repo_config(self):
|
||||
"""Test 'def_params' event data outputs only repo config keys.
|
||||
|
||||
Expected event log:
|
||||
<version event>
|
||||
<def_param event>
|
||||
<def_param event>
|
||||
"""
|
||||
config = {
|
||||
"git.foo": "bar",
|
||||
"repo.partialclone": "true",
|
||||
"repo.partialclonefilter": "blob:none",
|
||||
}
|
||||
self._event_log_module.DefParamRepoEvents(config)
|
||||
|
||||
with tempfile.TemporaryDirectory(prefix="event_log_tests") as tempdir:
|
||||
log_path = self._event_log_module.Write(path=tempdir)
|
||||
self._log_data = self.readLog(log_path)
|
||||
|
||||
self.assertEqual(len(self._log_data), 3)
|
||||
def_param_events = self._log_data[1:]
|
||||
self.verifyCommonKeys(self._log_data[0], expected_event_name="version")
|
||||
|
||||
for event in def_param_events:
|
||||
self.verifyCommonKeys(event, expected_event_name="def_param")
|
||||
# Check for 'def_param' event specific fields.
|
||||
self.assertIn("param", event)
|
||||
self.assertIn("value", event)
|
||||
self.assertTrue(event["param"].startswith("repo."))
|
||||
|
||||
def test_def_params_event_no_repo_config(self):
|
||||
"""Test 'def_params' event data won't output non-repo config keys.
|
||||
|
||||
Expected event log:
|
||||
<version event>
|
||||
"""
|
||||
config = {
|
||||
"git.foo": "bar",
|
||||
"git.core.foo2": "baz",
|
||||
}
|
||||
self._event_log_module.DefParamRepoEvents(config)
|
||||
|
||||
with tempfile.TemporaryDirectory(prefix="event_log_tests") as tempdir:
|
||||
log_path = self._event_log_module.Write(path=tempdir)
|
||||
self._log_data = self.readLog(log_path)
|
||||
|
||||
self.assertEqual(len(self._log_data), 1)
|
||||
self.verifyCommonKeys(self._log_data[0], expected_event_name="version")
|
||||
|
||||
def test_data_event_config(self):
|
||||
"""Test 'data' event data outputs all config keys.
|
||||
|
||||
Expected event log:
|
||||
<version event>
|
||||
<data event>
|
||||
<data event>
|
||||
"""
|
||||
config = {
|
||||
"git.foo": "bar",
|
||||
"repo.partialclone": "false",
|
||||
"repo.syncstate.superproject.hassuperprojecttag": "true",
|
||||
"repo.syncstate.superproject.sys.argv": ["--", "sync", "protobuf"],
|
||||
}
|
||||
prefix_value = "prefix"
|
||||
self._event_log_module.LogDataConfigEvents(config, prefix_value)
|
||||
|
||||
with tempfile.TemporaryDirectory(prefix="event_log_tests") as tempdir:
|
||||
log_path = self._event_log_module.Write(path=tempdir)
|
||||
self._log_data = self.readLog(log_path)
|
||||
|
||||
self.assertEqual(len(self._log_data), 5)
|
||||
data_events = self._log_data[1:]
|
||||
self.verifyCommonKeys(self._log_data[0], expected_event_name="version")
|
||||
|
||||
for event in data_events:
|
||||
self.verifyCommonKeys(event)
|
||||
# Check for 'data' event specific fields.
|
||||
self.assertIn("key", event)
|
||||
self.assertIn("value", event)
|
||||
key = event["key"]
|
||||
key = self.remove_prefix(key, f"{prefix_value}/")
|
||||
value = event["value"]
|
||||
self.assertEqual(
|
||||
self._event_log_module.GetDataEventName(value), event["event"]
|
||||
)
|
||||
self.assertTrue(key in config and value == config[key])
|
||||
|
||||
def test_error_event(self):
|
||||
"""Test and validate 'error' event data is valid.
|
||||
|
||||
Expected event log:
|
||||
<version event>
|
||||
<error event>
|
||||
"""
|
||||
msg = "invalid option: --cahced"
|
||||
fmt = "invalid option: %s"
|
||||
self._event_log_module.ErrorEvent(msg, fmt)
|
||||
with tempfile.TemporaryDirectory(prefix="event_log_tests") as tempdir:
|
||||
log_path = self._event_log_module.Write(path=tempdir)
|
||||
self._log_data = self.readLog(log_path)
|
||||
|
||||
self.assertEqual(len(self._log_data), 2)
|
||||
error_event = self._log_data[1]
|
||||
self.verifyCommonKeys(self._log_data[0], expected_event_name="version")
|
||||
self.verifyCommonKeys(error_event, expected_event_name="error")
|
||||
# Check for 'error' event specific fields.
|
||||
self.assertIn("msg", error_event)
|
||||
self.assertIn("fmt", error_event)
|
||||
self.assertEqual(error_event["msg"], msg)
|
||||
self.assertEqual(error_event["fmt"], fmt)
|
||||
|
||||
def test_write_with_filename(self):
|
||||
"""Test Write() with a path to a file exits with None."""
|
||||
self.assertIsNone(self._event_log_module.Write(path="path/to/file"))
|
||||
|
||||
def test_write_with_git_config(self):
|
||||
"""Test Write() uses the git config path when 'git config' call
|
||||
succeeds."""
|
||||
with tempfile.TemporaryDirectory(prefix="event_log_tests") as tempdir:
|
||||
with mock.patch.object(
|
||||
self._event_log_module,
|
||||
"_GetEventTargetPath",
|
||||
return_value=tempdir,
|
||||
):
|
||||
self.assertEqual(
|
||||
os.path.dirname(self._event_log_module.Write()), tempdir
|
||||
)
|
||||
|
||||
def test_write_no_git_config(self):
|
||||
"""Test Write() with no git config variable present exits with None."""
|
||||
with mock.patch.object(
|
||||
self._event_log_module, "_GetEventTargetPath", return_value=None
|
||||
):
|
||||
self.assertIsNone(self._event_log_module.Write())
|
||||
|
||||
def test_write_non_string(self):
|
||||
"""Test Write() with non-string type for |path| throws TypeError."""
|
||||
with self.assertRaises(TypeError):
|
||||
self._event_log_module.Write(path=1234)
|
||||
|
||||
def test_write_socket(self):
|
||||
"""Test Write() with Unix domain socket for |path| and validate received
|
||||
traces."""
|
||||
received_traces = []
|
||||
with tempfile.TemporaryDirectory(
|
||||
prefix="test_server_sockets"
|
||||
) as tempdir:
|
||||
socket_path = os.path.join(tempdir, "server.sock")
|
||||
server_ready = threading.Condition()
|
||||
# Start "server" listening on Unix domain socket at socket_path.
|
||||
try:
|
||||
server_thread = threading.Thread(
|
||||
target=serverLoggingThread,
|
||||
args=(socket_path, server_ready, received_traces),
|
||||
)
|
||||
server_thread.start()
|
||||
|
||||
with server_ready:
|
||||
server_ready.wait(timeout=120)
|
||||
|
||||
self._event_log_module.StartEvent()
|
||||
path = self._event_log_module.Write(
|
||||
path=f"af_unix:{socket_path}"
|
||||
)
|
||||
finally:
|
||||
server_thread.join(timeout=5)
|
||||
|
||||
self.assertEqual(path, f"af_unix:stream:{socket_path}")
|
||||
self.assertEqual(len(received_traces), 2)
|
||||
version_event = json.loads(received_traces[0])
|
||||
start_event = json.loads(received_traces[1])
|
||||
self.verifyCommonKeys(version_event, expected_event_name="version")
|
||||
self.verifyCommonKeys(start_event, expected_event_name="start")
|
||||
# Check for 'start' event specific fields.
|
||||
self.assertIn("argv", start_event)
|
||||
self.assertIsInstance(start_event["argv"], list)
|
||||
|
@@ -17,39 +17,38 @@
|
||||
import hooks
|
||||
import unittest
|
||||
|
||||
|
||||
class RepoHookShebang(unittest.TestCase):
|
||||
"""Check shebang parsing in RepoHook."""
|
||||
"""Check shebang parsing in RepoHook."""
|
||||
|
||||
def test_no_shebang(self):
|
||||
"""Lines w/out shebangs should be rejected."""
|
||||
DATA = (
|
||||
'',
|
||||
'#\n# foo\n',
|
||||
'# Bad shebang in script\n#!/foo\n'
|
||||
)
|
||||
for data in DATA:
|
||||
self.assertIsNone(hooks.RepoHook._ExtractInterpFromShebang(data))
|
||||
def test_no_shebang(self):
|
||||
"""Lines w/out shebangs should be rejected."""
|
||||
DATA = ("", "#\n# foo\n", "# Bad shebang in script\n#!/foo\n")
|
||||
for data in DATA:
|
||||
self.assertIsNone(hooks.RepoHook._ExtractInterpFromShebang(data))
|
||||
|
||||
def test_direct_interp(self):
|
||||
"""Lines whose shebang points directly to the interpreter."""
|
||||
DATA = (
|
||||
('#!/foo', '/foo'),
|
||||
('#! /foo', '/foo'),
|
||||
('#!/bin/foo ', '/bin/foo'),
|
||||
('#! /usr/foo ', '/usr/foo'),
|
||||
('#! /usr/foo -args', '/usr/foo'),
|
||||
)
|
||||
for shebang, interp in DATA:
|
||||
self.assertEqual(hooks.RepoHook._ExtractInterpFromShebang(shebang),
|
||||
interp)
|
||||
def test_direct_interp(self):
|
||||
"""Lines whose shebang points directly to the interpreter."""
|
||||
DATA = (
|
||||
("#!/foo", "/foo"),
|
||||
("#! /foo", "/foo"),
|
||||
("#!/bin/foo ", "/bin/foo"),
|
||||
("#! /usr/foo ", "/usr/foo"),
|
||||
("#! /usr/foo -args", "/usr/foo"),
|
||||
)
|
||||
for shebang, interp in DATA:
|
||||
self.assertEqual(
|
||||
hooks.RepoHook._ExtractInterpFromShebang(shebang), interp
|
||||
)
|
||||
|
||||
def test_env_interp(self):
|
||||
"""Lines whose shebang launches through `env`."""
|
||||
DATA = (
|
||||
('#!/usr/bin/env foo', 'foo'),
|
||||
('#!/bin/env foo', 'foo'),
|
||||
('#! /bin/env /bin/foo ', '/bin/foo'),
|
||||
)
|
||||
for shebang, interp in DATA:
|
||||
self.assertEqual(hooks.RepoHook._ExtractInterpFromShebang(shebang),
|
||||
interp)
|
||||
def test_env_interp(self):
|
||||
"""Lines whose shebang launches through `env`."""
|
||||
DATA = (
|
||||
("#!/usr/bin/env foo", "foo"),
|
||||
("#!/bin/env foo", "foo"),
|
||||
("#! /bin/env /bin/foo ", "/bin/foo"),
|
||||
)
|
||||
for shebang, interp in DATA:
|
||||
self.assertEqual(
|
||||
hooks.RepoHook._ExtractInterpFromShebang(shebang), interp
|
||||
)
|
||||
|
File diff suppressed because it is too large
@@ -22,29 +22,31 @@ import platform_utils
|
||||
|
||||
|
||||
class RemoveTests(unittest.TestCase):
|
||||
"""Check remove() helper."""
|
||||
"""Check remove() helper."""
|
||||
|
||||
def testMissingOk(self):
|
||||
"""Check missing_ok handling."""
|
||||
with tempfile.TemporaryDirectory() as tmpdir:
|
||||
path = os.path.join(tmpdir, 'test')
|
||||
def testMissingOk(self):
|
||||
"""Check missing_ok handling."""
|
||||
with tempfile.TemporaryDirectory() as tmpdir:
|
||||
path = os.path.join(tmpdir, "test")
|
||||
|
||||
# Should not fail.
|
||||
platform_utils.remove(path, missing_ok=True)
|
||||
# Should not fail.
|
||||
platform_utils.remove(path, missing_ok=True)
|
||||
|
||||
# Should fail.
|
||||
self.assertRaises(OSError, platform_utils.remove, path)
|
||||
self.assertRaises(OSError, platform_utils.remove, path, missing_ok=False)
|
||||
# Should fail.
|
||||
self.assertRaises(OSError, platform_utils.remove, path)
|
||||
self.assertRaises(
|
||||
OSError, platform_utils.remove, path, missing_ok=False
|
||||
)
|
||||
|
||||
# Should not fail if it exists.
|
||||
open(path, 'w').close()
|
||||
platform_utils.remove(path, missing_ok=True)
|
||||
self.assertFalse(os.path.exists(path))
|
||||
# Should not fail if it exists.
|
||||
open(path, "w").close()
|
||||
platform_utils.remove(path, missing_ok=True)
|
||||
self.assertFalse(os.path.exists(path))
|
||||
|
||||
open(path, 'w').close()
|
||||
platform_utils.remove(path)
|
||||
self.assertFalse(os.path.exists(path))
|
||||
open(path, "w").close()
|
||||
platform_utils.remove(path)
|
||||
self.assertFalse(os.path.exists(path))
|
||||
|
||||
open(path, 'w').close()
|
||||
platform_utils.remove(path, missing_ok=False)
|
||||
self.assertFalse(os.path.exists(path))
|
||||
open(path, "w").close()
|
||||
platform_utils.remove(path, missing_ok=False)
|
||||
self.assertFalse(os.path.exists(path))
|
||||
|
@@ -31,452 +31,493 @@ import project
|
||||
|
||||
@contextlib.contextmanager
|
||||
def TempGitTree():
|
||||
"""Create a new empty git checkout for testing."""
|
||||
with tempfile.TemporaryDirectory(prefix='repo-tests') as tempdir:
|
||||
# Tests need to assume that main is the default branch at init,
|
||||
# which is not supported in config until 2.28.
|
||||
cmd = ['git', 'init']
|
||||
if git_command.git_require((2, 28, 0)):
|
||||
cmd += ['--initial-branch=main']
|
||||
else:
|
||||
# Use template dir for init.
|
||||
templatedir = tempfile.mkdtemp(prefix='.test-template')
|
||||
with open(os.path.join(templatedir, 'HEAD'), 'w') as fp:
|
||||
fp.write('ref: refs/heads/main\n')
|
||||
cmd += ['--template', templatedir]
|
||||
subprocess.check_call(cmd, cwd=tempdir)
|
||||
yield tempdir
|
||||
"""Create a new empty git checkout for testing."""
|
||||
with tempfile.TemporaryDirectory(prefix="repo-tests") as tempdir:
|
||||
# Tests need to assume that main is the default branch at init,
|
||||
# which is not supported in config until 2.28.
|
||||
cmd = ["git", "init"]
|
||||
if git_command.git_require((2, 28, 0)):
|
||||
cmd += ["--initial-branch=main"]
|
||||
else:
|
||||
# Use template dir for init.
|
||||
templatedir = tempfile.mkdtemp(prefix=".test-template")
|
||||
with open(os.path.join(templatedir, "HEAD"), "w") as fp:
|
||||
fp.write("ref: refs/heads/main\n")
|
||||
cmd += ["--template", templatedir]
|
||||
subprocess.check_call(cmd, cwd=tempdir)
|
||||
yield tempdir
|
||||
|
||||
|
||||
class FakeProject(object):
|
||||
"""A fake for Project for basic functionality."""
|
||||
"""A fake for Project for basic functionality."""
|
||||
|
||||
def __init__(self, worktree):
|
||||
self.worktree = worktree
|
||||
self.gitdir = os.path.join(worktree, '.git')
|
||||
self.name = 'fakeproject'
|
||||
self.work_git = project.Project._GitGetByExec(
|
||||
self, bare=False, gitdir=self.gitdir)
|
||||
self.bare_git = project.Project._GitGetByExec(
|
||||
self, bare=True, gitdir=self.gitdir)
|
||||
self.config = git_config.GitConfig.ForRepository(gitdir=self.gitdir)
|
||||
def __init__(self, worktree):
|
||||
self.worktree = worktree
|
||||
self.gitdir = os.path.join(worktree, ".git")
|
||||
self.name = "fakeproject"
|
||||
self.work_git = project.Project._GitGetByExec(
|
||||
self, bare=False, gitdir=self.gitdir
|
||||
)
|
||||
self.bare_git = project.Project._GitGetByExec(
|
||||
self, bare=True, gitdir=self.gitdir
|
||||
)
|
||||
self.config = git_config.GitConfig.ForRepository(gitdir=self.gitdir)
|
||||
|
||||
|
||||
class ReviewableBranchTests(unittest.TestCase):
|
||||
"""Check ReviewableBranch behavior."""
|
||||
"""Check ReviewableBranch behavior."""
|
||||
|
||||
def test_smoke(self):
|
||||
"""A quick run through everything."""
|
||||
with TempGitTree() as tempdir:
|
||||
fakeproj = FakeProject(tempdir)
|
||||
def test_smoke(self):
|
||||
"""A quick run through everything."""
|
||||
with TempGitTree() as tempdir:
|
||||
fakeproj = FakeProject(tempdir)
|
||||
|
||||
# Generate some commits.
|
||||
with open(os.path.join(tempdir, 'readme'), 'w') as fp:
|
||||
fp.write('txt')
|
||||
fakeproj.work_git.add('readme')
|
||||
fakeproj.work_git.commit('-mAdd file')
|
||||
fakeproj.work_git.checkout('-b', 'work')
|
||||
fakeproj.work_git.rm('-f', 'readme')
|
||||
fakeproj.work_git.commit('-mDel file')
|
||||
# Generate some commits.
|
||||
with open(os.path.join(tempdir, "readme"), "w") as fp:
|
||||
fp.write("txt")
|
||||
fakeproj.work_git.add("readme")
|
||||
fakeproj.work_git.commit("-mAdd file")
|
||||
fakeproj.work_git.checkout("-b", "work")
|
||||
fakeproj.work_git.rm("-f", "readme")
|
||||
fakeproj.work_git.commit("-mDel file")
|
||||
|
||||
# Start off with the normal details.
|
||||
rb = project.ReviewableBranch(
|
||||
fakeproj, fakeproj.config.GetBranch('work'), 'main')
|
||||
self.assertEqual('work', rb.name)
|
||||
self.assertEqual(1, len(rb.commits))
|
||||
self.assertIn('Del file', rb.commits[0])
|
||||
d = rb.unabbrev_commits
|
||||
self.assertEqual(1, len(d))
|
||||
short, long = next(iter(d.items()))
|
||||
self.assertTrue(long.startswith(short))
|
||||
self.assertTrue(rb.base_exists)
|
||||
# Hard to assert anything useful about this.
|
||||
self.assertTrue(rb.date)
|
||||
# Start off with the normal details.
|
||||
rb = project.ReviewableBranch(
|
||||
fakeproj, fakeproj.config.GetBranch("work"), "main"
|
||||
)
|
||||
self.assertEqual("work", rb.name)
|
||||
self.assertEqual(1, len(rb.commits))
|
||||
self.assertIn("Del file", rb.commits[0])
|
||||
d = rb.unabbrev_commits
|
||||
self.assertEqual(1, len(d))
|
||||
short, long = next(iter(d.items()))
|
||||
self.assertTrue(long.startswith(short))
|
||||
self.assertTrue(rb.base_exists)
|
||||
# Hard to assert anything useful about this.
|
||||
self.assertTrue(rb.date)
|
||||
|
||||
# Now delete the tracking branch!
|
||||
fakeproj.work_git.branch('-D', 'main')
|
||||
rb = project.ReviewableBranch(
|
||||
fakeproj, fakeproj.config.GetBranch('work'), 'main')
|
||||
self.assertEqual(0, len(rb.commits))
|
||||
self.assertFalse(rb.base_exists)
|
||||
# Hard to assert anything useful about this.
|
||||
self.assertTrue(rb.date)
|
||||
# Now delete the tracking branch!
|
||||
fakeproj.work_git.branch("-D", "main")
|
||||
rb = project.ReviewableBranch(
|
||||
fakeproj, fakeproj.config.GetBranch("work"), "main"
|
||||
)
|
||||
self.assertEqual(0, len(rb.commits))
|
||||
self.assertFalse(rb.base_exists)
|
||||
# Hard to assert anything useful about this.
|
||||
self.assertTrue(rb.date)
|
||||
|
||||
|
||||
class CopyLinkTestCase(unittest.TestCase):
|
||||
"""TestCase for stub repo client checkouts.
|
||||
"""TestCase for stub repo client checkouts.
|
||||
|
||||
It'll have a layout like this:
|
||||
tempdir/ # self.tempdir
|
||||
checkout/ # self.topdir
|
||||
git-project/ # self.worktree
|
||||
It'll have a layout like this:
|
||||
tempdir/ # self.tempdir
|
||||
checkout/ # self.topdir
|
||||
git-project/ # self.worktree
|
||||
|
||||
Attributes:
|
||||
tempdir: A dedicated temporary directory.
|
||||
worktree: The top of the repo client checkout.
|
||||
topdir: The top of a project checkout.
|
||||
"""
|
||||
Attributes:
|
||||
tempdir: A dedicated temporary directory.
|
||||
worktree: The top of the repo client checkout.
|
||||
topdir: The top of a project checkout.
|
||||
"""
|
||||
|
||||
def setUp(self):
|
||||
self.tempdirobj = tempfile.TemporaryDirectory(prefix='repo_tests')
|
||||
self.tempdir = self.tempdirobj.name
|
||||
self.topdir = os.path.join(self.tempdir, 'checkout')
|
||||
self.worktree = os.path.join(self.topdir, 'git-project')
|
||||
os.makedirs(self.topdir)
|
||||
os.makedirs(self.worktree)
|
||||
def setUp(self):
|
||||
self.tempdirobj = tempfile.TemporaryDirectory(prefix="repo_tests")
|
||||
self.tempdir = self.tempdirobj.name
|
||||
self.topdir = os.path.join(self.tempdir, "checkout")
|
||||
self.worktree = os.path.join(self.topdir, "git-project")
|
||||
os.makedirs(self.topdir)
|
||||
os.makedirs(self.worktree)
|
||||
|
||||
def tearDown(self):
|
||||
self.tempdirobj.cleanup()
|
||||
def tearDown(self):
|
||||
self.tempdirobj.cleanup()
|
||||
|
||||
@staticmethod
|
||||
def touch(path):
|
||||
with open(path, 'w'):
|
||||
pass
|
||||
@staticmethod
|
||||
def touch(path):
|
||||
with open(path, "w"):
|
||||
pass
|
||||
|
||||
def assertExists(self, path, msg=None):
|
||||
"""Make sure |path| exists."""
|
||||
if os.path.exists(path):
|
||||
return
|
||||
def assertExists(self, path, msg=None):
|
||||
"""Make sure |path| exists."""
|
||||
if os.path.exists(path):
|
||||
return
|
||||
|
||||
if msg is None:
|
||||
msg = ['path is missing: %s' % path]
|
||||
while path != '/':
|
||||
path = os.path.dirname(path)
|
||||
if not path:
|
||||
# If we're given something like "foo", abort once we get to "".
|
||||
break
|
||||
result = os.path.exists(path)
|
||||
msg.append('\tos.path.exists(%s): %s' % (path, result))
|
||||
if result:
|
||||
msg.append('\tcontents: %r' % os.listdir(path))
|
||||
break
|
||||
msg = '\n'.join(msg)
|
||||
if msg is None:
|
||||
msg = ["path is missing: %s" % path]
|
||||
while path != "/":
|
||||
path = os.path.dirname(path)
|
||||
if not path:
|
||||
# If we're given something like "foo", abort once we get to
|
||||
# "".
|
||||
break
|
||||
result = os.path.exists(path)
|
||||
msg.append("\tos.path.exists(%s): %s" % (path, result))
|
||||
if result:
|
||||
msg.append("\tcontents: %r" % os.listdir(path))
|
||||
break
|
||||
msg = "\n".join(msg)
|
||||
|
||||
raise self.failureException(msg)
|
||||
raise self.failureException(msg)
|
||||
|
||||
|
||||
class CopyFile(CopyLinkTestCase):
|
||||
"""Check _CopyFile handling."""
|
||||
"""Check _CopyFile handling."""
|
||||
|
||||
def CopyFile(self, src, dest):
|
||||
return project._CopyFile(self.worktree, src, self.topdir, dest)
|
||||
def CopyFile(self, src, dest):
|
||||
return project._CopyFile(self.worktree, src, self.topdir, dest)
|
||||
|
||||
def test_basic(self):
|
||||
"""Basic test of copying a file from a project to the toplevel."""
|
||||
src = os.path.join(self.worktree, 'foo.txt')
|
||||
self.touch(src)
|
||||
cf = self.CopyFile('foo.txt', 'foo')
|
||||
cf._Copy()
|
||||
self.assertExists(os.path.join(self.topdir, 'foo'))
|
||||
def test_basic(self):
|
||||
"""Basic test of copying a file from a project to the toplevel."""
|
||||
src = os.path.join(self.worktree, "foo.txt")
|
||||
self.touch(src)
|
||||
cf = self.CopyFile("foo.txt", "foo")
|
||||
cf._Copy()
|
||||
self.assertExists(os.path.join(self.topdir, "foo"))
|
||||
|
||||
def test_src_subdir(self):
|
||||
"""Copy a file from a subdir of a project."""
|
||||
src = os.path.join(self.worktree, 'bar', 'foo.txt')
|
||||
os.makedirs(os.path.dirname(src))
|
||||
self.touch(src)
|
||||
cf = self.CopyFile('bar/foo.txt', 'new.txt')
|
||||
cf._Copy()
|
||||
self.assertExists(os.path.join(self.topdir, 'new.txt'))
|
||||
def test_src_subdir(self):
|
||||
"""Copy a file from a subdir of a project."""
|
||||
src = os.path.join(self.worktree, "bar", "foo.txt")
|
||||
os.makedirs(os.path.dirname(src))
|
||||
self.touch(src)
|
||||
cf = self.CopyFile("bar/foo.txt", "new.txt")
|
||||
cf._Copy()
|
||||
self.assertExists(os.path.join(self.topdir, "new.txt"))
|
||||
|
||||
def test_dest_subdir(self):
|
||||
"""Copy a file to a subdir of a checkout."""
|
||||
src = os.path.join(self.worktree, 'foo.txt')
|
||||
self.touch(src)
|
||||
cf = self.CopyFile('foo.txt', 'sub/dir/new.txt')
|
||||
self.assertFalse(os.path.exists(os.path.join(self.topdir, 'sub')))
|
||||
cf._Copy()
|
||||
self.assertExists(os.path.join(self.topdir, 'sub', 'dir', 'new.txt'))
|
||||
def test_dest_subdir(self):
|
||||
"""Copy a file to a subdir of a checkout."""
|
||||
src = os.path.join(self.worktree, "foo.txt")
|
||||
self.touch(src)
|
||||
cf = self.CopyFile("foo.txt", "sub/dir/new.txt")
|
||||
self.assertFalse(os.path.exists(os.path.join(self.topdir, "sub")))
|
||||
cf._Copy()
|
||||
self.assertExists(os.path.join(self.topdir, "sub", "dir", "new.txt"))
|
||||
|
||||
def test_update(self):
|
||||
"""Make sure changed files get copied again."""
|
||||
src = os.path.join(self.worktree, 'foo.txt')
|
||||
dest = os.path.join(self.topdir, 'bar')
|
||||
with open(src, 'w') as f:
|
||||
f.write('1st')
|
||||
cf = self.CopyFile('foo.txt', 'bar')
|
||||
cf._Copy()
|
||||
self.assertExists(dest)
|
||||
with open(dest) as f:
|
||||
self.assertEqual(f.read(), '1st')
|
||||
def test_update(self):
|
||||
"""Make sure changed files get copied again."""
|
||||
src = os.path.join(self.worktree, "foo.txt")
|
||||
dest = os.path.join(self.topdir, "bar")
|
||||
with open(src, "w") as f:
|
||||
f.write("1st")
|
||||
cf = self.CopyFile("foo.txt", "bar")
|
||||
cf._Copy()
|
||||
self.assertExists(dest)
|
||||
with open(dest) as f:
|
||||
self.assertEqual(f.read(), "1st")
|
||||
|
||||
with open(src, 'w') as f:
|
||||
f.write('2nd!')
|
||||
cf._Copy()
|
||||
with open(dest) as f:
|
||||
self.assertEqual(f.read(), '2nd!')
|
||||
with open(src, "w") as f:
|
||||
f.write("2nd!")
|
||||
cf._Copy()
|
||||
with open(dest) as f:
|
||||
self.assertEqual(f.read(), "2nd!")
|
||||
|
||||
def test_src_block_symlink(self):
|
||||
"""Do not allow reading from a symlinked path."""
|
||||
src = os.path.join(self.worktree, 'foo.txt')
|
||||
sym = os.path.join(self.worktree, 'sym')
|
||||
self.touch(src)
|
||||
platform_utils.symlink('foo.txt', sym)
|
||||
self.assertExists(sym)
|
||||
cf = self.CopyFile('sym', 'foo')
|
||||
self.assertRaises(error.ManifestInvalidPathError, cf._Copy)
|
||||
def test_src_block_symlink(self):
|
||||
"""Do not allow reading from a symlinked path."""
|
||||
src = os.path.join(self.worktree, "foo.txt")
|
||||
sym = os.path.join(self.worktree, "sym")
|
||||
self.touch(src)
|
||||
platform_utils.symlink("foo.txt", sym)
|
||||
self.assertExists(sym)
|
||||
cf = self.CopyFile("sym", "foo")
|
||||
self.assertRaises(error.ManifestInvalidPathError, cf._Copy)
|
||||
|
||||
def test_src_block_symlink_traversal(self):
|
||||
"""Do not allow reading through a symlink dir."""
|
||||
realfile = os.path.join(self.tempdir, 'file.txt')
|
||||
self.touch(realfile)
|
||||
src = os.path.join(self.worktree, 'bar', 'file.txt')
|
||||
platform_utils.symlink(self.tempdir, os.path.join(self.worktree, 'bar'))
|
||||
self.assertExists(src)
|
||||
cf = self.CopyFile('bar/file.txt', 'foo')
|
||||
self.assertRaises(error.ManifestInvalidPathError, cf._Copy)
|
||||
def test_src_block_symlink_traversal(self):
|
||||
"""Do not allow reading through a symlink dir."""
|
||||
realfile = os.path.join(self.tempdir, "file.txt")
|
||||
self.touch(realfile)
|
||||
src = os.path.join(self.worktree, "bar", "file.txt")
|
||||
platform_utils.symlink(self.tempdir, os.path.join(self.worktree, "bar"))
|
||||
self.assertExists(src)
|
||||
cf = self.CopyFile("bar/file.txt", "foo")
|
||||
self.assertRaises(error.ManifestInvalidPathError, cf._Copy)
|
||||
|
||||
def test_src_block_copy_from_dir(self):
|
||||
"""Do not allow copying from a directory."""
|
||||
src = os.path.join(self.worktree, 'dir')
|
||||
os.makedirs(src)
|
||||
cf = self.CopyFile('dir', 'foo')
|
||||
self.assertRaises(error.ManifestInvalidPathError, cf._Copy)
|
||||
def test_src_block_copy_from_dir(self):
|
||||
"""Do not allow copying from a directory."""
|
||||
src = os.path.join(self.worktree, "dir")
|
||||
os.makedirs(src)
|
||||
cf = self.CopyFile("dir", "foo")
|
||||
self.assertRaises(error.ManifestInvalidPathError, cf._Copy)
|
||||
|
||||
def test_dest_block_symlink(self):
|
||||
"""Do not allow writing to a symlink."""
|
||||
src = os.path.join(self.worktree, 'foo.txt')
|
||||
self.touch(src)
|
||||
platform_utils.symlink('dest', os.path.join(self.topdir, 'sym'))
|
||||
cf = self.CopyFile('foo.txt', 'sym')
|
||||
self.assertRaises(error.ManifestInvalidPathError, cf._Copy)
|
||||
def test_dest_block_symlink(self):
|
||||
"""Do not allow writing to a symlink."""
|
||||
src = os.path.join(self.worktree, "foo.txt")
|
||||
self.touch(src)
|
||||
platform_utils.symlink("dest", os.path.join(self.topdir, "sym"))
|
||||
cf = self.CopyFile("foo.txt", "sym")
|
||||
self.assertRaises(error.ManifestInvalidPathError, cf._Copy)
|
||||
|
||||
def test_dest_block_symlink_traversal(self):
|
||||
"""Do not allow writing through a symlink dir."""
|
||||
src = os.path.join(self.worktree, 'foo.txt')
|
||||
self.touch(src)
|
||||
platform_utils.symlink(tempfile.gettempdir(),
|
||||
os.path.join(self.topdir, 'sym'))
|
||||
cf = self.CopyFile('foo.txt', 'sym/foo.txt')
|
||||
self.assertRaises(error.ManifestInvalidPathError, cf._Copy)
|
||||
def test_dest_block_symlink_traversal(self):
|
||||
"""Do not allow writing through a symlink dir."""
|
||||
src = os.path.join(self.worktree, "foo.txt")
|
||||
self.touch(src)
|
||||
platform_utils.symlink(
|
||||
tempfile.gettempdir(), os.path.join(self.topdir, "sym")
|
||||
)
|
||||
cf = self.CopyFile("foo.txt", "sym/foo.txt")
|
||||
self.assertRaises(error.ManifestInvalidPathError, cf._Copy)
|
||||
|
||||
def test_src_block_copy_to_dir(self):
|
||||
"""Do not allow copying to a directory."""
|
||||
src = os.path.join(self.worktree, 'foo.txt')
|
||||
self.touch(src)
|
||||
os.makedirs(os.path.join(self.topdir, 'dir'))
|
||||
cf = self.CopyFile('foo.txt', 'dir')
|
||||
self.assertRaises(error.ManifestInvalidPathError, cf._Copy)
|
||||
def test_src_block_copy_to_dir(self):
|
||||
"""Do not allow copying to a directory."""
|
||||
src = os.path.join(self.worktree, "foo.txt")
|
||||
self.touch(src)
|
||||
os.makedirs(os.path.join(self.topdir, "dir"))
|
||||
cf = self.CopyFile("foo.txt", "dir")
|
||||
self.assertRaises(error.ManifestInvalidPathError, cf._Copy)
|
||||
|
||||
|
||||
class LinkFile(CopyLinkTestCase):
  """Check _LinkFile handling."""
    """Check _LinkFile handling."""

  def LinkFile(self, src, dest):
    return project._LinkFile(self.worktree, src, self.topdir, dest)
    def LinkFile(self, src, dest):
        return project._LinkFile(self.worktree, src, self.topdir, dest)

  def test_basic(self):
    """Basic test of linking a file from a project into the toplevel."""
    src = os.path.join(self.worktree, 'foo.txt')
    self.touch(src)
    lf = self.LinkFile('foo.txt', 'foo')
    lf._Link()
    dest = os.path.join(self.topdir, 'foo')
    self.assertExists(dest)
    self.assertTrue(os.path.islink(dest))
    self.assertEqual(os.path.join('git-project', 'foo.txt'), os.readlink(dest))
    def test_basic(self):
        """Basic test of linking a file from a project into the toplevel."""
        src = os.path.join(self.worktree, "foo.txt")
        self.touch(src)
        lf = self.LinkFile("foo.txt", "foo")
        lf._Link()
        dest = os.path.join(self.topdir, "foo")
        self.assertExists(dest)
        self.assertTrue(os.path.islink(dest))
        self.assertEqual(
            os.path.join("git-project", "foo.txt"), os.readlink(dest)
        )

  def test_src_subdir(self):
    """Link to a file in a subdir of a project."""
    src = os.path.join(self.worktree, 'bar', 'foo.txt')
    os.makedirs(os.path.dirname(src))
    self.touch(src)
    lf = self.LinkFile('bar/foo.txt', 'foo')
    lf._Link()
    self.assertExists(os.path.join(self.topdir, 'foo'))
    def test_src_subdir(self):
        """Link to a file in a subdir of a project."""
        src = os.path.join(self.worktree, "bar", "foo.txt")
        os.makedirs(os.path.dirname(src))
        self.touch(src)
        lf = self.LinkFile("bar/foo.txt", "foo")
        lf._Link()
        self.assertExists(os.path.join(self.topdir, "foo"))

  def test_src_self(self):
    """Link to the project itself."""
    dest = os.path.join(self.topdir, 'foo', 'bar')
    lf = self.LinkFile('.', 'foo/bar')
    lf._Link()
    self.assertExists(dest)
    self.assertEqual(os.path.join('..', 'git-project'), os.readlink(dest))
    def test_src_self(self):
        """Link to the project itself."""
        dest = os.path.join(self.topdir, "foo", "bar")
        lf = self.LinkFile(".", "foo/bar")
        lf._Link()
        self.assertExists(dest)
        self.assertEqual(os.path.join("..", "git-project"), os.readlink(dest))

  def test_dest_subdir(self):
    """Link a file to a subdir of a checkout."""
    src = os.path.join(self.worktree, 'foo.txt')
    self.touch(src)
    lf = self.LinkFile('foo.txt', 'sub/dir/foo/bar')
    self.assertFalse(os.path.exists(os.path.join(self.topdir, 'sub')))
    lf._Link()
    self.assertExists(os.path.join(self.topdir, 'sub', 'dir', 'foo', 'bar'))
    def test_dest_subdir(self):
        """Link a file to a subdir of a checkout."""
        src = os.path.join(self.worktree, "foo.txt")
        self.touch(src)
        lf = self.LinkFile("foo.txt", "sub/dir/foo/bar")
        self.assertFalse(os.path.exists(os.path.join(self.topdir, "sub")))
        lf._Link()
        self.assertExists(os.path.join(self.topdir, "sub", "dir", "foo", "bar"))

  def test_src_block_relative(self):
    """Do not allow relative symlinks."""
    BAD_SOURCES = (
        './',
        '..',
        '../',
        'foo/.',
        'foo/./bar',
        'foo/..',
        'foo/../foo',
    )
    for src in BAD_SOURCES:
      lf = self.LinkFile(src, 'foo')
      self.assertRaises(error.ManifestInvalidPathError, lf._Link)
    def test_src_block_relative(self):
        """Do not allow relative symlinks."""
        BAD_SOURCES = (
            "./",
            "..",
            "../",
            "foo/.",
            "foo/./bar",
            "foo/..",
            "foo/../foo",
        )
        for src in BAD_SOURCES:
            lf = self.LinkFile(src, "foo")
            self.assertRaises(error.ManifestInvalidPathError, lf._Link)

  def test_update(self):
    """Make sure changed targets get updated."""
    dest = os.path.join(self.topdir, 'sym')
    def test_update(self):
        """Make sure changed targets get updated."""
        dest = os.path.join(self.topdir, "sym")

    src = os.path.join(self.worktree, 'foo.txt')
    self.touch(src)
    lf = self.LinkFile('foo.txt', 'sym')
    lf._Link()
    self.assertEqual(os.path.join('git-project', 'foo.txt'), os.readlink(dest))
        src = os.path.join(self.worktree, "foo.txt")
        self.touch(src)
        lf = self.LinkFile("foo.txt", "sym")
        lf._Link()
        self.assertEqual(
            os.path.join("git-project", "foo.txt"), os.readlink(dest)
        )

    # Point the symlink somewhere else.
    os.unlink(dest)
    platform_utils.symlink(self.tempdir, dest)
    lf._Link()
    self.assertEqual(os.path.join('git-project', 'foo.txt'), os.readlink(dest))
        # Point the symlink somewhere else.
        os.unlink(dest)
        platform_utils.symlink(self.tempdir, dest)
        lf._Link()
        self.assertEqual(
            os.path.join("git-project", "foo.txt"), os.readlink(dest)
        )


class MigrateWorkTreeTests(unittest.TestCase):
  """Check _MigrateOldWorkTreeGitDir handling."""
    """Check _MigrateOldWorkTreeGitDir handling."""

  _SYMLINKS = {
      'config', 'description', 'hooks', 'info', 'logs', 'objects',
      'packed-refs', 'refs', 'rr-cache', 'shallow', 'svn',
  }
  _FILES = {
      'COMMIT_EDITMSG', 'FETCH_HEAD', 'HEAD', 'index', 'ORIG_HEAD',
      'unknown-file-should-be-migrated',
  }
  _CLEAN_FILES = {
      'a-vim-temp-file~', '#an-emacs-temp-file#',
  }
    _SYMLINKS = {
        "config",
        "description",
        "hooks",
        "info",
        "logs",
        "objects",
        "packed-refs",
        "refs",
        "rr-cache",
        "shallow",
        "svn",
    }
    _FILES = {
        "COMMIT_EDITMSG",
        "FETCH_HEAD",
        "HEAD",
        "index",
        "ORIG_HEAD",
        "unknown-file-should-be-migrated",
    }
    _CLEAN_FILES = {
        "a-vim-temp-file~",
        "#an-emacs-temp-file#",
    }

  @classmethod
  @contextlib.contextmanager
  def _simple_layout(cls):
    """Create a simple repo client checkout to test against."""
    with tempfile.TemporaryDirectory() as tempdir:
      tempdir = Path(tempdir)
    @classmethod
    @contextlib.contextmanager
    def _simple_layout(cls):
        """Create a simple repo client checkout to test against."""
        with tempfile.TemporaryDirectory() as tempdir:
            tempdir = Path(tempdir)

      gitdir = tempdir / '.repo/projects/src/test.git'
      gitdir.mkdir(parents=True)
      cmd = ['git', 'init', '--bare', str(gitdir)]
      subprocess.check_call(cmd)
            gitdir = tempdir / ".repo/projects/src/test.git"
            gitdir.mkdir(parents=True)
            cmd = ["git", "init", "--bare", str(gitdir)]
            subprocess.check_call(cmd)

      dotgit = tempdir / 'src/test/.git'
      dotgit.mkdir(parents=True)
      for name in cls._SYMLINKS:
        (dotgit / name).symlink_to(f'../../../.repo/projects/src/test.git/{name}')
      for name in cls._FILES | cls._CLEAN_FILES:
        (dotgit / name).write_text(name)
            dotgit = tempdir / "src/test/.git"
            dotgit.mkdir(parents=True)
            for name in cls._SYMLINKS:
                (dotgit / name).symlink_to(
                    f"../../../.repo/projects/src/test.git/{name}"
                )
            for name in cls._FILES | cls._CLEAN_FILES:
                (dotgit / name).write_text(name)

      yield tempdir
            yield tempdir

  def test_standard(self):
    """Migrate a standard checkout that we expect."""
    with self._simple_layout() as tempdir:
      dotgit = tempdir / 'src/test/.git'
      project.Project._MigrateOldWorkTreeGitDir(str(dotgit))
    def test_standard(self):
        """Migrate a standard checkout that we expect."""
        with self._simple_layout() as tempdir:
            dotgit = tempdir / "src/test/.git"
            project.Project._MigrateOldWorkTreeGitDir(str(dotgit))

      # Make sure the dir was transformed into a symlink.
      self.assertTrue(dotgit.is_symlink())
      self.assertEqual(os.readlink(dotgit), os.path.normpath('../../.repo/projects/src/test.git'))
            # Make sure the dir was transformed into a symlink.
            self.assertTrue(dotgit.is_symlink())
            self.assertEqual(
                os.readlink(dotgit),
                os.path.normpath("../../.repo/projects/src/test.git"),
            )

      # Make sure files were moved over.
      gitdir = tempdir / '.repo/projects/src/test.git'
      for name in self._FILES:
        self.assertEqual(name, (gitdir / name).read_text())
      # Make sure files were removed.
      for name in self._CLEAN_FILES:
        self.assertFalse((gitdir / name).exists())
            # Make sure files were moved over.
            gitdir = tempdir / ".repo/projects/src/test.git"
            for name in self._FILES:
                self.assertEqual(name, (gitdir / name).read_text())
            # Make sure files were removed.
            for name in self._CLEAN_FILES:
                self.assertFalse((gitdir / name).exists())

  def test_unknown(self):
    """A checkout with unknown files should abort."""
    with self._simple_layout() as tempdir:
      dotgit = tempdir / 'src/test/.git'
      (tempdir / '.repo/projects/src/test.git/random-file').write_text('one')
      (dotgit / 'random-file').write_text('two')
      with self.assertRaises(error.GitError):
        project.Project._MigrateOldWorkTreeGitDir(str(dotgit))
    def test_unknown(self):
        """A checkout with unknown files should abort."""
        with self._simple_layout() as tempdir:
            dotgit = tempdir / "src/test/.git"
            (tempdir / ".repo/projects/src/test.git/random-file").write_text(
                "one"
            )
            (dotgit / "random-file").write_text("two")
            with self.assertRaises(error.GitError):
                project.Project._MigrateOldWorkTreeGitDir(str(dotgit))

      # Make sure no content was actually changed.
      self.assertTrue(dotgit.is_dir())
      for name in self._FILES:
        self.assertTrue((dotgit / name).is_file())
      for name in self._CLEAN_FILES:
        self.assertTrue((dotgit / name).is_file())
      for name in self._SYMLINKS:
        self.assertTrue((dotgit / name).is_symlink())
            # Make sure no content was actually changed.
            self.assertTrue(dotgit.is_dir())
            for name in self._FILES:
                self.assertTrue((dotgit / name).is_file())
            for name in self._CLEAN_FILES:
                self.assertTrue((dotgit / name).is_file())
            for name in self._SYMLINKS:
                self.assertTrue((dotgit / name).is_symlink())


class ManifestPropertiesFetchedCorrectly(unittest.TestCase):
  """Ensure properties are fetched properly."""
    """Ensure properties are fetched properly."""

  def setUpManifest(self, tempdir):
    repodir = os.path.join(tempdir, '.repo')
    manifest_dir = os.path.join(repodir, 'manifests')
    manifest_file = os.path.join(
        repodir, manifest_xml.MANIFEST_FILE_NAME)
    local_manifest_dir = os.path.join(
        repodir, manifest_xml.LOCAL_MANIFESTS_DIR_NAME)
    os.mkdir(repodir)
    os.mkdir(manifest_dir)
    manifest = manifest_xml.XmlManifest(repodir, manifest_file)
    def setUpManifest(self, tempdir):
        repodir = os.path.join(tempdir, ".repo")
        manifest_dir = os.path.join(repodir, "manifests")
        manifest_file = os.path.join(repodir, manifest_xml.MANIFEST_FILE_NAME)
        os.mkdir(repodir)
        os.mkdir(manifest_dir)
        manifest = manifest_xml.XmlManifest(repodir, manifest_file)

    return project.ManifestProject(
        manifest, 'test/manifest', os.path.join(tempdir, '.git'), tempdir)
        return project.ManifestProject(
            manifest, "test/manifest", os.path.join(tempdir, ".git"), tempdir
        )

  def test_manifest_config_properties(self):
    """Test we are fetching the manifest config properties correctly."""
    def test_manifest_config_properties(self):
        """Test we are fetching the manifest config properties correctly."""

    with TempGitTree() as tempdir:
      fakeproj = self.setUpManifest(tempdir)
        with TempGitTree() as tempdir:
            fakeproj = self.setUpManifest(tempdir)

      # Set property using the expected Set method, then ensure
      # the property functions are using the correct Get methods.
      fakeproj.config.SetString(
          'manifest.standalone', 'https://chicken/manifest.git')
      self.assertEqual(
          fakeproj.standalone_manifest_url, 'https://chicken/manifest.git')
            # Set property using the expected Set method, then ensure
            # the property functions are using the correct Get methods.
            fakeproj.config.SetString(
                "manifest.standalone", "https://chicken/manifest.git"
            )
            self.assertEqual(
                fakeproj.standalone_manifest_url, "https://chicken/manifest.git"
            )

      fakeproj.config.SetString('manifest.groups', 'test-group, admin-group')
      self.assertEqual(fakeproj.manifest_groups, 'test-group, admin-group')
            fakeproj.config.SetString(
                "manifest.groups", "test-group, admin-group"
            )
            self.assertEqual(
                fakeproj.manifest_groups, "test-group, admin-group"
            )

      fakeproj.config.SetString('repo.reference', 'mirror/ref')
      self.assertEqual(fakeproj.reference, 'mirror/ref')
            fakeproj.config.SetString("repo.reference", "mirror/ref")
            self.assertEqual(fakeproj.reference, "mirror/ref")

      fakeproj.config.SetBoolean('repo.dissociate', False)
      self.assertFalse(fakeproj.dissociate)
            fakeproj.config.SetBoolean("repo.dissociate", False)
            self.assertFalse(fakeproj.dissociate)

      fakeproj.config.SetBoolean('repo.archive', False)
      self.assertFalse(fakeproj.archive)
            fakeproj.config.SetBoolean("repo.archive", False)
            self.assertFalse(fakeproj.archive)

      fakeproj.config.SetBoolean('repo.mirror', False)
      self.assertFalse(fakeproj.mirror)
            fakeproj.config.SetBoolean("repo.mirror", False)
            self.assertFalse(fakeproj.mirror)

      fakeproj.config.SetBoolean('repo.worktree', False)
      self.assertFalse(fakeproj.use_worktree)
            fakeproj.config.SetBoolean("repo.worktree", False)
            self.assertFalse(fakeproj.use_worktree)

      fakeproj.config.SetBoolean('repo.clonebundle', False)
      self.assertFalse(fakeproj.clone_bundle)
            fakeproj.config.SetBoolean("repo.clonebundle", False)
            self.assertFalse(fakeproj.clone_bundle)

      fakeproj.config.SetBoolean('repo.submodules', False)
      self.assertFalse(fakeproj.submodules)
            fakeproj.config.SetBoolean("repo.submodules", False)
            self.assertFalse(fakeproj.submodules)

      fakeproj.config.SetBoolean('repo.git-lfs', False)
      self.assertFalse(fakeproj.git_lfs)
            fakeproj.config.SetBoolean("repo.git-lfs", False)
            self.assertFalse(fakeproj.git_lfs)

      fakeproj.config.SetBoolean('repo.superproject', False)
      self.assertFalse(fakeproj.use_superproject)
            fakeproj.config.SetBoolean("repo.superproject", False)
            self.assertFalse(fakeproj.use_superproject)

      fakeproj.config.SetBoolean('repo.partialclone', False)
      self.assertFalse(fakeproj.partial_clone)
            fakeproj.config.SetBoolean("repo.partialclone", False)
            self.assertFalse(fakeproj.partial_clone)

      fakeproj.config.SetString('repo.depth', '48')
      self.assertEqual(fakeproj.depth, '48')
            fakeproj.config.SetString("repo.depth", "48")
            self.assertEqual(fakeproj.depth, "48")

      fakeproj.config.SetString('repo.clonefilter', 'blob:limit=10M')
      self.assertEqual(fakeproj.clone_filter, 'blob:limit=10M')
            fakeproj.config.SetString("repo.clonefilter", "blob:limit=10M")
            self.assertEqual(fakeproj.clone_filter, "blob:limit=10M")

      fakeproj.config.SetString('repo.partialcloneexclude', 'third_party/big_repo')
      self.assertEqual(fakeproj.partial_clone_exclude, 'third_party/big_repo')
            fakeproj.config.SetString(
                "repo.partialcloneexclude", "third_party/big_repo"
            )
            self.assertEqual(
                fakeproj.partial_clone_exclude, "third_party/big_repo"
            )

      fakeproj.config.SetString('manifest.platform', 'auto')
      self.assertEqual(fakeproj.manifest_platform, 'auto')
            fakeproj.config.SetString("manifest.platform", "auto")
            self.assertEqual(fakeproj.manifest_platform, "auto")

@ -22,35 +22,39 @@ import repo_trace


class TraceTests(unittest.TestCase):
  """Check Trace behavior."""
    """Check Trace behavior."""

  def testTrace_MaxSizeEnforced(self):
    content = 'git chicken'
    def testTrace_MaxSizeEnforced(self):
        content = "git chicken"

    with repo_trace.Trace(content, first_trace=True):
      pass
    first_trace_size = os.path.getsize(repo_trace._TRACE_FILE)
        with repo_trace.Trace(content, first_trace=True):
            pass
        first_trace_size = os.path.getsize(repo_trace._TRACE_FILE)

    with repo_trace.Trace(content):
      pass
    self.assertGreater(
        os.path.getsize(repo_trace._TRACE_FILE), first_trace_size)
        with repo_trace.Trace(content):
            pass
        self.assertGreater(
            os.path.getsize(repo_trace._TRACE_FILE), first_trace_size
        )

    # Check we clear everything if the last chunk is larger than _MAX_SIZE.
    with mock.patch('repo_trace._MAX_SIZE', 0):
      with repo_trace.Trace(content, first_trace=True):
        pass
      self.assertEqual(first_trace_size,
                       os.path.getsize(repo_trace._TRACE_FILE))
        # Check we clear everything if the last chunk is larger than _MAX_SIZE.
        with mock.patch("repo_trace._MAX_SIZE", 0):
            with repo_trace.Trace(content, first_trace=True):
                pass
            self.assertEqual(
                first_trace_size, os.path.getsize(repo_trace._TRACE_FILE)
            )

    # Check we only clear the chunks we need to.
    repo_trace._MAX_SIZE = (first_trace_size + 1) / (1024 * 1024)
    with repo_trace.Trace(content, first_trace=True):
      pass
    self.assertEqual(first_trace_size * 2,
                     os.path.getsize(repo_trace._TRACE_FILE))
        # Check we only clear the chunks we need to.
        repo_trace._MAX_SIZE = (first_trace_size + 1) / (1024 * 1024)
        with repo_trace.Trace(content, first_trace=True):
            pass
        self.assertEqual(
            first_trace_size * 2, os.path.getsize(repo_trace._TRACE_FILE)
        )

    with repo_trace.Trace(content, first_trace=True):
      pass
    self.assertEqual(first_trace_size * 2,
                     os.path.getsize(repo_trace._TRACE_FILE))
        with repo_trace.Trace(content, first_trace=True):
            pass
        self.assertEqual(
            first_trace_size * 2, os.path.getsize(repo_trace._TRACE_FILE)
        )

@ -23,52 +23,56 @@ import ssh


class SshTests(unittest.TestCase):
  """Tests the ssh functions."""
    """Tests the ssh functions."""

  def test_parse_ssh_version(self):
    """Check _parse_ssh_version() handling."""
    ver = ssh._parse_ssh_version('Unknown\n')
    self.assertEqual(ver, ())
    ver = ssh._parse_ssh_version('OpenSSH_1.0\n')
    self.assertEqual(ver, (1, 0))
    ver = ssh._parse_ssh_version('OpenSSH_6.6.1p1 Ubuntu-2ubuntu2.13, OpenSSL 1.0.1f 6 Jan 2014\n')
    self.assertEqual(ver, (6, 6, 1))
    ver = ssh._parse_ssh_version('OpenSSH_7.6p1 Ubuntu-4ubuntu0.3, OpenSSL 1.0.2n 7 Dec 2017\n')
    self.assertEqual(ver, (7, 6))
    def test_parse_ssh_version(self):
        """Check _parse_ssh_version() handling."""
        ver = ssh._parse_ssh_version("Unknown\n")
        self.assertEqual(ver, ())
        ver = ssh._parse_ssh_version("OpenSSH_1.0\n")
        self.assertEqual(ver, (1, 0))
        ver = ssh._parse_ssh_version(
            "OpenSSH_6.6.1p1 Ubuntu-2ubuntu2.13, OpenSSL 1.0.1f 6 Jan 2014\n"
        )
        self.assertEqual(ver, (6, 6, 1))
        ver = ssh._parse_ssh_version(
            "OpenSSH_7.6p1 Ubuntu-4ubuntu0.3, OpenSSL 1.0.2n 7 Dec 2017\n"
        )
        self.assertEqual(ver, (7, 6))

  def test_version(self):
    """Check version() handling."""
    with mock.patch('ssh._run_ssh_version', return_value='OpenSSH_1.2\n'):
      self.assertEqual(ssh.version(), (1, 2))
    def test_version(self):
        """Check version() handling."""
        with mock.patch("ssh._run_ssh_version", return_value="OpenSSH_1.2\n"):
            self.assertEqual(ssh.version(), (1, 2))

  def test_context_manager_empty(self):
    """Verify context manager with no clients works correctly."""
    with multiprocessing.Manager() as manager:
      with ssh.ProxyManager(manager):
        pass
    def test_context_manager_empty(self):
        """Verify context manager with no clients works correctly."""
        with multiprocessing.Manager() as manager:
            with ssh.ProxyManager(manager):
                pass

  def test_context_manager_child_cleanup(self):
    """Verify orphaned clients & masters get cleaned up."""
    with multiprocessing.Manager() as manager:
      with ssh.ProxyManager(manager) as ssh_proxy:
        client = subprocess.Popen(['sleep', '964853320'])
        ssh_proxy.add_client(client)
        master = subprocess.Popen(['sleep', '964853321'])
        ssh_proxy.add_master(master)
    # If the process still exists, these will throw timeout errors.
    client.wait(0)
    master.wait(0)
    def test_context_manager_child_cleanup(self):
        """Verify orphaned clients & masters get cleaned up."""
        with multiprocessing.Manager() as manager:
            with ssh.ProxyManager(manager) as ssh_proxy:
                client = subprocess.Popen(["sleep", "964853320"])
                ssh_proxy.add_client(client)
                master = subprocess.Popen(["sleep", "964853321"])
                ssh_proxy.add_master(master)
        # If the process still exists, these will throw timeout errors.
        client.wait(0)
        master.wait(0)

  def test_ssh_sock(self):
    """Check sock() function."""
    manager = multiprocessing.Manager()
    proxy = ssh.ProxyManager(manager)
    with mock.patch('tempfile.mkdtemp', return_value='/tmp/foo'):
      # old ssh version uses port
      with mock.patch('ssh.version', return_value=(6, 6)):
        self.assertTrue(proxy.sock().endswith('%p'))
    def test_ssh_sock(self):
        """Check sock() function."""
        manager = multiprocessing.Manager()
        proxy = ssh.ProxyManager(manager)
        with mock.patch("tempfile.mkdtemp", return_value="/tmp/foo"):
            # Old ssh version uses port.
            with mock.patch("ssh.version", return_value=(6, 6)):
                self.assertTrue(proxy.sock().endswith("%p"))

      proxy._sock_path = None
      # new ssh version uses hash
      with mock.patch('ssh.version', return_value=(6, 7)):
        self.assertTrue(proxy.sock().endswith('%C'))
            proxy._sock_path = None
            # New ssh version uses hash.
            with mock.patch("ssh.version", return_value=(6, 7)):
                self.assertTrue(proxy.sock().endswith("%C"))

@ -21,53 +21,57 @@ import subcmds


class AllCommands(unittest.TestCase):
  """Check registered all_commands."""
    """Check registered all_commands."""

  def test_required_basic(self):
    """Basic checking of registered commands."""
    # NB: We don't test all subcommands as we want to avoid "change detection"
    # tests, so we just look for the most common/important ones here that are
    # unlikely to ever change.
    for cmd in {'cherry-pick', 'help', 'init', 'start', 'sync', 'upload'}:
      self.assertIn(cmd, subcmds.all_commands)
    def test_required_basic(self):
        """Basic checking of registered commands."""
        # NB: We don't test all subcommands as we want to avoid "change
        # detection" tests, so we just look for the most common/important ones
        # here that are unlikely to ever change.
        for cmd in {"cherry-pick", "help", "init", "start", "sync", "upload"}:
            self.assertIn(cmd, subcmds.all_commands)

  def test_naming(self):
    """Verify we don't add things that we shouldn't."""
    for cmd in subcmds.all_commands:
      # Reject filename suffixes like "help.py".
      self.assertNotIn('.', cmd)
    def test_naming(self):
        """Verify we don't add things that we shouldn't."""
        for cmd in subcmds.all_commands:
            # Reject filename suffixes like "help.py".
            self.assertNotIn(".", cmd)

      # Make sure all '_' were converted to '-'.
      self.assertNotIn('_', cmd)
            # Make sure all '_' were converted to '-'.
            self.assertNotIn("_", cmd)

      # Reject internal python paths like "__init__".
      self.assertFalse(cmd.startswith('__'))
            # Reject internal python paths like "__init__".
            self.assertFalse(cmd.startswith("__"))

  def test_help_desc_style(self):
    """Force some consistency in option descriptions.
    def test_help_desc_style(self):
        """Force some consistency in option descriptions.

    Python's optparse & argparse has a few default options like --help. Their
    option description text uses lowercase sentence fragments, so enforce our
    options follow the same style so UI is consistent.
        Python's optparse & argparse has a few default options like --help.
        Their option description text uses lowercase sentence fragments, so
        enforce our options follow the same style so UI is consistent.

    We enforce:
    * Text starts with lowercase.
    * Text doesn't end with period.
    """
    for name, cls in subcmds.all_commands.items():
      cmd = cls()
      parser = cmd.OptionParser
      for option in parser.option_list:
        if option.help == optparse.SUPPRESS_HELP:
          continue
        We enforce:
        * Text starts with lowercase.
        * Text doesn't end with period.
        """
        for name, cls in subcmds.all_commands.items():
            cmd = cls()
            parser = cmd.OptionParser
            for option in parser.option_list:
                if option.help == optparse.SUPPRESS_HELP:
                    continue

        c = option.help[0]
        self.assertEqual(
            c.lower(), c,
            msg=f'subcmds/{name}.py: {option.get_opt_string()}: help text '
                f'should start with lowercase: "{option.help}"')
                c = option.help[0]
                self.assertEqual(
                    c.lower(),
                    c,
                    msg=f"subcmds/{name}.py: {option.get_opt_string()}: "
                    f'help text should start with lowercase: "{option.help}"',
                )

        self.assertNotEqual(
            option.help[-1], '.',
            msg=f'subcmds/{name}.py: {option.get_opt_string()}: help text '
                f'should not end in a period: "{option.help}"')
                self.assertNotEqual(
                    option.help[-1],
                    ".",
                    msg=f"subcmds/{name}.py: {option.get_opt_string()}: "
                    f'help text should not end in a period: "{option.help}"',
                )

@ -20,30 +20,27 @@ from subcmds import init


class InitCommand(unittest.TestCase):
  """Check registered all_commands."""
    """Check registered all_commands."""

  def setUp(self):
    self.cmd = init.Init()
    def setUp(self):
        self.cmd = init.Init()

  def test_cli_parser_good(self):
    """Check valid command line options."""
    ARGV = (
        [],
    )
    for argv in ARGV:
      opts, args = self.cmd.OptionParser.parse_args(argv)
      self.cmd.ValidateOptions(opts, args)
    def test_cli_parser_good(self):
        """Check valid command line options."""
        ARGV = ([],)
        for argv in ARGV:
            opts, args = self.cmd.OptionParser.parse_args(argv)
            self.cmd.ValidateOptions(opts, args)

  def test_cli_parser_bad(self):
    """Check invalid command line options."""
    ARGV = (
        # Too many arguments.
        ['url', 'asdf'],

        # Conflicting options.
        ['--mirror', '--archive'],
    )
    for argv in ARGV:
      opts, args = self.cmd.OptionParser.parse_args(argv)
      with self.assertRaises(SystemExit):
        self.cmd.ValidateOptions(opts, args)
    def test_cli_parser_bad(self):
        """Check invalid command line options."""
        ARGV = (
            # Too many arguments.
            ["url", "asdf"],
            # Conflicting options.
            ["--mirror", "--archive"],
        )
        for argv in ARGV:
            opts, args = self.cmd.OptionParser.parse_args(argv)
            with self.assertRaises(SystemExit):
                self.cmd.ValidateOptions(opts, args)

@ -23,111 +23,138 @@ import command
from subcmds import sync


@pytest.mark.parametrize('use_superproject, cli_args, result', [
    (True, ['--current-branch'], True),
    (True, ['--no-current-branch'], True),
    (True, [], True),
    (False, ['--current-branch'], True),
    (False, ['--no-current-branch'], False),
    (False, [], None),
])
@pytest.mark.parametrize(
    "use_superproject, cli_args, result",
    [
        (True, ["--current-branch"], True),
        (True, ["--no-current-branch"], True),
        (True, [], True),
        (False, ["--current-branch"], True),
        (False, ["--no-current-branch"], False),
        (False, [], None),
    ],
)
def test_get_current_branch_only(use_superproject, cli_args, result):
  """Test Sync._GetCurrentBranchOnly logic.
    """Test Sync._GetCurrentBranchOnly logic.

  Sync._GetCurrentBranchOnly should return True if a superproject is requested,
  and otherwise the value of the current_branch_only option.
  """
  cmd = sync.Sync()
  opts, _ = cmd.OptionParser.parse_args(cli_args)
    Sync._GetCurrentBranchOnly should return True if a superproject is
    requested, and otherwise the value of the current_branch_only option.
    """
    cmd = sync.Sync()
    opts, _ = cmd.OptionParser.parse_args(cli_args)

  with mock.patch('git_superproject.UseSuperproject',
                  return_value=use_superproject):
    assert cmd._GetCurrentBranchOnly(opts, cmd.manifest) == result
    with mock.patch(
        "git_superproject.UseSuperproject", return_value=use_superproject
    ):
        assert cmd._GetCurrentBranchOnly(opts, cmd.manifest) == result


# Used to patch os.cpu_count() for reliable results.
OS_CPU_COUNT = 24

@pytest.mark.parametrize('argv, jobs_manifest, jobs, jobs_net, jobs_check', [
    # No user or manifest settings.
    ([], None, OS_CPU_COUNT, 1, command.DEFAULT_LOCAL_JOBS),
    # No user settings, so manifest settings control.
    ([], 3, 3, 3, 3),
    # User settings, but no manifest.
    (['--jobs=4'], None, 4, 4, 4),
    (['--jobs=4', '--jobs-network=5'], None, 4, 5, 4),
    (['--jobs=4', '--jobs-checkout=6'], None, 4, 4, 6),
    (['--jobs=4', '--jobs-network=5', '--jobs-checkout=6'], None, 4, 5, 6),
    (['--jobs-network=5'], None, OS_CPU_COUNT, 5, command.DEFAULT_LOCAL_JOBS),
    (['--jobs-checkout=6'], None, OS_CPU_COUNT, 1, 6),
    (['--jobs-network=5', '--jobs-checkout=6'], None, OS_CPU_COUNT, 5, 6),
    # User settings with manifest settings.
    (['--jobs=4'], 3, 4, 4, 4),
    (['--jobs=4', '--jobs-network=5'], 3, 4, 5, 4),
    (['--jobs=4', '--jobs-checkout=6'], 3, 4, 4, 6),
    (['--jobs=4', '--jobs-network=5', '--jobs-checkout=6'], 3, 4, 5, 6),
    (['--jobs-network=5'], 3, 3, 5, 3),
    (['--jobs-checkout=6'], 3, 3, 3, 6),
    (['--jobs-network=5', '--jobs-checkout=6'], 3, 3, 5, 6),
    # Settings that exceed rlimits get capped.
    (['--jobs=1000000'], None, 83, 83, 83),
    ([], 1000000, 83, 83, 83),
])

@pytest.mark.parametrize(
    "argv, jobs_manifest, jobs, jobs_net, jobs_check",
    [
        # No user or manifest settings.
        ([], None, OS_CPU_COUNT, 1, command.DEFAULT_LOCAL_JOBS),
        # No user settings, so manifest settings control.
        ([], 3, 3, 3, 3),
        # User settings, but no manifest.
        (["--jobs=4"], None, 4, 4, 4),
        (["--jobs=4", "--jobs-network=5"], None, 4, 5, 4),
        (["--jobs=4", "--jobs-checkout=6"], None, 4, 4, 6),
        (["--jobs=4", "--jobs-network=5", "--jobs-checkout=6"], None, 4, 5, 6),
        (
            ["--jobs-network=5"],
            None,
            OS_CPU_COUNT,
            5,
            command.DEFAULT_LOCAL_JOBS,
        ),
        (["--jobs-checkout=6"], None, OS_CPU_COUNT, 1, 6),
        (["--jobs-network=5", "--jobs-checkout=6"], None, OS_CPU_COUNT, 5, 6),
        # User settings with manifest settings.
        (["--jobs=4"], 3, 4, 4, 4),
        (["--jobs=4", "--jobs-network=5"], 3, 4, 5, 4),
        (["--jobs=4", "--jobs-checkout=6"], 3, 4, 4, 6),
        (["--jobs=4", "--jobs-network=5", "--jobs-checkout=6"], 3, 4, 5, 6),
        (["--jobs-network=5"], 3, 3, 5, 3),
        (["--jobs-checkout=6"], 3, 3, 3, 6),
        (["--jobs-network=5", "--jobs-checkout=6"], 3, 3, 5, 6),
        # Settings that exceed rlimits get capped.
        (["--jobs=1000000"], None, 83, 83, 83),
        ([], 1000000, 83, 83, 83),
    ],
)
def test_cli_jobs(argv, jobs_manifest, jobs, jobs_net, jobs_check):
  """Tests --jobs option behavior."""
  mp = mock.MagicMock()
  mp.manifest.default.sync_j = jobs_manifest
    """Tests --jobs option behavior."""
    mp = mock.MagicMock()
    mp.manifest.default.sync_j = jobs_manifest

  cmd = sync.Sync()
  opts, args = cmd.OptionParser.parse_args(argv)
  cmd.ValidateOptions(opts, args)
    cmd = sync.Sync()
    opts, args = cmd.OptionParser.parse_args(argv)
    cmd.ValidateOptions(opts, args)

  with mock.patch.object(sync, '_rlimit_nofile', return_value=(256, 256)):
    with mock.patch.object(os, 'cpu_count', return_value=OS_CPU_COUNT):
      cmd._ValidateOptionsWithManifest(opts, mp)
      assert opts.jobs == jobs
      assert opts.jobs_network == jobs_net
      assert opts.jobs_checkout == jobs_check
    with mock.patch.object(sync, "_rlimit_nofile", return_value=(256, 256)):
        with mock.patch.object(os, "cpu_count", return_value=OS_CPU_COUNT):
            cmd._ValidateOptionsWithManifest(opts, mp)
            assert opts.jobs == jobs
            assert opts.jobs_network == jobs_net
            assert opts.jobs_checkout == jobs_check


class GetPreciousObjectsState(unittest.TestCase):
  """Tests for _GetPreciousObjectsState."""
    """Tests for _GetPreciousObjectsState."""

  def setUp(self):
    """Common setup."""
    self.cmd = sync.Sync()
    self.project = p = mock.MagicMock(use_git_worktrees=False,
                                      UseAlternates=False)
    p.manifest.GetProjectsWithName.return_value = [p]
    def setUp(self):
        """Common setup."""
        self.cmd = sync.Sync()
        self.project = p = mock.MagicMock(
            use_git_worktrees=False, UseAlternates=False
        )
        p.manifest.GetProjectsWithName.return_value = [p]

    self.opt = mock.Mock(spec_set=['this_manifest_only'])
    self.opt.this_manifest_only = False
        self.opt = mock.Mock(spec_set=["this_manifest_only"])
        self.opt.this_manifest_only = False

  def test_worktrees(self):
    """False for worktrees."""
    self.project.use_git_worktrees = True
    self.assertFalse(self.cmd._GetPreciousObjectsState(self.project, self.opt))
    def test_worktrees(self):
        """False for worktrees."""
        self.project.use_git_worktrees = True
        self.assertFalse(
            self.cmd._GetPreciousObjectsState(self.project, self.opt)
        )

  def test_not_shared(self):
    """Singleton project."""
    self.assertFalse(self.cmd._GetPreciousObjectsState(self.project, self.opt))
    def test_not_shared(self):
        """Singleton project."""
        self.assertFalse(
            self.cmd._GetPreciousObjectsState(self.project, self.opt)
        )

  def test_shared(self):
    """Shared project."""
    self.project.manifest.GetProjectsWithName.return_value = [
        self.project, self.project
    ]
    self.assertTrue(self.cmd._GetPreciousObjectsState(self.project, self.opt))
    def test_shared(self):
        """Shared project."""
        self.project.manifest.GetProjectsWithName.return_value = [
            self.project,
            self.project,
        ]
        self.assertTrue(
            self.cmd._GetPreciousObjectsState(self.project, self.opt)
        )

  def test_shared_with_alternates(self):
    """Shared project, with alternates."""
    self.project.manifest.GetProjectsWithName.return_value = [
        self.project, self.project
    ]
    self.project.UseAlternates = True
    self.assertFalse(self.cmd._GetPreciousObjectsState(self.project, self.opt))
    def test_shared_with_alternates(self):
        """Shared project, with alternates."""
        self.project.manifest.GetProjectsWithName.return_value = [
            self.project,
            self.project,
        ]
        self.project.UseAlternates = True
        self.assertFalse(
            self.cmd._GetPreciousObjectsState(self.project, self.opt)
        )

  def test_not_found(self):
    """Project not found in manifest."""
    self.project.manifest.GetProjectsWithName.return_value = []
    self.assertFalse(self.cmd._GetPreciousObjectsState(self.project, self.opt))
    def test_not_found(self):
        """Project not found in manifest."""
        self.project.manifest.GetProjectsWithName.return_value = []
        self.assertFalse(
            self.cmd._GetPreciousObjectsState(self.project, self.opt)
        )

@ -20,9 +20,9 @@ from release import update_manpages


class UpdateManpagesTest(unittest.TestCase):
  """Tests the update-manpages code."""
    """Tests the update-manpages code."""

  def test_replace_regex(self):
    """Check that replace_regex works."""
    data = '\n\033[1mSummary\033[m\n'
    self.assertEqual(update_manpages.replace_regex(data),'\nSummary\n')
    def test_replace_regex(self):
        """Check that replace_regex works."""
        data = "\n\033[1mSummary\033[m\n"
        self.assertEqual(update_manpages.replace_regex(data), "\nSummary\n")