sync: Add support to dump a JSON event log of all sync events.

Change-Id: Id4852968ac1b2bf0093007cf2e5ca951ddab8b3b
David Riley 2017-04-05 00:02:59 -07:00 committed by David Pursehouse
parent fef9f21b28
commit e0684addee
6 changed files with 236 additions and 13 deletions

command.py

@@ -19,6 +19,7 @@ import platform
import re
import sys
from event_log import EventLog
from error import NoSuchProjectError
from error import InvalidProjectGroupsError
@@ -28,6 +29,7 @@ class Command(object):
"""
common = False
event_log = EventLog()
manifest = None
_optparse = None

event_log.py (new file, 177 lines)

@@ -0,0 +1,177 @@
#
# Copyright (C) 2017 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import json
import multiprocessing
TASK_COMMAND = 'command'
TASK_SYNC_NETWORK = 'sync-network'
TASK_SYNC_LOCAL = 'sync-local'
class EventLog(object):
"""Event log that records events that occurred during a repo invocation.
Events are written to the log as consecutive JSON entries, one per line.
Each entry contains the following keys:
- id: A ('RepoOp', ID) tuple, suitable for storing in a datastore.
The ID is only unique for the invocation of the repo command.
- name: Name of the object being operated upon.
- task_name: The task that was performed.
- start_time: Timestamp of when the operation started.
- finish_time: Timestamp of when the operation finished.
- status: 'pass' if the operation was successful, 'fail' otherwise.
- try: A counter indicating the try count of this task.
Optionally:
- parent: A ('RepoOp', ID) tuple indicating the parent event for nested
events.
Valid task_names include:
- command: The invocation of a subcommand.
- sync-network: The network component of a sync command.
- sync-local: The local component of a sync command.
Specific tasks may include additional informational properties.
"""
def __init__(self):
"""Initializes the event log."""
self._log = []
self._next_id = _EventIdGenerator()
self._parent = None
def Add(self, name, task_name, start, finish=None, success=None,
try_count=1, kind='RepoOp'):
"""Add an event to the log.
Args:
name: Name of the object being operated upon.
task_name: A sub-task that was performed for name.
start: Timestamp of when the operation started.
finish: Timestamp of when the operation finished.
success: Boolean indicating if the operation was successful.
try_count: A counter indicating the try count of this task.
kind: The kind of the object for the unique identifier.
Returns:
A dictionary of the event added to the log.
"""
event = {
'id': (kind, self._next_id.next()),
'name': name,
'task_name': task_name,
'start_time': start,
'try': try_count,
}
if self._parent:
event['parent'] = self._parent['id']
if success is not None or finish is not None:
self.FinishEvent(event, finish, success)
self._log.append(event)
return event
def AddSync(self, project, task_name, start, finish, success):
"""Add a event to the log for a sync command.
Args:
project: Project being synced.
task_name: A sub-task that was performed for name.
One of (TASK_SYNC_NETWORK, TASK_SYNC_LOCAL)
start: Timestamp of when the operation started.
finish: Timestamp of when the operation finished.
success: Boolean indicating if the operation was successful.
Returns:
A dictionary of the event added to the log.
"""
event = self.Add(project.relpath, task_name, start, finish, success)
if event is not None:
event['project'] = project.name
if project.revisionExpr:
event['revision'] = project.revisionExpr
if project.remote.url:
event['project_url'] = project.remote.url
if project.remote.fetchUrl:
event['remote_url'] = project.remote.fetchUrl
try:
event['git_hash'] = project.GetCommitRevisionId()
except Exception:
pass
return event
def GetStatusString(self, success):
"""Converst a boolean success to a status string.
Args:
success: Boolean indicating if the operation was successful.
Returns:
A status string, either 'pass' or 'fail'.
"""
return 'pass' if success else 'fail'
def FinishEvent(self, event, finish, success):
"""Finishes an incomplete event.
Args:
event: An event that has been added to the log.
finish: Timestamp of when the operation finished.
success: Boolean indicating if the operation was successful.
Returns:
A dictionary of the event added to the log.
"""
event['status'] = self.GetStatusString(success)
event['finish_time'] = finish
return event
def SetParent(self, event):
"""Set a parent event for all new entities.
Args:
event: The event to use as a parent.
"""
self._parent = event
def Write(self, filename):
"""Writes the log out to a file.
Args:
filename: The file to write the log to.
"""
with open(filename, 'w+') as f:
for e in self._log:
json.dump(e, f, sort_keys=True)
f.write('\n')
def _EventIdGenerator():
"""Returns multi-process safe iterator that generates locally unique id.
Yields:
A unique, to this invocation of the program, integer id.
"""
eid = multiprocessing.Value('i', 1)
while True:
with eid.get_lock():
val = eid.value
eid.value += 1
yield val
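To make the output format concrete, a small usage sketch follows (not part of the change; the object name, file path and timestamps are illustrative, while the JSON keys are the ones Add, FinishEvent and Write actually emit):

import time
from event_log import EventLog, TASK_COMMAND

log = EventLog()
start = time.time()
event = log.Add('manifests', TASK_COMMAND, start)  # records id, name, task_name, start_time, try
log.FinishEvent(event, time.time(), True)          # adds finish_time and status ('pass' or 'fail')
log.Write('/tmp/repo_event_log.json')              # one JSON object per line, roughly:
# {"finish_time": 1491382981.5, "id": ["RepoOp", 1], "name": "manifests",
#  "start_time": 1491382980.2, "status": "pass", "task_name": "command", "try": 1}

Events recorded through AddSync() additionally carry project, revision, project_url, remote_url and, when available, git_hash.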

main.py

@@ -37,6 +37,7 @@ except ImportError:
kerberos = None
from color import SetDefaultColoring
import event_log
from trace import SetTrace
from git_command import git, GitCommand
from git_config import init_ssh, close_ssh
@@ -85,6 +86,9 @@ global_options.add_option('--time',
global_options.add_option('--version',
dest='show_version', action='store_true',
help='display this version of repo')
global_options.add_option('--event-log',
dest='event_log', action='store',
help='filename of event log to append timeline to')
class _Repo(object):
def __init__(self, repodir):
@@ -176,6 +180,8 @@ class _Repo(object):
RunPager(config)
start = time.time()
cmd_event = cmd.event_log.Add(name, event_log.TASK_COMMAND, start)
cmd.event_log.SetParent(cmd_event)
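# Events added from here on (e.g. the per-project sync events) will record
# this command event as their parent.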
try:
result = cmd.Execute(copts, cargs)
except (DownloadError, ManifestInvalidRevisionError,
@@ -203,7 +209,8 @@ class _Repo(object):
result = e.code
raise
finally:
elapsed = time.time() - start
finish = time.time()
elapsed = finish - start
hours, remainder = divmod(elapsed, 3600)
minutes, seconds = divmod(remainder, 60)
if gopts.time:
@@ -213,6 +220,12 @@ class _Repo(object):
print('real\t%dh%dm%.3fs' % (hours, minutes, seconds),
file=sys.stderr)
cmd.event_log.FinishEvent(cmd_event, finish,
result is None or result == 0)
if gopts.event_log:
cmd.event_log.Write(os.path.abspath(
os.path.expanduser(gopts.event_log)))
return result
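With the new global option, a timeline for any invocation can be captured and inspected afterwards, for example (output path illustrative):

repo --event-log=/tmp/repo-events.json sync

The file is written once the command finishes, with one JSON event per line: the top-level command event plus, for sync, the per-project sync-network and sync-local events recorded below.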

manifest_xml.py

@@ -110,7 +110,8 @@ class _XmlRemote(object):
return url
def ToRemoteSpec(self, projectName):
url = self.resolvedFetchUrl.rstrip('/') + '/' + projectName
fetchUrl = self.resolvedFetchUrl.rstrip('/')
url = fetchUrl + '/' + projectName
remoteName = self.name
if self.remoteAlias:
remoteName = self.remoteAlias
@@ -118,7 +119,8 @@ class _XmlRemote(object):
url=url,
pushUrl=self.pushUrl,
review=self.reviewUrl,
orig_name=self.name)
orig_name=self.name,
fetchUrl=self.fetchUrl)
class XmlManifest(object):
"""manages the repo configuration file"""

project.py

@@ -323,13 +323,15 @@ class RemoteSpec(object):
pushUrl=None,
review=None,
revision=None,
orig_name=None):
orig_name=None,
fetchUrl=None):
self.name = name
self.url = url
self.pushUrl = pushUrl
self.review = review
self.revision = revision
self.orig_name = orig_name
self.fetchUrl = fetchUrl
class RepoHook(object):
@@ -2876,13 +2878,14 @@ class SyncBuffer(object):
self.detach_head = detach_head
self.clean = True
self.recent_clean = True
def info(self, project, fmt, *args):
self._messages.append(_InfoMessage(project, fmt % args))
def fail(self, project, err=None):
self._failures.append(_Failure(project, err))
self.clean = False
self._MarkUnclean()
def later1(self, project, what):
self._later_queue1.append(_Later(project, what))
@@ -2896,6 +2899,15 @@ class SyncBuffer(object):
self._PrintMessages()
return self.clean
def Recently(self):
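"""Returns True if no failures were recorded since the last call, then resets the flag."""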
recent_clean = self.recent_clean
self.recent_clean = True
return recent_clean
def _MarkUnclean(self):
self.clean = False
self.recent_clean = False
def _RunLater(self):
for q in ['_later_queue1', '_later_queue2']:
if not self._RunQueue(q):
@@ -2904,7 +2916,7 @@ class SyncBuffer(object):
def _RunQueue(self, queue):
for m in getattr(self, queue):
if not m.Run(self):
self.clean = False
self._MarkUnclean()
return False
setattr(self, queue, [])
return True

subcmds/sync.py

@@ -64,6 +64,7 @@ try:
except ImportError:
multiprocessing = None
import event_log
from git_command import GIT, git_require
from git_config import GetUrlCookieFile
from git_refs import R_HEADS, HEAD
@@ -304,9 +305,10 @@ later is required to fix a server side protocol bug.
# - We always set err_event in the case of an exception.
# - We always make sure we call sem.release().
# - We always make sure we unlock the lock if we locked it.
try:
try:
start = time.time()
success = False
try:
try:
success = project.Sync_NetworkHalf(
quiet=opt.quiet,
current_branch_only=opt.current_branch_only,
@@ -345,6 +347,9 @@ later is required to fix a server side protocol bug.
finally:
if did_lock:
lock.release()
finish = time.time()
self.event_log.AddSync(project, event_log.TASK_SYNC_NETWORK,
start, finish, success)
return success
@@ -720,16 +725,24 @@ later is required to fix a server side protocol bug.
_PostRepoUpgrade(self.manifest, quiet=opt.quiet)
if not opt.local_only:
mp.Sync_NetworkHalf(quiet=opt.quiet,
start = time.time()
success = mp.Sync_NetworkHalf(quiet=opt.quiet,
current_branch_only=opt.current_branch_only,
no_tags=opt.no_tags,
optimized_fetch=opt.optimized_fetch,
submodules=self.manifest.HasSubmodules)
finish = time.time()
self.event_log.AddSync(mp, event_log.TASK_SYNC_NETWORK,
start, finish, success)
if mp.HasChanges:
syncbuf = SyncBuffer(mp.config)
start = time.time()
mp.Sync_LocalHalf(syncbuf, submodules=self.manifest.HasSubmodules)
if not syncbuf.Finish():
clean = syncbuf.Finish()
self.event_log.AddSync(mp, event_log.TASK_SYNC_LOCAL,
start, time.time(), clean)
if not clean:
sys.exit(1)
self._ReloadManifest(manifest_name)
if opt.jobs is None:
@@ -823,7 +836,10 @@ later is required to fix a server side protocol bug.
for project in all_projects:
pm.update()
if project.worktree:
start = time.time()
project.Sync_LocalHalf(syncbuf, force_sync=opt.force_sync)
self.event_log.AddSync(project, event_log.TASK_SYNC_LOCAL,
start, time.time(), syncbuf.Recently())
pm.end()
print(file=sys.stderr)
if not syncbuf.Finish():
@@ -907,6 +923,7 @@ def _VerifyTag(project):
return False
return True
class _FetchTimes(object):
_ALPHA = 0.5