git cl try --luci, a set of hacks to demonstrate and iterate LUCI
BUG=532220 Review URL: https://codereview.chromium.org/1344183002 git-svn-id: svn://svn.chromium.org/chrome/trunk/tools/depot_tools@296885 0039d316-1c4b-4281-b951-d872f2087c98changes/01/332501/1
parent
47ea639f5f
commit
feb9e2a4ea
@ -0,0 +1,35 @@
|
||||
LUCI Hacks - A set of shims used to provide an iterable end-to-end demo.
|
||||
|
||||
The main goal of LUCI Hacks is to be able to iterate on Milo as if it were
|
||||
displaying real data. These are a couple of hacks used to get LUCI running from
|
||||
"git cl try --luci" to displaying a page on Milo. These include:
|
||||
|
||||
luci_recipe_run.py:
|
||||
* Downloading a depot_tools tarball onto swarming from Google Storage to bootstrap gclient.
|
||||
** LUCI shouldn't require depot_tools or gclient.
|
||||
* Running gclient on a swarming slave to bootstrap a full build+infra checkout.
|
||||
** M1: This should check out the recipes repo instead.
|
||||
** M2: The recipes repo should have been already isolated.
|
||||
* Seeding properties by emitting annotation in stdout so that Milo can pick it
|
||||
up
|
||||
* Running annotated_run.py from a fake build directory "build/slave/bot/build"
|
||||
|
||||
trigger_luci_job.py:
|
||||
* Master/Builder -> Recipe + Platform mapping is hardcoded into this file. This
|
||||
is information that is otherwise encoded into master.cfg/slaves.cfg.
|
||||
** Actually I lied, we just assume linux right now.
|
||||
** M1: This information should be encoded into the recipe via luci.cfg
|
||||
* Swarming client is checked out via "git clone <swarming repo>"
|
||||
* Swarming server is hard coded into the file. This info should also be pulled
|
||||
out from luci.cfg
|
||||
* Triggering is done directly to swarming. Once Swarming is able to pull from
|
||||
DM we can send jobs to DM instead of swarming.
|
||||
|
||||
|
||||
Misc:
|
||||
* This just runs the full recipe on the bot. Yes, including bot_update.
|
||||
** In the future this would be probably an isolated checkout?
|
||||
** This also includes having git_cache either set up a local cache, or download
|
||||
the bootstrap zip file on every invocation. In reality there isn't a huge
|
||||
time penalty for doing this, but at scale it does incur a non-trivial amount of
|
||||
unnecessary bandwidth.
|
@ -0,0 +1,12 @@
|
||||
# .isolate file: ships luci_recipe_run.py to the swarming bot and runs it
# with the bot's python interpreter.
{
  'variables': {
    'files': [
      'luci_recipe_run.py',
    ],
    'command': [
      'python',
      'luci_recipe_run.py',
    ],
  },
}
|
||||
|
@ -0,0 +1,81 @@
|
||||
#!/usr/bin/env python
|
||||
# Copyright (c) 2015 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
|
||||
"""Download recipe prerequisites and run a single recipe."""
|
||||
|
||||
|
||||
import base64
|
||||
import json
|
||||
import os
|
||||
import subprocess
|
||||
import sys
|
||||
import tarfile
|
||||
import urllib2
|
||||
import zlib
|
||||
|
||||
|
||||
def download(source, dest):
|
||||
u = urllib2.urlopen(source) # TODO: Verify certificate?
|
||||
with open(dest, 'wb') as f:
|
||||
while True:
|
||||
buf = u.read(8192)
|
||||
if not buf:
|
||||
break
|
||||
f.write(buf)
|
||||
|
||||
|
||||
def unzip(source, dest):
  """Extract the archive at `source` into the directory `dest`.

  Despite the name, `source` is a (possibly gzip-compressed) tar archive,
  not a zip file; tarfile auto-detects the compression from mode 'r'.
  """
  archive = tarfile.open(source, 'r')
  try:
    archive.extractall(dest)
  finally:
    archive.close()
|
||||
|
||||
|
||||
def get_infra(dt_dir, root_dir):
  """Run depot_tools' fetch.py to create an 'infra' checkout.

  `dt_dir` is the depot_tools checkout directory; the checkout is created
  inside `root_dir`. Raises subprocess.CalledProcessError on failure.
  """
  fetch_script = os.path.join(dt_dir, 'fetch.py')
  cmd = [sys.executable, fetch_script, 'infra']
  subprocess.check_call(cmd, cwd=root_dir)
|
||||
|
||||
|
||||
def seed_properties(args):
|
||||
# Assumes args[0] is factory properties and args[1] is build properties.
|
||||
fact_prop_str = args[0][len('--factory-properties-gz='):]
|
||||
build_prop_str = args[1][len('--build-properties-gz='):]
|
||||
fact_prop = json.loads(zlib.decompress(base64.b64decode(fact_prop_str)))
|
||||
build_prop = json.loads(zlib.decompress(base64.b64decode(build_prop_str)))
|
||||
for k, v in fact_prop.iteritems():
|
||||
print '@@@SET_BUILD_PROPERTY@%s@%s@@@' % (k, v)
|
||||
for k, v in build_prop.iteritems():
|
||||
print '@@@SET_BUILD_PROPERTY@%s@%s@@@' % (k, v)
|
||||
|
||||
|
||||
def main(args):
  """Bootstrap depot_tools plus an infra checkout, then run one recipe.

  `args` are forwarded verbatim to annotated_run.py and must include the
  --factory-properties-gz/--build-properties-gz pair consumed by
  seed_properties(). Raises subprocess.CalledProcessError if any of the
  bootstrap steps or the recipe run fails.
  """
  cwd = os.getcwd()

  # Bootstrap depot tools (required for fetching build/infra).
  dt_url = 'https://storage.googleapis.com/dumbtest/depot_tools.tar.gz'
  dt_dir = os.path.join(cwd, 'staging')
  os.makedirs(dt_dir)
  dt_zip = os.path.join(dt_dir, 'depot_tools.tar.gz')
  # dt_zip is already a full path; the original wrapped it in a pointless
  # one-argument os.path.join().
  download(dt_url, dt_zip)
  unzip(dt_zip, dt_dir)
  dt_path = os.path.join(dt_dir, 'depot_tools')
  os.environ['PATH'] = '%s:%s' % (dt_path, os.environ['PATH'])

  # Fetch infra (which comes with build, which comes with recipes).
  root_dir = os.path.join(cwd, 'b')
  os.makedirs(root_dir)
  get_infra(dt_path, root_dir)
  # annotated_run.py expects to run from a buildbot-shaped directory tree.
  work_dir = os.path.join(root_dir, 'build', 'slave', 'bot', 'build')
  os.makedirs(work_dir)

  # Emit annotations that encapsulate build properties so Milo can show them.
  seed_properties(args)

  # JUST DO IT.
  cmd = [sys.executable, '-u', '../../../scripts/slave/annotated_run.py']
  cmd.extend(args)
  subprocess.check_call(cmd, cwd=work_dir)
|
||||
|
||||
if __name__ == '__main__':
  # main() returns None on success; sys.exit(None) exits with status 0.
  sys.exit(main(sys.argv[1:]))
|
@ -0,0 +1,128 @@
|
||||
#!/usr/bin/env python
|
||||
# Copyright (c) 2015 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
|
||||
"""Tool to send a recipe job to run on Swarming."""
|
||||
|
||||
|
||||
import argparse
|
||||
import base64
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
import subprocess
|
||||
import sys
|
||||
import zlib
|
||||
|
||||
|
||||
# Git repo providing the swarming client tools (isolate.py / swarming.py).
SWARMING_URL = 'https://chromium.googlesource.com/external/swarming.client.git'
# Local checkout location for the swarming client clone.
CLIENT_LOCATION = os.path.expanduser('~/.swarming_client')
THIS_DIR = os.path.dirname(os.path.abspath(__file__))
# Isolate definition describing how luci_recipe_run.py runs on the bot.
ISOLATE = os.path.join(THIS_DIR, 'luci_recipe_run.isolate')
|
||||
|
||||
# This is put in place in order to not need to parse this information from
# master.cfg. In the LUCI future this would all be stored in a luci.cfg
# file alongside the repo.
# Maps buildbot builder name -> (master name, recipe name, swarming 'os'
# dimension value).
RECIPE_MAPPING = {
  'Infra Linux Trusty 64 Tester':
      ('tryserver.infra', 'infra/infra_repo_trybot', 'Ubuntu-14.04'),
  'Infra Linux Precise 32 Tester':
      ('tryserver.infra', 'infra/infra_repo_trybot', 'Ubuntu-12.04'),
  'Infra Mac Tester':
      ('tryserver.infra', 'infra/infra_repo_trybot', 'Mac'),
  'Infra Win Tester':
      ('tryserver.infra', 'infra/infra_repo_trybot', 'Win'),
  'Infra Windows Tester':
      ('tryserver.infra', 'infra/infra_repo_trybot', 'Win'),
  'Infra Presubmit':
      ('tryserver.infra', 'run_presubmit', 'Linux')
}
|
||||
|
||||
|
||||
def parse_args(args):
  """Parse command line flags for a LUCI try-job trigger.

  Once Clank switches to bot_update, bot_update would no longer require
  master/builder detection, and we can remove the master/builder from the
  args.
  """
  parser = argparse.ArgumentParser()
  # The first three flags are mandatory; all values are plain strings.
  for flag in ('--builder', '--issue', '--patchset'):
    parser.add_argument(flag, required=True)
  parser.add_argument('--revision', default='HEAD')
  parser.add_argument('--patch_project')
  return parser.parse_args(args)
|
||||
|
||||
|
||||
def ensure_swarming_client():
  """Make sure an up-to-date swarming client exists at CLIENT_LOCATION."""
  if os.path.exists(CLIENT_LOCATION):
    # Checkout already exists; just sync it to tip of tree.
    subprocess.check_call(['git', 'pull'], cwd=CLIENT_LOCATION)
  else:
    parent_dir, repo_name = os.path.split(CLIENT_LOCATION)
    subprocess.check_call(
        ['git', 'clone', SWARMING_URL, repo_name], cwd=parent_dir)
|
||||
|
||||
|
||||
def archive_isolate(isolate):
  """Upload the given .isolate file to the isolate server.

  Returns the isolated hash printed by isolate.py.
  """
  isolate_py = os.path.join(CLIENT_LOCATION, 'isolate.py')
  cmd = [
      sys.executable, isolate_py, 'archive',
      # Appending 'd' turns foo.isolate into foo.isolated.
      '--isolated=%sd' % isolate,
      '--isolate-server', 'https://isolateserver.appspot.com',
      '--isolate=%s' % isolate]
  output = subprocess.check_output(cmd)
  # isolate.py prints "<hash> <path>"; the hash is the first token.
  first_token = output.split()[0]
  return first_token.strip()
|
||||
|
||||
|
||||
def trigger_swarm(isolated, platform, build_props, factory_props):
  """Trigger a swarming task for `isolated` and return its task id.

  The property dicts are json-encoded, zlib-compressed and base64'd so they
  survive the trip through swarming's command line; luci_recipe_run.py
  reverses the encoding on the bot (seed_properties).

  Raises RuntimeError if swarming.py output does not contain a task URL.
  """
  # TODO: Make this trigger DM instead.
  swarm_py = os.path.join(CLIENT_LOCATION, 'swarming.py')
  build_props_gz = base64.b64encode(zlib.compress(json.dumps(build_props)))
  fac_props_gz = base64.b64encode(zlib.compress(json.dumps(factory_props)))
  cmd = [
      sys.executable, swarm_py, 'trigger', isolated,
      '--isolate-server', 'isolateserver.appspot.com',
      '--swarming', 'chromium-swarm-dev.appspot.com',
      '-d', 'os', platform,
      '--',
      '--factory-properties-gz=%s' % fac_props_gz,
      '--build-properties-gz=%s' % build_props_gz
  ]
  out = subprocess.check_output(cmd)
  # Dots escaped so the pattern matches the literal hostname only.
  m = re.search(
      r'https://chromium-swarm-dev\.appspot\.com/user/task/(.*)', out)
  if not m:
    # Fail with the tool's output instead of an opaque AttributeError on
    # m.group(1) when swarming.py changes its output format.
    raise RuntimeError(
        'Could not find task URL in swarming output:\n%s' % out)
  return m.group(1)
|
||||
|
||||
|
||||
def trigger(builder, revision, issue, patchset, project):
|
||||
"""Constructs/uploads an isolated file and send the job to swarming."""
|
||||
master, recipe, platform = RECIPE_MAPPING[builder]
|
||||
build_props = {
|
||||
'buildnumber': 1,
|
||||
'buildername': builder,
|
||||
'recipe': recipe,
|
||||
'mastername': master,
|
||||
'slavename': 'fakeslave',
|
||||
'revision': revision,
|
||||
'patch_project': project,
|
||||
}
|
||||
if issue:
|
||||
build_props['issue'] = issue
|
||||
if patchset:
|
||||
build_props['patchset'] = patchset
|
||||
factory_props = {
|
||||
'recipe': recipe
|
||||
}
|
||||
ensure_swarming_client()
|
||||
arun_isolated = archive_isolate(ISOLATE)
|
||||
task = trigger_swarm(arun_isolated, platform, build_props, factory_props)
|
||||
print 'https://luci-milo.appspot.com/swarming/%s' % task
|
||||
|
||||
|
||||
def main(args):
  """Entry point: parse flags, then trigger the mapped swarming job."""
  options = parse_args(args)
  trigger(options.builder, options.revision, options.issue,
          options.patchset, options.patch_project)
|
||||
|
||||
|
||||
if __name__ == '__main__':
  # main() returns None on success; sys.exit(None) exits with status 0.
  sys.exit(main(sys.argv[1:]))
|
Loading…
Reference in New Issue