change webhook URL and add functionality for each job to post to bot #6719

Merged (1 commit) on Aug 9, 2017
.travis.yml (1 addition, 1 deletion)
@@ -97,4 +97,4 @@ notifications:
email:
on_success: never
on_failure: always
-webhooks: https://w3c-test.org/prbuildbot.py
+webhooks: https://pulls.web-platform-tests.org/api/build
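The updated webhook target can be sanity-checked locally before relying on a Travis run. A minimal sketch, assuming PyYAML is installed and the script is run from the repository root:

import yaml  # PyYAML, assumed to be available locally

with open(".travis.yml") as f:
    travis_config = yaml.safe_load(f)

# The notifications block should now point at the bot endpoint.
print(travis_config["notifications"]["webhooks"])
# Expected output: https://pulls.web-platform-tests.org/api/build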
check_stability.ini (1 addition, 0 deletions)
@@ -7,3 +7,4 @@ skip_tests: conformance-checkers docs tools
# (particularly in terms of execution time), making it impractical in most
# cases.
ignore_changes: resources
results_url: https://pulls.web-platform-tests.org/api/stability
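For reference, the new key is consumed like the existing [file detection] options. A minimal sketch mirroring the config.get() call added to check_stability.py below (Python 2 style ConfigParser, matching that script's import; path handling is simplified here):

from ConfigParser import SafeConfigParser  # Python 2, as in check_stability.py

config = SafeConfigParser()
with open("check_stability.ini") as config_fp:
    config.readfp(config_fp)

# New key added by this change, read from the existing [file detection] section.
results_url = config.get("file detection", "results_url")
# -> "https://pulls.web-platform-tests.org/api/stability"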
tools/ci/check_stability.py (54 additions, 5 deletions)
@@ -7,6 +7,8 @@
import sys
from ConfigParser import SafeConfigParser

import requests

wpt_root = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir))
sys.path.insert(0, wpt_root)

@@ -145,10 +147,6 @@ def get_parser():
description = """Detect instabilities in new tests by executing tests
repeatedly and comparing results between executions."""
parser = argparse.ArgumentParser(description=description)
parser.add_argument("--comment-pr",
action="store",
default=os.environ.get("TRAVIS_PULL_REQUEST"),
help="PR to comment on with stability results")
parser.add_argument("--user",
action="store",
# Travis docs say do not depend on USER env variable.
@@ -197,6 +195,52 @@ def pr():
return pr if pr != "false" else None


def post_results(results, pr_number, iterations, product, url, status):
    """Post stability results to a given URL."""
    payload_results = []

    for test_name, test in results.iteritems():
        subtests = []
        for subtest_name, subtest in test['subtests'].items():
            subtests.append({
                'test': subtest_name,
                'result': {
                    'messages': list(subtest['messages']),
                    'status': subtest['status']
                },
            })
        payload_results.append({
            'test': test_name,
            'result': {
                'status': test['status'],
                'subtests': subtests
            }
        })

    payload = {
        "pull": {
            "number": int(pr_number),
            "sha": os.environ.get("TRAVIS_PULL_REQUEST_SHA"),
        },
        "job": {
            "id": int(os.environ.get("TRAVIS_JOB_ID")),
            "number": os.environ.get("TRAVIS_JOB_NUMBER"),
            "allow_failure": os.environ.get("TRAVIS_ALLOW_FAILURE") == 'true',
            "status": status,
        },
        "build": {
            "id": int(os.environ.get("TRAVIS_BUILD_ID")),
            "number": os.environ.get("TRAVIS_BUILD_NUMBER"),
        },
        "product": product,
        "iterations": iterations,
        "message": "All results were stable." if status == "passed" else "Unstable results.",
        "results": payload_results,
    }

    requests.post(url, json=payload)


def main():
"""Perform check_stability functionality and return exit code."""

@@ -223,6 +267,7 @@ def run(venv, wpt_args, **kwargs):
config.readfp(config_fp)
skip_tests = config.get("file detection", "skip_tests").split()
ignore_changes = set(config.get("file detection", "ignore_changes").split())
results_url = config.get("file detection", "results_url")

if kwargs["output_bytes"] is not None:
replace_streams(kwargs["output_bytes"],
@@ -317,8 +362,12 @@ def run(venv, wpt_args, **kwargs):
logger.info("All results were stable\n")
with TravisFold("full_results"):
write_results(logger.info, results, iterations,
pr_number=kwargs["comment_pr"],
pr_number=pr_number,
use_details=True)
if pr_number:
post_results(results, iterations=iterations, url=results_url,
product=wpt_args.product, pr_number=pr_number,
status="failed" if inconsistent else "passed")
else:
logger.info("No tests run.")

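For illustration, this is roughly the shape of the JSON body that post_results() sends to the bot for a run with one stable test and one subtest. The structure mirrors the payload built in the function above; every concrete value below (IDs, SHA, product string, statuses) is a hypothetical placeholder, not taken from a real build:

# Hypothetical example payload; only the structure follows post_results().
example_payload = {
    "pull": {
        "number": 6719,  # PR number passed in as pr_number
        "sha": "0000000000000000000000000000000000000000",  # TRAVIS_PULL_REQUEST_SHA
    },
    "job": {
        "id": 123456789,          # TRAVIS_JOB_ID
        "number": "1234.5",       # TRAVIS_JOB_NUMBER
        "allow_failure": False,   # TRAVIS_ALLOW_FAILURE == 'true'
        "status": "passed",
    },
    "build": {
        "id": 987654321,          # TRAVIS_BUILD_ID
        "number": "1234",         # TRAVIS_BUILD_NUMBER
    },
    "product": "firefox:nightly", # placeholder product string
    "iterations": 10,
    "message": "All results were stable.",
    "results": [
        {
            "test": "/example/test.html",
            "result": {
                "status": "OK",
                "subtests": [
                    {
                        "test": "example subtest",
                        "result": {"messages": [], "status": "PASS"},
                    },
                ],
            },
        },
    ],
}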