First commit

This commit is contained in:
Jack Jackson 2022-11-22 20:09:26 -08:00
parent eeecab3407
commit c2dd6143e6
6 changed files with 196 additions and 0 deletions

17
.drone.yml Normal file
View File

@ -0,0 +1,17 @@
# Drone CI pipeline: build this repo's image and push it to the Gitea registry.
# (Indentation reconstructed - the extracted copy had been flattened, which is
# not valid YAML.)
kind: pipeline
name: drone-build-status-monitor
type: docker

platform:
  os: linux
  arch: arm64

steps:
- name: push-built-image
  image: plugins/docker
  settings:
    registry: gitea.scubbo.org
    repo: gitea.scubbo.org/scubbo/drone-build-status-monitor
    username: scubbo
    password:
      from_secret: gitea_password

9
Dockerfile Normal file
View File

@ -0,0 +1,9 @@
FROM python:3.9.15-slim-bullseye

# Install dependencies and remove the requirements file in the SAME layer:
# a separate `RUN rm` does not shrink the image, it only adds a layer.
COPY requirements.txt requirements.txt
RUN pip3 install -r requirements.txt && rm requirements.txt

WORKDIR /app
COPY src/ src

# Exec form (JSON array) so the Python process runs as PID 1 and receives
# SIGTERM directly, allowing clean container shutdown. Invoking the
# interpreter explicitly also removes the need for a chmod'd shebang script.
CMD ["python3", "src/app.py"]

View File

@ -0,0 +1,7 @@
TODO - flesh this out more!
Exposes Prometheus metrics on port 8000. Listens on port 8015 for Webhook update events from Drone.
Environment variables:
* `ACCESS_TOKEN` - Drone personal access token, sent as a Bearer token on API calls
* `DRONE_DOMAIN` - base URL of the Drone server (e.g. `https://drone.example.com`)

3
requirements.txt Normal file
View File

@ -0,0 +1,3 @@
prometheus-client
requests
strenum

0
src/__init__.py Normal file
View File

160
src/app.py Normal file
View File

@ -0,0 +1,160 @@
#!/usr/bin/env python3
import json
import logging
import os
import re
import sys
from dataclasses import dataclass
from enum import auto
from http.server import HTTPServer, BaseHTTPRequestHandler
from typing import Iterable, Optional

import requests
from prometheus_client import start_http_server, Enum as PromEnum
from prometheus_client.metrics_core import METRIC_NAME_RE
from strenum import StrEnum
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
# TODO - extract logging configuration somewhere
handler = logging.StreamHandler(sys.stdout)
handler.setLevel(logging.INFO)
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
handler.setFormatter(formatter)
logger.addHandler(handler)

# Repo-name -> Repo registry, shared between startup (create_promEnums) and the
# webhook handler. Fix: the original bare `global build_statuses` statement at
# module scope is a no-op and never actually created the variable; initialising
# the dict here means a webhook arriving before startup finishes cannot NameError.
build_statuses = {}
class BuildStatus(StrEnum):
running = auto()
success = auto()
failure = auto()
unknown = auto()
@dataclass
class Repo:
    """A Drone-registered repository plus the Prometheus metric tracking its build state."""
    id: int     # Drone's internal repo id (from the /api/user/repos payload)
    name: str   # repository name, without the owner prefix
    owner: str  # repository namespace/owner
    # Prometheus Enum metric attached after construction; None until a tracker
    # is created. (Fix: annotated Optional - the default is None, so a bare
    # `PromEnum` annotation was wrong.)
    build_status_tracker: Optional[PromEnum] = None
def get_repos(access_token: str, drone_domain: str) -> Iterable[Repo]:
    """Fetch the authenticated user's repos from Drone, returning the public ones.

    :param access_token: Drone personal access token, sent as a Bearer token.
    :param drone_domain: base URL of the Drone server (e.g. ``https://drone.example.com``).
    :return: list of Repo objects (``build_status_tracker`` not yet populated).
    :raises requests.HTTPError: if Drone rejects the request (bad token, outage).
    """
    repo_response = requests.get(
        f'{drone_domain}/api/user/repos',
        headers={'Authorization': f'Bearer {access_token}'},
        timeout=30)  # requests has no default timeout - without this a Drone outage hangs startup forever
    repo_response.raise_for_status()  # surface auth/availability problems instead of a confusing JSON error
    # TODO - must be possible to get status of a private repo if you are the owner, surely?
    return [Repo(id=repo['id'], name=repo['name'], owner=repo['namespace'])
            for repo in repo_response.json()
            if not repo['private']]
def get_latest_build_status(access_token: str, drone_domain: str, owner: str, repo_name: str) -> BuildStatus:
    """Return the status of the most recent build of ``owner/repo_name``.

    Returns ``BuildStatus.unknown`` when the repo has no builds yet.

    Fixes:
    - the ``access_token`` parameter was accepted but never sent, so the call
      only worked where Drone allows anonymous access; now sent as a Bearer token.
    - removed a leftover ``code.interact(...)`` debugging hook that froze the
      service whenever it polled the 'putiosync' repo.
    """
    builds = requests.get(
        f'{drone_domain}/api/repos/{owner}/{repo_name}/builds',
        headers={'Authorization': f'Bearer {access_token}'},
        timeout=30).json()  # no default timeout in requests - don't hang startup on one repo
    if not builds:
        return BuildStatus.unknown
    return BuildStatus(builds[0]['status'])
def sanitize_repo_name_for_prometheus_metric(repo_name: str) -> str:
    """Replace characters that are illegal in a Prometheus metric name with '_'.

    There's a strict set of characters that are permitted in metric names.
    Note - this is hella janky, and will probably break if the pattern ever changes. But I'd still rather do it
    this way than just copy-paste the pattern and have no indication of where it came from. Breakage is better than
    accidental drifting.
    """
    # METRIC_NAME_RE has the shape '[first-char-class][rest-char-class]*'; pull
    # out the two character classes so each position is checked against the
    # right one. (Fix: raw string - the original '.*\[...' literal relied on
    # invalid escape sequences, which Python deprecation-warns about.)
    parsed_pattern_match = re.match(r'.*\[(.*?)\]\[(.*?)\].*', METRIC_NAME_RE.pattern)
    first_char_re = re.compile('[' + parsed_pattern_match.group(1) + ']')
    rest_char_re = re.compile('[' + parsed_pattern_match.group(2) + ']')
    return ''.join(
        char if (first_char_re if i == 0 else rest_char_re).match(char) else '_'
        for i, char in enumerate(repo_name))
def make_build_status_tracker(repo_name: str, state: BuildStatus) -> PromEnum:
    """Create a Prometheus Enum metric for ``repo_name`` and set its initial state."""
    metric_name = f'build_state_{sanitize_repo_name_for_prometheus_metric(repo_name)}'
    all_states = [bs.name for bs in BuildStatus]
    tracker = PromEnum(metric_name, f'Build state of {repo_name}', states=all_states)
    tracker.state(state.name)
    return tracker
class BuildStatusUpdateListenerHandler(BaseHTTPRequestHandler):
    """Accepts Drone webhook POSTs and updates the per-repo Prometheus build-state metrics."""

    def do_POST(self):
        # Acknowledge immediately - Drone doesn't need to wait for our bookkeeping.
        self.send_response(200)
        self.send_header("Content-type", "text/html")
        self.end_headers()
        self.wfile.write("OK".encode('utf8'))
        payload = self.rfile.read(int(self.headers.get('content-length')))
        self._update_build_status(payload)

    def _update_build_status(self, payload):
        """Parse a Drone webhook payload and update (or create) the repo's status tracker."""
        global build_statuses
        parsed_payload = json.loads(payload.decode('utf8'))
        event_type = parsed_payload['event']
        if event_type != 'build':
            logger.info(f'Skipping unnecessary event-type {event_type}')
            # Bug fix: the original fell through here and tried to process
            # non-build events anyway; bail out after logging the skip.
            return
        repo_id = parsed_payload['repo']['id']
        repo_name = parsed_payload['repo']['name']
        repo_namespace = parsed_payload['repo']['namespace']
        build_status = BuildStatus(parsed_payload['repo']['build']['status'])
        logger.info(f'DEBUG - {repo_id=}\t{repo_name=}\t{repo_namespace=}\t{build_status=}')
        if repo_name in build_statuses:
            status_tracker = build_statuses[repo_name].build_status_tracker
            # NOTE(review): reaches into prometheus-client private attrs to
            # recover the previous state purely for the log line below.
            old_status = status_tracker._states[status_tracker._value]
            status_tracker.state(build_status)
            logger.info(f'Updated status for {repo_name} from {old_status} to {build_status}')
        else:
            build_statuses[repo_name] = Repo(
                id=repo_id,
                name=repo_name,
                owner=repo_namespace,
                build_status_tracker=make_build_status_tracker(repo_name, build_status))
            logger.info(f'Created a tracker for {repo_name} in state {build_status}')
def create_promEnums():
    """Populate the global repo registry, creating one build-state metric per public repo."""
    # Ugh, global variables. But this is quick and dirty for a proof-of-concept.
    global build_statuses
    token = os.environ['ACCESS_TOKEN']
    domain = os.environ['DRONE_DOMAIN']
    build_statuses = {}
    for repo in get_repos(token, domain):
        initial_status = get_latest_build_status(token, domain, repo.owner, repo.name)
        repo.build_status_tracker = make_build_status_tracker(repo.name, initial_status)
        build_statuses[repo.name] = repo
    logger.info(build_statuses)
if __name__ == '__main__':
    # Seed the metrics from Drone's current state before serving anything.
    create_promEnums()
    logger.info('Created promEnums')
    # Prometheus scrape endpoint on :8000 (runs on a background thread).
    start_http_server(8000)
    logger.info('Started up the prometheus server')
    # Blocking webhook listener on :8015 for Drone build events.
    webhook_server = HTTPServer(('', 8015), BuildStatusUpdateListenerHandler)
    logger.info('About to start the HTTP server...')
    webhook_server.serve_forever()
    # TODO - periodic sync in case webhook data has been missed