Compare commits

...

5 Commits

Author SHA1 Message Date
dc08ba41ca feat: add health check and heartbeat endpoint to Docker setup
All checks were successful
Build and Push Docker Image / build (push) Successful in 1m27s
2025-02-15 14:12:34 +01:00
0518d1ba48 feat: integrate yaspin for improved logging and error handling in Chrome driver setup
All checks were successful
Build and Push Docker Image / build (push) Successful in 1m5s
2025-02-14 15:32:14 +01:00
f579000a96 docs: simplify Docker commands in README and enhance logging in WebRTC stats exporter
All checks were successful
Build and Push Docker Image / build (push) Successful in 1m16s
2025-02-13 18:31:09 +01:00
752cea15d4 docs: reorganize server setup instructions and create dedicated server README
All checks were successful
Build and Push Docker Image / build (push) Successful in 1m6s
2025-02-12 20:24:29 +01:00
5be06ec11f docs: update README with improved Docker commands and add credits section
All checks were successful
Build and Push Docker Image / build (push) Successful in 2m33s
feat: enhance WebRTC stats collection by logging connection state changes
2025-02-12 19:50:37 +01:00
8 changed files with 88 additions and 59 deletions

View File

@@ -4,10 +4,15 @@ FROM python:3.13.1-slim-bookworm
COPY requirements.txt /app/requirements.txt COPY requirements.txt /app/requirements.txt
RUN pip install --no-cache-dir -r /app/requirements.txt RUN pip install --no-cache-dir -r /app/requirements.txt
# Install curl
RUN apt-get update && apt-get install -y curl
# Copy the application # Copy the application
COPY main.py /app COPY main.py /app
COPY utils/ /app/utils COPY utils/ /app/utils
WORKDIR /app WORKDIR /app
HEALTHCHECK --interval=30s --timeout=30s --start-period=5s --retries=3 CMD curl -f http://localhost/heartbeat || exit 1
# Run the application # Run the application
CMD ["python", "main.py"] CMD ["python", "main.py"]

View File

@@ -15,7 +15,7 @@ peertube-collector is a project designed to collect and analyze WebRTC statistic
Ports can be opened in the host machine's firewall with: Ports can be opened in the host machine's firewall with:
```sh ```sh
ufw allow from 172.30.0.0/16 to any port 4444 ufw allow from 172.100.0.0/16 to any port 4444
``` ```
#### External (OPTIONAL): #### External (OPTIONAL):
@@ -48,7 +48,7 @@ ufw allow 27107/tcp
4. Start the Docker containers: 4. Start the Docker containers:
```sh ```sh
docker compose up docker compose up --abort-on-container-exit
``` ```
or in detached mode: or in detached mode:
```sh ```sh
@@ -97,29 +97,8 @@ peertube-collector/
└── webrtc-internals-exporter/ └── webrtc-internals-exporter/
``` ```
--- # Credits
# Server - [WebRTC Internals Exporter](https://github.com/vpalmisano/webrtc-internals-exporter)
- [WebRTC debugging with Prometheus/Grafana](https://medium.com/@vpalmisano/webrtc-debugging-with-prometheus-grafana-254b6ac71063)
The repository contains a `server` directory with a simple MongoDB server (with initialization scripts) and WebUI that serves the WebRTC stats collected by the collector. - [MongoDB Docker Compose examples](https://github.com/TGITS/docker-compose-examples/tree/main/mongodb-docker-compose-examples)
Based on this awesome example configuration: [MongoDB Docker Compose examples](https://github.com/TGITS/docker-compose-examples/tree/main/mongodb-docker-compose-examples).
## Setup
1. Change to the `server` directory:
```sh
cd server
```
2. Create and configure the environment file based on the `.env.example` file:
```sh
cp .env.example .env
```
3. Start the Docker containers:
```sh
docker compose up
```
The WebUI control panel will be available at [http://localhost:8081](http://localhost:8081).

View File

@@ -14,6 +14,7 @@ services:
interval: 5s interval: 5s
timeout: 10s timeout: 10s
retries: 5 retries: 5
pull_policy: always
network_mode: host network_mode: host
telegraf: telegraf:
@@ -30,6 +31,7 @@ services:
interval: 5s interval: 5s
timeout: 10s timeout: 10s
retries: 5 retries: 5
pull_policy: always
networks: networks:
- backend - backend
@@ -50,6 +52,7 @@ services:
- "9092:9092" - "9092:9092"
extra_hosts: extra_hosts:
- "host.docker.internal:host-gateway" - "host.docker.internal:host-gateway"
pull_policy: always
networks: networks:
- backend - backend
@@ -57,4 +60,4 @@ networks:
backend: backend:
ipam: ipam:
config: config:
- subnet: 172.30.0.0/16 - subnet: 172.100.0.0/16

16
main.py
View File

@@ -4,6 +4,7 @@ import time
import socket import socket
import logging import logging
import os import os
from yaspin import yaspin
from functools import partial from functools import partial
from http.server import HTTPServer from http.server import HTTPServer
from utils.PostHandler import Handler from utils.PostHandler import Handler
@@ -34,6 +35,7 @@ def interrupt_handler(signum, driver: webdriver.Remote):
driver.quit() driver.quit()
raise SystemExit raise SystemExit
@yaspin()
def setupChromeDriver(): def setupChromeDriver():
logger.log(logging.INFO, 'Setting up Chrome driver.') logger.log(logging.INFO, 'Setting up Chrome driver.')
chrome_options = Options() chrome_options = Options()
@@ -159,7 +161,8 @@ def downloadStats(driver: webdriver.Chrome, peersDict: dict):
def convert_to_bytes(down, downUnit): def convert_to_bytes(down, downUnit):
return float(down) * (1024 ** {'B': 0, 'KB': 1, 'MB': 2, 'GB': 3}[downUnit]) return float(down) * (1024 ** {'B': 0, 'KB': 1, 'MB': 2, 'GB': 3}[downUnit])
@yaspin()
def setupStats(driver: webdriver.Remote, url: str): def setupStats(driver: webdriver.Remote, url: str):
logger.log(logging.INFO, 'Setting up stats.') logger.log(logging.INFO, 'Setting up stats.')
actions = ActionChains(driver) actions = ActionChains(driver)
@@ -167,7 +170,13 @@ def setupStats(driver: webdriver.Remote, url: str):
driver.get(url) driver.get(url)
wait.until(ec.presence_of_element_located((By.CLASS_NAME, 'vjs-big-play-button'))) try:
wait.until(ec.presence_of_element_located((By.CLASS_NAME, 'vjs-big-play-button')))
except Exception:
logger.error('Timeout while waiting for the big play button to be present.')
driver.quit()
raise SystemExit(1)
actions.click(driver.find_element(By.CLASS_NAME ,'video-js')).perform() actions.click(driver.find_element(By.CLASS_NAME ,'video-js')).perform()
wait.until(ec.visibility_of_element_located((By.CLASS_NAME, 'vjs-control-bar'))) wait.until(ec.visibility_of_element_located((By.CLASS_NAME, 'vjs-control-bar')))
actions.context_click(driver.find_element(By.CLASS_NAME ,'video-js')).perform() actions.context_click(driver.find_element(By.CLASS_NAME ,'video-js')).perform()
@@ -193,7 +202,6 @@ if __name__ == '__main__':
setupStats(driver, url) setupStats(driver, url)
logger.log(logging.INFO, 'Starting server collector.') logger.info('Starting server collector.')
httpd = HTTPServer(('', 9092), partial(Handler, downloadStats, driver, logger)) httpd = HTTPServer(('', 9092), partial(Handler, downloadStats, driver, logger))
logger.info('Server collector started.')
httpd.serve_forever() httpd.serve_forever()

View File

@@ -1,2 +1,3 @@
selenium selenium
beautifulsoup4 beautifulsoup4
yaspin

30
server/README.md Normal file
View File

@@ -0,0 +1,30 @@
# Server
The repository contains a `server` directory with a simple MongoDB server (with initialization scripts) and WebUI that serves the WebRTC stats collected by the collector.
It's not mandatory to run and use this service; it's provided just as an example of how to store collected data.
## Setup
1. Change to the `server` directory:
```sh
cd server
```
2. Create and configure the environment file based on the `.env.example` file:
```sh
cp .env.example .env
```
3. Start the Docker containers:
```sh
docker compose up
```
The WebUI control panel will be available at [http://localhost:8081](http://localhost:8081).
# Credits
- [WebRTC Internals Exporter](https://github.com/vpalmisano/webrtc-internals-exporter)
- [WebRTC debugging with Prometheus/Grafana](https://medium.com/@vpalmisano/webrtc-debugging-with-prometheus-grafana-254b6ac71063)
- [MongoDB Docker Compose examples](https://github.com/TGITS/docker-compose-examples/tree/main/mongodb-docker-compose-examples)

View File

@@ -24,6 +24,11 @@ class Handler(BaseHTTPRequestHandler):
self.wfile.write(b'404 Not Found') self.wfile.write(b'404 Not Found')
def do_GET(self): def do_GET(self):
self.send_response(404) if self.path == '/heartbeat':
self.end_headers() self.send_response(200)
self.wfile.write(b'404 Not Found') self.end_headers()
self.wfile.write(b'Heartbeat OK')
else:
self.send_response(404)
self.end_headers()
self.wfile.write(b'404 Not Found')

View File

@@ -22,7 +22,7 @@ class WebrtcInternalExporter {
}); });
window.postMessage({ event: "webrtc-internal-exporter:ready" }); window.postMessage({ event: "webrtc-internal-exporter:ready" });
this.collectAllStats(); setInterval(() => this.collectAndPostAllStats(), this.updateInterval);
} }
randomId() { randomId() {
@@ -40,6 +40,9 @@ class WebrtcInternalExporter {
pc.iceCandidateErrors = []; pc.iceCandidateErrors = [];
this.peerConnections.set(id, pc); this.peerConnections.set(id, pc);
pc.addEventListener("connectionstatechange", () => { pc.addEventListener("connectionstatechange", () => {
log(`connectionStateChange: ${pc.connectionState}`);
this.collectAndPostAllStats();
if (pc.connectionState === "closed") { if (pc.connectionState === "closed") {
this.peerConnections.delete(id); this.peerConnections.delete(id);
} }
@@ -83,24 +86,27 @@ class WebrtcInternalExporter {
} }
async collectAndPostSingleStat(id) { async collectAndPostSingleStat(id) {
const stats = await this.collectStats(id, this.collectAndPostSingleStat); const stats = await this.collectStats(id);
if (Object.keys(stats).length === 0 || !stats) return; if (Object.keys(stats).length === 0 || !stats) return;
window.postMessage( window.postMessage(
{ {
event: "webrtc-internal-exporter:peer-connection-stats", event: "webrtc-internal-exporter:peer-connection-stats",
stats stats: [stats]
}, },
stats [stats]
); );
log(`Single stat collected:`, [stats]);
} }
async collectAllStats() { async collectAndPostAllStats() {
const stats = []; const stats = [];
for (const [id, pc] of this.peerConnections) { for (const [id] of this.peerConnections) {
if (this.url && this.enabled) { if (this.url && this.enabled) {
const pcStats = await this.collectStats(id, pc); const pcStats = await this.collectStats(id);
if (Object.keys(pcStats).length === 0 || !pcStats) continue;
stats.push(pcStats); stats.push(pcStats);
} }
} }
@@ -108,28 +114,24 @@ class WebrtcInternalExporter {
window.postMessage( window.postMessage(
{ {
event: "webrtc-internal-exporter:peer-connections-stats", event: "webrtc-internal-exporter:peer-connections-stats",
data: JSON.parse(JSON.stringify(stats)), data: stats
}, },
stats
); );
log(`Stats collected:`, JSON.parse(JSON.stringify(stats))); log(`Stats collected:`, stats);
setTimeout(this.collectAllStats.bind(this), this.updateInterval);
return stats; return stats;
} }
/** /**
* @param {string} id * @param {string} id
* @param {RTCPeerConnection} pc
* @param {Function} binding
*/ */
async collectStats(id, pc, binding) { async collectStats(id) {
var completeStats = {}; var pc = this.peerConnections.get(id);
if (!pc) return;
if (!pc) { var completeStats = {};
pc = this.peerConnections.get(id);
if (!pc) return;
}
if (this.url && this.enabled) { if (this.url && this.enabled) {
try { try {
@@ -157,10 +159,6 @@ class WebrtcInternalExporter {
if (pc.connectionState === "closed") { if (pc.connectionState === "closed") {
this.peerConnections.delete(id); this.peerConnections.delete(id);
} else {
if (binding) {
setTimeout(binding.bind(this), this.updateInterval, id);
}
} }
return completeStats; return completeStats;