Skip to content
This repository has been archived by the owner on Oct 19, 2023. It is now read-only.

Commit

Permalink
test: integration test
Browse files Browse the repository at this point in the history
  • Loading branch information
zac-li committed May 4, 2023
1 parent 7e811a5 commit 23d2384
Show file tree
Hide file tree
Showing 9 changed files with 151 additions and 7 deletions.
8 changes: 8 additions & 0 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -84,11 +84,19 @@ jobs:
pip install -r requirements.txt
pip install --no-cache-dir ".[test]"
sudo apt-get install libsndfile1
- name: Setup monitoring stack
run: |
cd $GITHUB_WORKSPACE
docker-compose -f tests/integration/docker-compose.yml --project-directory . up --build -d --remove-orphans
- name: Test
id: test
run: |
pytest -v -s --log-cli-level=DEBUG
timeout-minutes: 30
- name: Cleanup monitoring stack
run: |
cd $GITHUB_WORKSPACE
docker-compose -f tests/integration/docker-compose.yml --project-directory . down
# just for blocking the merge until all parallel integration-tests are successful
success-all-test:
Expand Down
2 changes: 1 addition & 1 deletion examples/websockets/hitl/hitl.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
import os

from langchain.agents import initialize_agent, load_tools
from langchain.callbacks.base import CallbackManager
from langchain.callbacks.manager import CallbackManager
from langchain.chat_models import ChatOpenAI
from langchain.llms import OpenAI

Expand Down
6 changes: 3 additions & 3 deletions lcserve/apps/babyagi/app.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,10 @@
from typing import Any, List

from babyagi import BabyAGI, CustomTool, PredefinedTools, get_tools, get_vectorstore
from langchain import OpenAI
from langchain.callbacks.base import CallbackManager
from lcserve import serving
from langchain.callbacks.manager import CallbackManager

from babyagi import BabyAGI, CustomTool, PredefinedTools, get_tools, get_vectorstore
from lcserve import serving


@serving(websocket=True)
Expand Down
9 changes: 8 additions & 1 deletion lcserve/flow.py
Original file line number Diff line number Diff line change
Expand Up @@ -435,7 +435,7 @@ def get_flow_dict(
module = [module]

uses = get_gateway_uses(id=gateway_id) if jcloud else get_gateway_config_yaml_path()
return {
flow_dict = {
'jtype': 'Flow',
**(get_with_args_for_jcloud() if jcloud else {}),
'gateway': {
Expand All @@ -454,6 +454,13 @@ def get_flow_dict(
},
**(get_global_jcloud_args(app_id=app_id, name=name) if jcloud else {}),
}
if os.environ.get("LCSERVE_TEST", False):
flow_dict['with'] = {
'metrics': True,
'metrics_exporter_host': 'http://localhost',
'metrics_exporter_port': 4317,
}
return flow_dict


def get_flow_yaml(
Expand Down
19 changes: 19 additions & 0 deletions tests/integration/docker-compose.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
version: "3"
services:
  # OpenTelemetry Collector: receives OTLP metrics from the Flow under test
  # (the Flow exports to localhost:4317 when LCSERVE_TEST is set) and
  # re-exposes them in Prometheus format on :8889.
  otel-collector:
    image: otel/opentelemetry-collector:0.61.0
    command: [ "--config=/etc/otel-collector-config.yml" ]
    volumes:
      # project-directory is the repo root, hence the tests/integration prefix
      - ./tests/integration/otel-collector-config.yml:/etc/otel-collector-config.yml
    ports:
      - "8888" # Prometheus metrics exposed by the collector
      - "8889" # Prometheus exporter metrics
      - "4317:4317" # OTLP gRPC receiver

  # Prometheus scrapes the collector and serves the query API on
  # localhost:9090, which the integration tests poll (see helper.py).
  prometheus:
    container_name: prometheus
    image: prom/prometheus:latest
    volumes:
      - ./tests/integration/prometheus-config.yml:/etc/prometheus/prometheus.yml
    ports:
      - "9090:9090"
35 changes: 35 additions & 0 deletions tests/integration/helper.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,9 @@

import psutil
import pytest
import requests

PROMETHEUS_URL = "http://localhost:9090"


@pytest.fixture(scope="session", autouse=True)
Expand All @@ -23,6 +26,8 @@ def run_test_server(request):
+ (os.pathsep if env.get("PYTHONPATH") else "")
+ env.get("PYTHONPATH", "")
)
# Mark LCSERVE_TEST as true to make the Flow tested export metrics (to docker composed monitor stack)
env["LCSERVE_TEST"] = "true"

# Start the app
server_process = subprocess.Popen(
Expand All @@ -45,3 +50,33 @@ def kill_child_pids(pid):
children = parent.children(recursive=True)
for child in children:
os.kill(child.pid, signal.SIGTERM)


def get_values_from_prom(metrics):
    """Run an instant query against the local Prometheus and return the value.

    Args:
        metrics: PromQL query string (typically a bare metric name).

    Returns:
        The sample value as returned by Prometheus (a numeric string), or
        ``0`` when the metric has not been populated yet so callers can
        keep retrying.
    """
    response = requests.get(
        f"{PROMETHEUS_URL}/api/v1/query",
        params={"query": metrics},
        # Fail fast instead of hanging the whole test run if Prometheus
        # is unreachable.
        timeout=10,
    )
    assert response.status_code == 200

    try:
        # data.result is a list of samples; each sample's "value" is a
        # [timestamp, value] pair — index 1 is the actual value.
        value = response.json()["data"]["result"][0]["value"][1]
    except (KeyError, IndexError):
        # Metric not scraped yet: report 0 and let the caller retry,
        # without masking unrelated errors the way a bare except would.
        value = 0
    return value


def examine_prom_with_retry(start_time, metrics, expected_value, timeout=120, interval=10):
    """Poll Prometheus until *metrics* reaches *expected_value* or time out.

    Args:
        start_time: Epoch seconds when waiting began (``time.time()``).
        metrics: PromQL query string passed to ``get_values_from_prom``.
        expected_value: Integer the rounded metric value must equal.
        timeout: Max seconds to wait before failing the test (default 120).
        interval: Seconds to sleep between polls (default 10).

    Fails the current test via ``pytest.fail`` when the deadline passes
    without the metric reaching the expected value.
    """
    while True:
        elapsed_time = time.time() - start_time
        if elapsed_time > timeout:
            pytest.fail("Timed out waiting for the Prometheus data to be populated")

        # Values come back as numeric strings; round to tolerate small
        # timing jitter in the recorded duration.
        duration_seconds = get_values_from_prom(metrics)
        if round(float(duration_seconds)) == expected_value:
            break

        time.sleep(interval)
24 changes: 24 additions & 0 deletions tests/integration/otel-collector-config.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
# OpenTelemetry Collector pipeline: OTLP gRPC in -> batch -> Prometheus out.
receivers:
  otlp:
    protocols:
      # Listens on the default OTLP gRPC port 4317 (published in
      # docker-compose), where the Flow under test pushes its metrics.
      grpc:

exporters:
  prometheus:
    # Scrape endpoint that Prometheus pulls from (see prometheus-config.yml).
    endpoint: "0.0.0.0:8889"
    # Copy resource attributes onto each metric as labels.
    resource_to_telemetry_conversion:
      enabled: true
    # can be used to add additional labels
    const_labels:
      label1: value1

processors:
  batch:

service:
  extensions: []
  pipelines:
    metrics:
      receivers: [otlp]
      processors: [batch]
      exporters: [prometheus]
6 changes: 6 additions & 0 deletions tests/integration/prometheus-config.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
scrape_configs:
  - job_name: 'otel-collector'
    # Aggressive interval so integration tests see fresh data quickly.
    scrape_interval: 500ms
    static_configs:
      # :8889 — metrics exported by the collector's prometheus exporter
      - targets: ['otel-collector:8889']
      # :8888 — the collector's own internal metrics
      - targets: ['otel-collector:8888']
49 changes: 47 additions & 2 deletions tests/integration/test_basic_app.py
Original file line number Diff line number Diff line change
@@ -1,11 +1,12 @@
import os
import json
import os
import time

import pytest
import requests
import websockets

from .helper import run_test_server
from .helper import examine_prom_with_retry, get_values_from_prom, run_test_server

HOST = "localhost:8080"
HTTP_HOST = f"http://{HOST}"
Expand Down Expand Up @@ -238,3 +239,47 @@ def test_multiple_file_uploads_with_extra_arg_http(run_test_server):
"question": "what is the file name?",
"someint": "1",
}


@pytest.mark.parametrize(
    "run_test_server, route",
    [("basic_app", "sync_http")],
    indirect=["run_test_server"],
)
def test_metrics_http(run_test_server, route):
    """POST to the sync HTTP route, then confirm the request-duration
    metric (≈ the requested 5s interval) shows up in Prometheus."""
    endpoint = os.path.join(HTTP_HOST, route)
    request_headers = {
        "accept": "application/json",
        "Content-Type": "application/json",
    }
    payload = {"interval": 5, "envs": {}}

    resp = requests.post(endpoint, headers=request_headers, json=payload)
    assert resp.status_code == 200

    # The handler slept ~5s, so the recorded duration should round to 5.
    examine_prom_with_retry(
        time.time(),
        metrics="http_request_duration_seconds",
        expected_value=5,
    )


@pytest.mark.asyncio
@pytest.mark.parametrize(
    "run_test_server, route",
    [("basic_app", "sync_ws")],
    indirect=["run_test_server"],
)
async def test_metrics_ws(run_test_server, route):
    # Stream 5 messages (1s interval each) over the websocket route, then
    # verify the ws request-duration metric lands in Prometheus.
    async with websockets.connect(os.path.join(WS_HOST, route)) as websocket:
        await websocket.send(json.dumps({"interval": 1}))

        # The server emits one counter value per second.
        received_messages = []
        for _ in range(5):
            message = await websocket.recv()
            received_messages.append(message)

        assert received_messages == ["0", "1", "2", "3", "4"]

        # 5 messages at 1s apart -> total duration rounds to 5 seconds.
        start_time = time.time()
        examine_prom_with_retry(
            start_time, metrics="ws_request_duration_seconds", expected_value=5
        )

0 comments on commit 23d2384

Please sign in to comment.