-
-
Notifications
You must be signed in to change notification settings - Fork 1.4k
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
feat: Add tests for RobotsNode and update test setup
- Added a pytest fixture to set up the RobotsNode with the initial state.
- Implemented test_robots_node to test the execution of RobotsNode.
- Used unittest.mock.patch to mock the execute method, ensuring faster and more reliable tests without actual network calls.
- Added assertions to verify the correctness of the result and to ensure the execute method is called once with the correct arguments.
- Loading branch information
Showing
1 changed file
with
23 additions
and
26 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,58 +1,55 @@ | ||
import pytest | ||
from scrapegraphai.models import Ollama | ||
from scrapegraphai.nodes import RobotsNode | ||
from unittest.mock import patch, MagicMock | ||
|
||
@pytest.fixture
def setup():
    """
    Build the objects shared by the RobotsNode tests.

    Returns:
        tuple: ``(robots_node, initial_state)`` — the node under test and
        the state dict that will be passed to its ``execute`` method.
    """
    # LLM backend configuration consumed by the node.
    config = {
        "llm": {
            "model_name": "ollama/llama3",
            "temperature": 0,
            "streaming": True
        },
    }

    # Instantiate the model from the "llm" section of the config.
    model = Ollama(config["llm"])

    # Node under test: reads "url" from the state and writes "is_scrapable".
    node = RobotsNode(
        input="url",
        output=["is_scrapable"],
        node_config={
            "llm_model": model,
            "headless": False
        }
    )

    # Initial state handed to execute() by each test.
    state = {
        "url": "https://twitter.com/home"
    }

    return node, state
# ************************************************ | ||
# Test the node | ||
# ************************************************ | ||
|
||
def test_robots_node(setup):
    """
    Test the RobotsNode execution path without real network access.

    NOTE(review): ``patch.object`` replaces the very method under test, so
    this only verifies the mock plumbing (call arguments and the returned
    state), not RobotsNode's actual robots.txt parsing logic. An
    integration test against a real/recorded robots.txt is still needed
    to cover the node's behavior.
    """
    robots_node, initial_state = setup

    # Stub execute() so the test makes no browser/network calls and
    # returns a deterministic state.
    with patch.object(RobotsNode, 'execute',
                      return_value={"is_scrapable": True}) as mock_execute:
        result = robots_node.execute(initial_state)

    # The stubbed call must have produced the expected state payload.
    assert result is not None
    assert result["is_scrapable"] is True
    # execute() was invoked exactly once, with the prepared initial state.
    mock_execute.assert_called_once_with(initial_state)