From 1fbba0e96e564763be2ff35bf03c841da44be447 Mon Sep 17 00:00:00 2001
From: jagtarcontlo <123375045+jagtarcontlo@users.noreply.github.com>
Date: Fri, 16 Jun 2023 13:28:06 +0530
Subject: [PATCH] Improve logging functionality with logger integration

---
 cli2.py                                       |  9 +--
 run_gui.py                                    |  9 +--
 superagi/agent/output_parser.py               | 20 +++---
 superagi/agent/super_agi.py                   | 17 ++---
 superagi/config/config.py                     |  3 +-
 superagi/controllers/config.py                |  7 +-
 superagi/controllers/organisation.py          |  3 +-
 superagi/controllers/project.py               |  3 +-
 superagi/controllers/resources.py             |  6 +-
 superagi/controllers/user.py                  |  3 +-
 superagi/helper/github_helper.py              | 34 +++++-----
 superagi/helper/google_search.py              |  9 +--
 superagi/helper/json_cleaner.py               |  3 +-
 superagi/helper/resource_helper.py            |  3 +-
 superagi/helper/s3_helper.py                  |  3 +-
 superagi/helper/token_counter.py              |  5 +-
 superagi/helper/webpage_extractor.py          | 17 ++---
 superagi/jobs/agent_executor.py               |  7 +-
 superagi/lib/logger.py                        | 64 +++++++++++++++++++
 superagi/llms/openai.py                       |  3 +-
 superagi/message_broker/kafka.py              |  6 +-
 superagi/message_broker/redis.py              |  5 +-
 superagi/models/agent.py                      |  3 +-
 superagi/models/db.py                         |  5 +-
 superagi/tools/code/tools.py                  |  3 +-
 superagi/tools/file/write_file.py             |  3 +-
 superagi/tools/github/delete_file.py          |  3 +-
 superagi/tools/human/tool.py                  |  6 +-
 .../tools/image_generation/dalle_image_gen.py |  3 +-
 superagi/tools/searx/search_scraper.py        |  3 +-
 superagi/tools/thinking/tools.py              |  3 +-
 superagi/worker.py                            |  3 +-
 test.py                                       | 15 +++--
 ui.py                                         | 10 +--
 34 files changed, 196 insertions(+), 103 deletions(-)
 create mode 100644 superagi/lib/logger.py

diff --git a/cli2.py b/cli2.py
index 36f0b911f..216f95e02 100644
--- a/cli2.py
+++ b/cli2.py
@@ -5,11 +5,12 @@
 import shutil
 from sys import platform
 from multiprocessing import Process
+from superagi.lib.logger import logger
 
 
 def check_command(command, message):
     if not shutil.which(command):
-        print(message)
+        logger.info(message)
         sys.exit(1)
 
 
@@ -18,7 +19,7 @@ def run_npm_commands(shell=False):
     try:
         subprocess.run(["npm", "install"], check=True, shell=shell)
     except subprocess.CalledProcessError:
-        print(f"Error during '{' '.join(sys.exc_info()[1].cmd)}'. Exiting.")
+        logger.error(f"Error during '{' '.join(sys.exc_info()[1].cmd)}'. Exiting.")
         sys.exit(1)
     os.chdir("..")
 
@@ -36,11 +37,11 @@ def run_server(shell=False,a_name=None,a_description=None,goals=None):
 
 
 def cleanup(api_process, ui_process, celery_process):
-    print("Shutting down processes...")
+    logger.info("Shutting down processes...")
     api_process.terminate()
     ui_process.terminate()
     celery_process.terminate()
-    print("Processes terminated. Exiting.")
+    logger.info("Processes terminated. Exiting.")
     sys.exit(1)
 
 
diff --git a/run_gui.py b/run_gui.py
index c36501cf5..1e2e0e7ff 100644
--- a/run_gui.py
+++ b/run_gui.py
@@ -3,10 +3,11 @@
 import subprocess
 from time import sleep
 import shutil
+from superagi.lib.logger import logger
 
 def check_command(command, message):
     if not shutil.which(command):
-        print(message)
+        logger.info(message)
         sys.exit(1)
 
 def run_npm_commands():
@@ -14,7 +15,7 @@ def run_npm_commands():
     try:
         subprocess.run(["npm", "install"], check=True)
     except subprocess.CalledProcessError:
-        print(f"Error during '{' '.join(sys.exc_info()[1].cmd)}'. Exiting.")
+        logger.error(f"Error during '{' '.join(sys.exc_info()[1].cmd)}'. 
Exiting.") sys.exit(1) os.chdir("..") @@ -26,10 +27,10 @@ def run_server(): return api_process, ui_process def cleanup(api_process, ui_process): - print("Shutting down processes...") + logger.info("Shutting down processes...") api_process.terminate() ui_process.terminate() - print("Processes terminated. Exiting.") + logger.info("Processes terminated. Exiting.") sys.exit(1) if __name__ == "__main__": diff --git a/superagi/agent/output_parser.py b/superagi/agent/output_parser.py index d44264bf4..9166e3446 100644 --- a/superagi/agent/output_parser.py +++ b/superagi/agent/output_parser.py @@ -3,6 +3,7 @@ from typing import Dict, NamedTuple, List import re from superagi.helper.json_cleaner import JsonCleaner +from superagi.lib.logger import logger class AgentGPTAction(NamedTuple): @@ -25,7 +26,7 @@ def parse(self, text: str) -> AgentGPTAction: class AgentOutputParser(BaseOutputParser): def parse(self, text: str) -> AgentGPTAction: try: - print(text) + logger.info(text) text = JsonCleaner.check_and_clean_json(text) parsed = json.loads(text, strict=False) except json.JSONDecodeError: @@ -38,24 +39,25 @@ def parse(self, text: str) -> AgentGPTAction: format_suffix_yellow = "\033[0m\033[0m" format_prefix_green = "\033[92m\033[1m" format_suffix_green = "\033[0m\033[0m" - print(format_prefix_green + "Intelligence : " + format_suffix_green) + logger.info(format_prefix_green + "Intelligence : " + format_suffix_green) if "text" in parsed["thoughts"]: - print(format_prefix_yellow + "Thoughts: " + format_suffix_yellow + parsed["thoughts"]["text"] + "\n") + logger.info(format_prefix_yellow + "Thoughts: " + format_suffix_yellow + parsed["thoughts"]["text"] + "\n") + if "reasoning" in parsed["thoughts"]: - print(format_prefix_yellow + "Reasoning: " + format_suffix_yellow + parsed["thoughts"]["reasoning"] + "\n") + logger.info(format_prefix_yellow + "Reasoning: " + format_suffix_yellow + parsed["thoughts"]["reasoning"] + "\n") if "plan" in parsed["thoughts"]: - print(format_prefix_yellow + "Plan: " + format_suffix_yellow + parsed["thoughts"]["plan"] + "\n") + logger.info(format_prefix_yellow + "Plan: " + format_suffix_yellow + parsed["thoughts"]["plan"] + "\n") if "criticism" in parsed["thoughts"]: - print(format_prefix_yellow + "Criticism: " + format_suffix_yellow + parsed["thoughts"]["criticism"] + "\n") + logger.info(format_prefix_yellow + "Criticism: " + format_suffix_yellow + parsed["thoughts"]["criticism"] + "\n") - print(format_prefix_green + "Action : " + format_suffix_green) + logger.info(format_prefix_green + "Action : " + format_suffix_green) # print(format_prefix_yellow + "Args: "+ format_suffix_yellow + parsed["tool"]["args"] + "\n") if parsed["tool"] is None or not parsed["tool"]: return AgentGPTAction(name="", args="") if "name" in parsed["tool"]: - print(format_prefix_yellow + "Tool: " + format_suffix_yellow + parsed["tool"]["name"] + "\n") + logger.info(format_prefix_yellow + "Tool: " + format_suffix_yellow + parsed["tool"]["name"] + "\n") return AgentGPTAction( name=parsed["tool"]["name"], args=parsed["tool"]["args"], @@ -78,7 +80,7 @@ def parse_tasks(self, text: str) -> AgentTasks: error=f"Could not parse invalid json: {text}", ) try: - print("Tasks: ", parsed["tasks"]) + logger.info("Tasks: ", parsed["tasks"]) return AgentTasks( tasks=parsed["tasks"] ) diff --git a/superagi/agent/super_agi.py b/superagi/agent/super_agi.py index 1080cb8f1..b33addd4c 100644 --- a/superagi/agent/super_agi.py +++ b/superagi/agent/super_agi.py @@ -32,6 +32,7 @@ from superagi.models.resource import Resource from 
superagi.config.config import get_config import os +from superagi.lib.logger import logger FINISH = "finish" WRITE_FILE = "Write File" @@ -143,7 +144,7 @@ def execute(self, workflow_step: AgentWorkflowStep): # agent_id=self.agent_config["agent_id"], feed=template_step.prompt, # role="user") - print(prompt) + logger.info(prompt) if len(agent_feeds) <= 0: for message in messages: agent_execution_feed = AgentExecutionFeed(agent_execution_id=self.agent_config["agent_execution_id"], @@ -184,7 +185,7 @@ def execute(self, workflow_step: AgentWorkflowStep): for task in reversed(tasks): task_queue.add_task(task) if len(tasks) > 0: - print("Tasks reprioritized in order: " + str(tasks)) + logger.info("Tasks reprioritized in order: " + str(tasks)) current_tasks = task_queue.get_tasks() if len(current_tasks) == 0: final_response = {"result": "COMPLETE", "pending_task_count": 0} @@ -195,7 +196,7 @@ def execute(self, workflow_step: AgentWorkflowStep): for task in reversed(tasks): task_queue.add_task(task) if len(tasks) > 0: - print("Adding task to queue: " + str(tasks)) + logger.info("Adding task to queue: " + str(tasks)) for task in tasks: agent_execution_feed = AgentExecutionFeed(agent_execution_id=self.agent_config["agent_execution_id"], agent_id=self.agent_config["agent_id"], @@ -215,7 +216,7 @@ def execute(self, workflow_step: AgentWorkflowStep): final_response["result"] = "PENDING" session.commit() - print("Iteration completed moving to next iteration!") + logger.info("Iteration completed moving to next iteration!") session.close() return final_response @@ -224,15 +225,15 @@ def handle_tool_response(self, assistant_reply): tools = {t.name: t for t in self.tools} if action.name == FINISH or action.name == "": - print("\nTask Finished :) \n") + logger.info("\nTask Finished :) \n") output = {"result": "COMPLETE", "retry": False} return output if action.name in tools: tool = tools[action.name] try: observation = tool.execute(action.args) - print("Tool Observation : ") - print(observation) + logger.info("Tool Observation : ") + logger.info(observation) except ValidationError as e: observation = ( @@ -255,7 +256,7 @@ def handle_tool_response(self, assistant_reply): ) output = {"result": result, "retry": True} - print("Tool Response : " + str(output) + "\n") + logger.info("Tool Response : " + str(output) + "\n") return output def update_agent_execution_tokens(self, current_calls, total_tokens): diff --git a/superagi/config/config.py b/superagi/config/config.py index e41d5aa47..54067960f 100644 --- a/superagi/config/config.py +++ b/superagi/config/config.py @@ -2,6 +2,7 @@ from pydantic import BaseSettings from pathlib import Path import yaml +from superagi.lib.logger import logger CONFIG_FILE = "config.yaml" @@ -21,7 +22,7 @@ def load_config(cls, config_file: str) -> dict: config_data = {} else: # If config file doesn't exist, prompt for credentials and create new file - print("\033[91m\033[1m" + logger.info("\033[91m\033[1m" + "\nConfig file not found. Enter required keys and values." 
+ "\033[0m\033[0m") config_data = { diff --git a/superagi/controllers/config.py b/superagi/controllers/config.py index 7ee86d331..08bc6131b 100644 --- a/superagi/controllers/config.py +++ b/superagi/controllers/config.py @@ -8,6 +8,7 @@ from superagi.helper.auth import check_auth from fastapi_jwt_auth import AuthJWT from superagi.helper.encyption_helper import encrypt_data,decrypt_data +from superagi.lib.logger import logger router = APIRouter() @@ -40,10 +41,10 @@ def create_config(config: sqlalchemy_to_pydantic(Configuration, exclude=["id"]), db.session.flush() return existing_config - print("NEW CONFIG") + logger.info("NEW CONFIG") new_config = Configuration(organisation_id=organisation_id, key=config.key, value=config.value) - print(new_config) - print("ORGANISATION ID : ",organisation_id) + logger.info(new_config) + logger.info("ORGANISATION ID : ",organisation_id) db.session.add(new_config) db.session.commit() db.session.flush() diff --git a/superagi/controllers/organisation.py b/superagi/controllers/organisation.py index e7986a5b9..840b6266c 100644 --- a/superagi/controllers/organisation.py +++ b/superagi/controllers/organisation.py @@ -8,6 +8,7 @@ from superagi.helper.auth import check_auth from superagi.models.project import Project from superagi.models.user import User +from superagi.lib.logger import logger router = APIRouter() @@ -25,7 +26,7 @@ def create_organisation(organisation: sqlalchemy_to_pydantic(Organisation, exclu db.session.add(new_organisation) db.session.commit() db.session.flush() - print(new_organisation) + logger.info(new_organisation) return new_organisation diff --git a/superagi/controllers/project.py b/superagi/controllers/project.py index 3db49d07a..bd9afdd16 100644 --- a/superagi/controllers/project.py +++ b/superagi/controllers/project.py @@ -6,6 +6,7 @@ from fastapi import APIRouter from pydantic_sqlalchemy import sqlalchemy_to_pydantic from superagi.helper.auth import check_auth +from superagi.lib.logger import logger router = APIRouter() @@ -17,7 +18,7 @@ def create_project(project: sqlalchemy_to_pydantic(Project, exclude=["id"]), """Create a new project""" - print("Organisation_id : ", project.organisation_id) + logger.info("Organisation_id : ", project.organisation_id) organisation = db.session.query(Organisation).get(project.organisation_id) if not organisation: diff --git a/superagi/controllers/resources.py b/superagi/controllers/resources.py index b05f58627..504cebd98 100644 --- a/superagi/controllers/resources.py +++ b/superagi/controllers/resources.py @@ -20,7 +20,7 @@ from botocore.exceptions import NoCredentialsError import tempfile import requests - +from superagi.lib.logger import logger router = APIRouter() @@ -63,7 +63,7 @@ async def upload(agent_id: int, file: UploadFile = File(...), name=Form(...), si path = 'input/'+file_name[0]+ '_'+str(datetime.datetime.now()).replace(' ','').replace('.','').replace(':','')+'.'+file_name[1] try: s3.upload_fileobj(file.file, bucket_name, path) - print("File uploaded successfully!") + logger.info("File uploaded successfully!") except NoCredentialsError: raise HTTPException(status_code=500, detail="AWS credentials not found. 
Check your configuration.") @@ -72,7 +72,7 @@ async def upload(agent_id: int, file: UploadFile = File(...), name=Form(...), si db.session.add(resource) db.session.commit() db.session.flush() - print(resource) + logger.info(resource) return resource diff --git a/superagi/controllers/user.py b/superagi/controllers/user.py index 70faa29bf..973836500 100644 --- a/superagi/controllers/user.py +++ b/superagi/controllers/user.py @@ -8,6 +8,7 @@ from fastapi import APIRouter from pydantic_sqlalchemy import sqlalchemy_to_pydantic from superagi.helper.auth import check_auth +from superagi.lib.logger import logger router = APIRouter() @@ -25,7 +26,7 @@ def create_user(user: sqlalchemy_to_pydantic(User, exclude=["id"]), db.session.flush() organisation = Organisation.find_or_create_organisation(db.session, db_user) Project.find_or_create_default_project(db.session, organisation.id) - print("User created", db_user) + logger.info("User created", db_user) return db_user diff --git a/superagi/helper/github_helper.py b/superagi/helper/github_helper.py index 4e3f9bad1..f7ac49d2c 100644 --- a/superagi/helper/github_helper.py +++ b/superagi/helper/github_helper.py @@ -1,5 +1,6 @@ import base64 import requests +from superagi.lib.logger import logger class GithubHelper: @@ -25,7 +26,7 @@ def check_repository_visibility(self, repository_owner, repository_name): repository_data = response.json() return repository_data['private'] else: - print(f"Failed to fetch repository information: {response.status_code} - {response.text}") + logger.info(f"Failed to fetch repository information: {response.status_code} - {response.text}") return None def search_repo(self, repository_owner, repository_name, file_name, folder_path=None): @@ -53,19 +54,18 @@ def sync_branch(self, repository_owner, repository_name, base_branch, head_branc } response = requests.patch(head_branch_url, json=data, headers=headers) if response.status_code == 200: - print( - f'Successfully synced {self.github_username}:{head_branch} branch with {repository_owner}:{base_branch}') + logger.info(f'Successfully synced {self.github_username}:{head_branch} branch with {repository_owner}:{base_branch}') else: - print('Failed to sync the branch. Check your inputs and permissions.') + logger.info('Failed to sync the branch. 
Check your inputs and permissions.') def make_fork(self, repository_owner, repository_name, base_branch, headers): fork_url = f'https://api.github.com/repos/{repository_owner}/{repository_name}/forks' fork_response = requests.post(fork_url, headers=headers) if fork_response.status_code == 202: - print('Fork created successfully.') + logger.info('Fork created successfully.') self.sync_branch(repository_owner, repository_name, base_branch, base_branch, headers) else: - print('Failed to create the fork:', fork_response.json()['message']) + logger.info('Failed to create the fork:', fork_response.json()['message']) return fork_response.status_code @@ -79,11 +79,11 @@ def create_branch(self, repository_name, base_branch, head_branch, headers): } branch_response = requests.post(branch_url, json=branch_params, headers=headers) if branch_response.status_code == 201: - print('Branch created successfully.') + logger.info('Branch created successfully.') elif branch_response.status_code == 422: - print('Branch new-file already exists, making commits to new-file branch') + logger.info('Branch new-file already exists, making commits to new-file branch') else: - print('Failed to create branch:', branch_response.json()['message']) + logger.info('Failed to create branch:', branch_response.json()['message']) return branch_response.status_code @@ -97,9 +97,9 @@ def delete_file(self, repository_name, file_name, folder_path, commit_message, h } file_response = requests.delete(file_url, json=file_params, headers=headers) if file_response.status_code == 200: - print('File or folder delete successfully.') + logger.info('File or folder delete successfully.') else: - print('Failed to Delete file or folder:', file_response.json()) + logger.info('Failed to Delete file or folder:', file_response.json()) return file_response.status_code @@ -117,11 +117,11 @@ def add_file(self, repository_owner, repository_name, file_name, folder_path, he } file_response = requests.put(file_url, json=file_params, headers=headers) if file_response.status_code == 201: - print('File content uploaded successfully.') + logger.info('File content uploaded successfully.') elif file_response.status_code == 422: - print('File already exists') + logger.info('File already exists') else: - print('Failed to upload file content:', file_response.json()['message']) + logger.info('Failed to upload file content:', file_response.json()['message']) return file_response.status_code def create_pull_request(self, repository_owner, repository_name, head_branch, base_branch, headers): @@ -136,11 +136,11 @@ def create_pull_request(self, repository_owner, repository_name, head_branch, ba pr_response = requests.post(pull_request_url, json=pull_request_params, headers=headers) if pr_response.status_code == 201: - print('Pull request created successfully.') + logger.info('Pull request created successfully.') elif pr_response.status_code == 422: - print('Added changes to already existing pull request') + logger.info('Added changes to already existing pull request') else: - print('Failed to create pull request:', pr_response.json()['message']) + logger.info('Failed to create pull request:', pr_response.json()['message']) return pr_response.status_code diff --git a/superagi/helper/google_search.py b/superagi/helper/google_search.py index b4d6285c6..17b3031cc 100644 --- a/superagi/helper/google_search.py +++ b/superagi/helper/google_search.py @@ -1,6 +1,7 @@ import requests import time from pydantic import BaseModel +from superagi.lib.logger import logger from 
superagi.helper.webpage_extractor import WebpageExtractor @@ -37,11 +38,11 @@ def search_run(self, query): all_snippets.append(item["snippet"]) links.append(item["link"]) else: - print("No items found in the response.") + logger.info("No items found in the response.") except ValueError as e: - print(f"Error while parsing JSON data: {e}") + logger.error(f"Error while parsing JSON data: {e}") else: - print(f"Error: {response.status_code}") + logger.error(f"Error: {response.status_code}") return all_snippets, links, response.status_code @@ -52,7 +53,7 @@ def get_result(self, query): attempts = 0 while snippets == [] and attempts < 2: attempts += 1 - print("Google blocked the request. Trying again...") + logger.info("Google blocked the request. Trying again...") time.sleep(3) snippets, links, error_code = self.search_run(query) diff --git a/superagi/helper/json_cleaner.py b/superagi/helper/json_cleaner.py index f5b43119f..c3fab3cb5 100644 --- a/superagi/helper/json_cleaner.py +++ b/superagi/helper/json_cleaner.py @@ -1,5 +1,6 @@ import json import re +from superagi.lib.logger import logger class JsonCleaner: @@ -22,7 +23,7 @@ def check_and_clean_json(cls, json_string: str): json.loads(json_string) return json_string except json.JSONDecodeError as e: - print(json_string) + logger.info(json_string) # If the json is still invalid, try to extract the json section json_string = cls.extract_json_section(json_string) return json_string diff --git a/superagi/helper/resource_helper.py b/superagi/helper/resource_helper.py index 7fded30f2..68b870684 100644 --- a/superagi/helper/resource_helper.py +++ b/superagi/helper/resource_helper.py @@ -2,6 +2,7 @@ from superagi.models.resource import Resource import os import datetime +from superagi.lib.logger import logger class ResourceHelper: @@ -39,7 +40,7 @@ def make_written_file_resource(file_name: str, agent_id: int, file, channel): else: path = 'output' - print(path + "/" + file_name) + logger.info(path + "/" + file_name) resource = Resource(name=file_name, path=path + "/" + file_name, storage_type=storage_type, size=file_size, type=file_type, channel="OUTPUT", diff --git a/superagi/helper/s3_helper.py b/superagi/helper/s3_helper.py index c06c08f98..dacd8d96c 100644 --- a/superagi/helper/s3_helper.py +++ b/superagi/helper/s3_helper.py @@ -1,6 +1,7 @@ import boto3 from superagi.config.config import get_config from fastapi import HTTPException +from superagi.lib.logger import logger class S3Helper: def __init__(self): @@ -14,6 +15,6 @@ def __init__(self): def upload_file(self, file, path): try: self.s3.upload_fileobj(file, self.bucket_name, path) - print("File uploaded to S3 successfully!") + logger.info("File uploaded to S3 successfully!") except: raise HTTPException(status_code=500, detail="AWS credentials not found. Check your configuration.") diff --git a/superagi/helper/token_counter.py b/superagi/helper/token_counter.py index 793ceafa6..c6293d00c 100644 --- a/superagi/helper/token_counter.py +++ b/superagi/helper/token_counter.py @@ -3,6 +3,7 @@ import tiktoken from superagi.types.common import BaseMessage +from superagi.lib.logger import logger class TokenCounter: @staticmethod @@ -11,7 +12,7 @@ def token_limit(model: str = "gpt-3.5-turbo-0301") -> int: model_token_limit_dict = {"gpt-3.5-turbo-0301": 4032, "gpt-4-0314": 8092, "gpt-3.5-turbo": 4032, "gpt-4": 8092} return model_token_limit_dict[model] except KeyError: - print("Warning: model not found. Using cl100k_base encoding.") + logger.warning("Warning: model not found. 
Using cl100k_base encoding.") return 8092 @staticmethod @@ -20,7 +21,7 @@ def count_message_tokens(messages: List[BaseMessage], model: str = "gpt-3.5-turb model_token_per_message_dict = {"gpt-3.5-turbo-0301": 4, "gpt-4-0314": 3, "gpt-3.5-turbo": 4, "gpt-4": 3} encoding = tiktoken.encoding_for_model(model) except KeyError: - print("Warning: model not found. Using cl100k_base encoding.") + logger.warning("Warning: model not found. Using cl100k_base encoding.") encoding = tiktoken.get_encoding("cl100k_base") tokens_per_message = model_token_per_message_dict[model] diff --git a/superagi/helper/webpage_extractor.py b/superagi/helper/webpage_extractor.py index 4f98dc914..1379afb27 100644 --- a/superagi/helper/webpage_extractor.py +++ b/superagi/helper/webpage_extractor.py @@ -10,6 +10,7 @@ import time import random from lxml import html +from superagi.lib.logger import logger USER_AGENTS = [ "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.3", @@ -59,15 +60,15 @@ def extract_with_3k(self, url): return content[:1500] except ArticleException as ae: - print(f"Error while extracting text from HTML (newspaper3k): {str(ae)}") + logger.error(f"Error while extracting text from HTML (newspaper3k): {str(ae)}") return f"Error while extracting text from HTML (newspaper3k): {str(ae)}" except RequestException as re: - print(f"Error while making the request to the URL (newspaper3k): {str(re)}") + logger.error(f"Error while making the request to the URL (newspaper3k): {str(re)}") return f"Error while making the request to the URL (newspaper3k): {str(re)}" except Exception as e: - print(f"Unknown error while extracting text from HTML (newspaper3k): {str(e)}") + logger.error(f"Unknown error while extracting text from HTML (newspaper3k): {str(e)}") return "" def extract_with_bs4(self, url): @@ -96,11 +97,11 @@ def extract_with_bs4(self, url): elif response.status_code == 404: return f"Error: 404. Url is invalid or does not exist. Try with valid url..." 
            else:
-                print(f"Error while extracting text from HTML (bs4): {response.status_code}")
+                logger.error(f"Error while extracting text from HTML (bs4): {response.status_code}")
                 return f"Error while extracting text from HTML (bs4): {response.status_code}"
 
         except Exception as e:
-            print(f"Unknown error while extracting text from HTML (bs4): {str(e)}")
+            logger.error(f"Unknown error while extracting text from HTML (bs4): {str(e)}")
             return ""
 
     def extract_with_lxml(self, url):
@@ -122,14 +123,14 @@
             return content
 
         except ArticleException as ae:
-            print(f"Error while extracting text from HTML (lxml): {str(ae)}")
+            logger.error("Error while extracting text from HTML (lxml): {str(ae)}")
             return ""
 
         except RequestException as re:
-            print(f"Error while making the request to the URL (lxml): {str(re)}")
+            logger.error(f"Error while making the request to the URL (lxml): {str(re)}")
             return ""
 
         except Exception as e:
-            print(f"Unknown error while extracting text from HTML (lxml): {str(e)}")
+            logger.error(f"Unknown error while extracting text from HTML (lxml): {str(e)}")
             return ""
\ No newline at end of file
diff --git a/superagi/jobs/agent_executor.py b/superagi/jobs/agent_executor.py
index a91697bbd..55b7dfab1 100644
--- a/superagi/jobs/agent_executor.py
+++ b/superagi/jobs/agent_executor.py
@@ -41,6 +41,7 @@
 from superagi.helper.encyption_helper import decrypt_data
 from sqlalchemy import func
 import superagi.worker
+from superagi.lib.logger import logger
 
 engine = connect_db()
 Session = sessionmaker(bind=engine)
@@ -117,7 +118,7 @@ def execute_next_action(self, agent_execution_id):
             db_agent_execution = session.query(AgentExecution).filter(AgentExecution.id == agent_execution_id).first()
             db_agent_execution.status = "ITERATION_LIMIT_EXCEEDED"
             session.commit()
-            print("ITERATION_LIMIT_CROSSED")
+            logger.info("ITERATION_LIMIT_CROSSED")
             return "ITERATION_LIMIT_CROSSED"
 
         parsed_config["agent_execution_id"] = agent_execution.id
@@ -132,7 +133,7 @@ def execute_next_action(self, agent_execution_id):
             memory = VectorFactory.get_vector_storage("PineCone", "super-agent-index1",
                                                       OpenAiEmbedding(model_api_key))
         except:
-            print("Unable to setup the pinecone connection...")
+            logger.info("Unable to setup the pinecone connection...")
             memory = None
 
         user_tools = session.query(Tool).filter(Tool.id.in_(parsed_config["tools"])).all()
@@ -161,7 +162,7 @@ def execute_next_action(self, agent_execution_id):
             session.commit()
         else:
-            print("Starting next job for agent execution id: ", agent_execution_id)
+            logger.info("Starting next job for agent execution id: ", agent_execution_id)
             superagi.worker.execute_agent.delay(agent_execution_id, datetime.now())
 
         session.close()
diff --git a/superagi/lib/logger.py b/superagi/lib/logger.py
new file mode 100644
index 000000000..ee57972e5
--- /dev/null
+++ b/superagi/lib/logger.py
@@ -0,0 +1,64 @@
+import logging
+import inspect
+
+class CustomLogRecord(logging.LogRecord):
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+
+        frame = inspect.currentframe().f_back
+        while frame:
+            if frame.f_globals['__name__'] != __name__ and frame.f_globals['__name__'] != 'logging':
+                break
+            frame = frame.f_back
+
+        if frame:
+            self.filename = frame.f_code.co_filename
+            self.lineno = frame.f_lineno
+        else:
+            self.filename = "unknown"
+            self.lineno = 0
+
+class SingletonMeta(type):
+    _instances = {}
+
+    def __call__(cls, *args, **kwargs):
+        if cls not in cls._instances:
+            instance = super().__call__(*args, **kwargs)
+            cls._instances[cls] = instance
+        return cls._instances[cls]
+
+class Logger(metaclass=SingletonMeta):
+    def __init__(self, logger_name='Super AGI', log_level=logging.DEBUG):
+        if not hasattr(self, 'logger'):
+            self.logger = logging.getLogger(logger_name)
+            self.logger.setLevel(log_level)
+            self.logger.makeRecord = self._make_custom_log_record
+
+            console_handler = logging.StreamHandler()
+            console_handler.setLevel(log_level)
+
+            formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - [%(filename)s:%(lineno)d] - %(message)s',
+                                          datefmt='%Y-%m-%d %H:%M:%S %Z')
+
+            console_handler.setFormatter(formatter)
+            self.logger.addHandler(console_handler)
+
+    def _make_custom_log_record(self, name, level, fn, lno, msg, args, exc_info, func=None, extra=None, sinfo=None):
+        return CustomLogRecord(name, level, fn, lno, msg, args, exc_info, func=func, extra=extra, sinfo=sinfo)
+
+    def debug(self, message):
+        self.logger.debug(message)
+
+    def info(self, message):
+        self.logger.info(message)
+
+    def warning(self, message):
+        self.logger.warning(message)
+
+    def error(self, message):
+        self.logger.error(message)
+
+    def critical(self, message):
+        self.logger.critical(message)
+
+logger = Logger('Super AGI')
\ No newline at end of file
diff --git a/superagi/llms/openai.py b/superagi/llms/openai.py
index 86e58dbdb..bd3d6e97b 100644
--- a/superagi/llms/openai.py
+++ b/superagi/llms/openai.py
@@ -5,6 +5,7 @@
 import openai
 from superagi.llms.base_llm import BaseLlm
 from superagi.config.config import get_config
+from superagi.lib.logger import logger
 
 
 class OpenAi(BaseLlm):
@@ -45,7 +46,7 @@ def chat_completion(self, messages, max_tokens=get_config("MAX_MODEL_TOKEN_LIMIT
             content = response.choices[0].message["content"]
             return {"response": response, "content": content}
         except Exception as exception:
-            print("Exception:", exception)
+            logger.info("Exception:", exception)
             return {"error": exception}
 
     def generate_image(self, prompt: str, size: int = 512, num: int = 2):
diff --git a/superagi/message_broker/kafka.py b/superagi/message_broker/kafka.py
index 4f1e93e1b..ed1c9a8a7 100644
--- a/superagi/message_broker/kafka.py
+++ b/superagi/message_broker/kafka.py
@@ -1,6 +1,6 @@
 import os
 from confluent_kafka import Producer, Consumer, KafkaError
-
+from superagi.lib.logger import logger
 import redis
 
 from superagi.config.config import get_config
@@ -36,7 +36,7 @@ def start_listener(self, topic, on_message_received):
         # Subscribe to the topic
         consumer.subscribe([topic])
 
-        print("Waiting for messages. To exit, press CTRL+C")
+        logger.info("Waiting for messages. To exit, press CTRL+C")
 
         # Start consuming messages
         try:
@@ -50,7 +50,7 @@ def start_listener(self, topic, on_message_received):
                     continue
                 else:
                     # Log error and continue to next message
-                    print("Error occurred:", message.error())
+                    logger.error("Error occurred:", message.error())
                     continue
 
                 # Process the received message
diff --git a/superagi/message_broker/redis.py b/superagi/message_broker/redis.py
index dd2107be8..58be6e73a 100644
--- a/superagi/message_broker/redis.py
+++ b/superagi/message_broker/redis.py
@@ -2,6 +2,7 @@
 import redis
 
 from superagi.config.config import get_config
+from superagi.lib.logger import logger
 
 
 # Message broker connection parameters
@@ -14,14 +15,14 @@ def __int__(self):
     def push_message(self, topic: str, message: str):
         # Establish connection to the message broker
         self.redis_client.publish(topic, message)
-        print("Message sent to the broker.")
+        logger.info("Message sent to the broker.")
 
     def start_listener(self, topic: str, on_message_received: callable):
         # Subscribe to the channel
         pubsub = self.redis_client.pubsub()
         pubsub.subscribe(topic)
 
-        print("Waiting for messages. To exit, press CTRL+C")
+        logger.info("Waiting for messages. To exit, press CTRL+C")
 
         # Start listening for messages
         for message in pubsub.listen():
diff --git a/superagi/models/agent.py b/superagi/models/agent.py
index 7813fc369..f701d884b 100644
--- a/superagi/models/agent.py
+++ b/superagi/models/agent.py
@@ -11,6 +11,7 @@
 from superagi.models.agent_workflow import AgentWorkflow
 #from superagi.models import AgentConfiguration
 from superagi.models.base_model import DBBaseModel
+from superagi.lib.logger import logger
 
 
@@ -76,7 +77,7 @@ def create_agent_with_config(cls, db, agent_with_config):
         if agent_with_config.agent_type == "Don't Maintain Task Queue":
             agent_workflow = db.session.query(AgentWorkflow).filter(AgentWorkflow.name == "Goal Based Agent").first()
-            print(agent_workflow)
+            logger.info(agent_workflow)
             db_agent.agent_workflow_id = agent_workflow.id
         elif agent_with_config.agent_type == "Maintain Task Queue":
             agent_workflow = db.session.query(AgentWorkflow).filter(
diff --git a/superagi/models/db.py b/superagi/models/db.py
index 830514f12..b7bca4ce3 100644
--- a/superagi/models/db.py
+++ b/superagi/models/db.py
@@ -1,5 +1,6 @@
 from sqlalchemy import create_engine
 from superagi.config.config import get_config
+from superagi.lib.logger import logger
 
 database_url = get_config('POSTGRES_URL')
 db_username = get_config('DB_USERNAME')
@@ -26,8 +27,8 @@ def connect_db():
     # Test the connection
     try:
         connection = engine.connect()
-        print("Connected to the database! @ " + db_url)
+        logger.info("Connected to the database! @ " + db_url)
         connection.close()
     except Exception as e:
-        print("Unable to connect to the database:", e)
+        logger.error("Unable to connect to the database:", e)
     return engine
diff --git a/superagi/tools/code/tools.py b/superagi/tools/code/tools.py
index 8e74ea3f8..2e1361d92 100644
--- a/superagi/tools/code/tools.py
+++ b/superagi/tools/code/tools.py
@@ -5,6 +5,7 @@
 from superagi.agent.agent_prompt_builder import AgentPromptBuilder
 from superagi.llms.base_llm import BaseLlm
 from superagi.tools.base_tool import BaseTool
+from superagi.lib.logger import logger
 
 
 class CodingSchema(BaseModel):
@@ -46,5 +47,5 @@ def _execute(self, task_description: str):
             result = self.llm.chat_completion(messages, max_tokens=self.max_token_limit)
             return result["content"]
         except Exception as e:
-            print(e)
+            logger.error(e)
             return f"Error generating text: {e}"
\ No newline at end of file
diff --git a/superagi/tools/file/write_file.py b/superagi/tools/file/write_file.py
index cb4ef703c..41a4d91e1 100644
--- a/superagi/tools/file/write_file.py
+++ b/superagi/tools/file/write_file.py
@@ -8,6 +8,7 @@
 from superagi.helper.resource_helper import ResourceHelper
 # from superagi.helper.s3_helper import upload_to_s3
 from superagi.helper.s3_helper import S3Helper
+from superagi.lib.logger import logger
 
 
@@ -52,7 +53,7 @@ def _execute(self, file_name: str, content: str):
             if resource.storage_type == "S3":
                 s3_helper = S3Helper()
                 s3_helper.upload_file(file, path=resource.path)
-            print("Resource Uploaded to S3!")
+            logger.info("Resource Uploaded to S3!")
             session.close()
             return f"File written to successfully - {file_name}"
         except Exception as err:
diff --git a/superagi/tools/github/delete_file.py b/superagi/tools/github/delete_file.py
index 0146153ba..7f1dec301 100644
--- a/superagi/tools/github/delete_file.py
+++ b/superagi/tools/github/delete_file.py
@@ -4,6 +4,7 @@
 from superagi.config.config import get_config
 from superagi.tools.base_tool import BaseTool
 from superagi.helper.github_helper import GithubHelper
+from superagi.lib.logger import logger
 
 
 class GithubDeleteFileSchema(BaseModel):
@@ -54,7 +55,7 @@ def _execute(self, repository_name: str, base_branch: str, file_name: str, commi
         if repository_owner != github_username:
             fork_response = github_helper.make_fork(repository_owner, repository_name, base_branch, headers)
         branch_response = github_helper.create_branch(repository_name, base_branch, head_branch, headers)
-        print("branch_response", branch_response)
+        logger.info("branch_response", branch_response)
         if branch_response == 201 or branch_response == 422:
             github_helper.sync_branch(github_username, repository_name, base_branch, head_branch, headers)
diff --git a/superagi/tools/human/tool.py b/superagi/tools/human/tool.py
index d5ad344e6..971dc7385 100644
--- a/superagi/tools/human/tool.py
+++ b/superagi/tools/human/tool.py
@@ -3,11 +3,11 @@
 from pydantic import Field, BaseModel
 
 from superagi.tools.base_tool import BaseTool
-
+from superagi.lib.logger import logger
 
 def print_func(text: str) -> None:
-    print("\n")
-    print(text)
+    logger.info("\n")
+    logger.info(text)
 
 class HumanInputSchema(BaseModel):
     query: str = Field(
diff --git a/superagi/tools/image_generation/dalle_image_gen.py b/superagi/tools/image_generation/dalle_image_gen.py
index dc5067c65..010e28c7b 100644
--- a/superagi/tools/image_generation/dalle_image_gen.py
+++ b/superagi/tools/image_generation/dalle_image_gen.py
@@ -9,6 +9,7 @@
 from superagi.helper.resource_helper import ResourceHelper
 from superagi.helper.s3_helper import S3Helper
 from sqlalchemy.orm import sessionmaker
+from superagi.lib.logger import logger
 
 
@@ -64,7 +65,7 @@ def _execute(self, prompt: str, image_name: list, size: int = 512, num: int = 2)
                 s3_helper = S3Helper()
                 s3_helper.upload_file(img, path=resource.path)
             session.close()
-            print(f"Image {image} saved successfully")
+            logger.info(f"Image {image} saved successfully")
         except Exception as err:
             return f"Error: {err}"
         return "Images downloaded successfully"
diff --git a/superagi/tools/searx/search_scraper.py b/superagi/tools/searx/search_scraper.py
index ceaf88283..725bf30fc 100644
--- a/superagi/tools/searx/search_scraper.py
+++ b/superagi/tools/searx/search_scraper.py
@@ -3,6 +3,7 @@
 import httpx
 from bs4 import BeautifulSoup
 from pydantic import BaseModel
+from superagi.lib.logger import logger
 
 searx_hosts = ["https://search.ononoki.org", "https://searx.be", "https://search.us.projectsegfau.lt"]
 
@@ -26,7 +27,7 @@ def search(query):
         searx_url + "/search", params={"q": query}, headers={"User-Agent": "Mozilla/5.0 (X11; Linux i686; rv:109.0) Gecko/20100101 Firefox/114.0"}
     )
     if res.status_code != 200:
-        print(res.status_code, searx_url)
+        logger.info(res.status_code, searx_url)
         raise Exception(f"Searx returned {res.status_code} status code")
     return res.text
 
diff --git a/superagi/tools/thinking/tools.py b/superagi/tools/thinking/tools.py
index 3a4ea9d9d..1eb595b9f 100644
--- a/superagi/tools/thinking/tools.py
+++ b/superagi/tools/thinking/tools.py
@@ -9,6 +9,7 @@
 from superagi.config.config import get_config
 from superagi.llms.base_llm import BaseLlm
 from pydantic import BaseModel, Field, PrivateAttr
+from superagi.lib.logger import logger
 
 
 class ThinkingSchema(BaseModel):
@@ -49,5 +50,5 @@ def _execute(self, task_description: str):
             result = self.llm.chat_completion(messages, max_tokens=self.max_token_limit)
             return result["content"]
         except Exception as e:
-            print(e)
+            logger.error(e)
             return f"Error generating text: {e}"
\ No newline at end of file
diff --git a/superagi/worker.py b/superagi/worker.py
index 305a36f27..f1c31da23 100644
--- a/superagi/worker.py
+++ b/superagi/worker.py
@@ -1,4 +1,5 @@
 from __future__ import absolute_import
+from superagi.lib.logger import logger
 
 from celery import Celery
 
@@ -14,5 +15,5 @@
 @app.task(name="execute_agent", autoretry_for=(Exception,), retry_backoff=2, max_retries=5)
 def execute_agent(agent_execution_id: int, time):
     """Execute an agent step in background."""
-    print("Execute agent:" + str(time) + "," + str(agent_execution_id))
+    logger.info("Execute agent:" + str(time) + "," + str(agent_execution_id))
     AgentExecutor().execute_next_action(agent_execution_id=agent_execution_id)
diff --git a/test.py b/test.py
index 112d0d775..f8591dc46 100644
--- a/test.py
+++ b/test.py
@@ -1,6 +1,7 @@
 import argparse
 from datetime import datetime
 from time import time
+from superagi.lib.logger import logger
 
 from sqlalchemy.orm import sessionmaker
 
@@ -44,14 +45,14 @@ def run_superagi_cli(agent_name=None,agent_description=None,agent_goals=None):
     session.add(organization)
     session.flush()  # Flush pending changes to generate the agent's ID
     session.commit()
-    print(organization)
+    logger.info(organization)
 
     # Create default project associated with the organization
     project = Project(name='Default Project', description='Default project description', organisation_id=organization.id)
     session.add(project)
     session.flush()  # Flush pending changes to generate the agent's ID
     session.commit()
-    print(project)
+    logger.info(project)
 
     #Agent
     if agent_name is None:
@@ -62,7 +63,7 @@ def run_superagi_cli(agent_name=None,agent_description=None,agent_goals=None):
     session.add(agent)
     session.flush()
     session.commit()
-    print(agent)
+    logger.info(agent)
 
     #Agent Config
     # Create Agent Configuration
@@ -93,16 +94,16 @@ def run_superagi_cli(agent_name=None,agent_description=None,agent_goals=None):
     session.add_all(agent_configurations)
     session.commit()
-    print("Agent Config : ")
-    print(agent_configurations)
+    logger.info("Agent Config : ")
+    logger.info(agent_configurations)
 
     # Create agent execution in RUNNING state associated with the agent
     execution = AgentExecution(status='RUNNING', agent_id=agent.id, last_execution_time=datetime.utcnow())
     session.add(execution)
     session.commit()
-    print("Final Execution")
-    print(execution)
+    logger.info("Final Execution")
+    logger.info(execution)
 
     execute_agent.delay(execution.id, datetime.now())
 
diff --git a/ui.py b/ui.py
index fe4dce832..a030a8dcc 100644
--- a/ui.py
+++ b/ui.py
@@ -4,11 +4,11 @@
 from time import sleep
 import shutil
 from sys import platform
-
+from superagi.lib.logger import logger
 
 
 def check_command(command, message):
     if not shutil.which(command):
-        print(message)
+        logger.info(message)
         sys.exit(1)
 
@@ -17,7 +17,7 @@ def run_npm_commands(shell=False):
     try:
         subprocess.run(["npm", "install"], check=True,shell=shell)
     except subprocess.CalledProcessError:
-        print(f"Error during '{' '.join(sys.exc_info()[1].cmd)}'. Exiting.")
+        logger.error(f"Error during '{' '.join(sys.exc_info()[1].cmd)}'. Exiting.")
         sys.exit(1)
     os.chdir("..")
 
@@ -33,11 +33,11 @@ def run_server(shell=False):
 
 
 def cleanup(api_process, ui_process, celery_process):
-    print("Shutting down processes...")
+    logger.info("Shutting down processes...")
     api_process.terminate()
     ui_process.terminate()
     celery_process.terminate()
-    print("Processes terminated. Exiting.")
+    logger.info("Processes terminated. Exiting.")
     sys.exit(1)
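
Usage sketch (illustrative, not part of the commit above): superagi/lib/logger.py exposes a module-level singleton named logger whose debug/info/warning/error/critical methods each take a single message string, and whose formatter is set up to report the calling file and line through CustomLogRecord. The helper function and file name below are hypothetical, shown only to illustrate the call style the patch migrates print() statements to.

    from superagi.lib.logger import logger

    def load_config_text(path: str) -> str:
        # Hypothetical helper, not part of the patch. Values are interpolated with
        # f-strings because logger.info/logger.error accept one message argument
        # rather than print-style positional arguments.
        try:
            logger.info(f"Loading configuration from {path}")
            with open(path) as config_file:
                return config_file.read()
        except OSError as err:
            logger.error(f"Unable to read configuration file {path}: {err}")
            return ""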