Skip to content

Commit

Permalink
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
accommodating new API.
Browse files Browse the repository at this point in the history
Signed-off-by: Ye, Xinyu <xinyu.ye@intel.com>
XinyuYe-Intel committed Jan 7, 2025
1 parent c35de47 commit fd0832f
Showing 5 changed files with 50 additions and 55 deletions.
2 changes: 1 addition & 1 deletion comps/finetuning/src/README.md
Original file line number Diff line number Diff line change
@@ -244,7 +244,7 @@ curl http://${your_ip}:8015/v1/finetune/list_checkpoints -X POST -H "Content-Typ

### 3.4 Leverage fine-tuned model

After fine-tuning job is done, fine-tuned model can be chosen from listed checkpoints, then the fine-tuned model can be used in other microservices. For example, fine-tuned reranking model can be used in [reranks](../../reranks/fastrag/README.md) microservice by assign its path to the environment variable `RERANK_MODEL_ID`, fine-tuned embedding model can be used in [embeddings](../../embeddings/README.md) microservice by assign its path to the environment variable `model`, LLMs after instruction tuning can be used in [llms](../../llms/text-generation/README.md) microservice by assign its path to the environment variable `your_hf_llm_model`.
After the fine-tuning job is done, the fine-tuned model can be chosen from the listed checkpoints and then used in other microservices. For example, a fine-tuned reranking model can be used in the [reranks](../../reranks/src/README.md) microservice by assigning its path to the environment variable `RERANK_MODEL_ID`, a fine-tuned embedding model can be used in the [embeddings](../../embeddings/src/README.md) microservice by assigning its path to the environment variable `model`, and LLMs after instruction tuning can be used in the [llms](../../llms/text-generation/README.md) microservice by assigning its path to the environment variable `your_hf_llm_model`.

## 🚀4. Descriptions for Finetuning parameters

3 changes: 2 additions & 1 deletion comps/finetuning/src/integrations/opea.py
Original file line number Diff line number Diff line change
@@ -14,7 +14,7 @@
from pydantic_yaml import to_yaml_file
from ray.job_submission import JobSubmissionClient

from comps import CustomLogger, OpeaComponent
from comps import CustomLogger, OpeaComponent, OpeaComponentRegistry
from comps.cores.proto.api_protocol import (
FileObject,
FineTuningJob,
@@ -85,6 +85,7 @@ async def upload_file(purpose: str = Form(...), file: UploadFile = File(...)):
return UploadFileRequest(purpose=purpose, file=file)


@OpeaComponentRegistry.register("OPEA_FINETUNING")
class OpeaFinetuning(OpeaComponent):
"""A specialized finetuning component derived from OpeaComponent for finetuning services."""

32 changes: 0 additions & 32 deletions comps/finetuning/src/opea_finetuning_controller.py

This file was deleted.

32 changes: 32 additions & 0 deletions comps/finetuning/src/opea_finetuning_loader.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

from comps import CustomLogger, OpeaComponentLoader

logger = CustomLogger("opea_finetuning_loader")


class OpeaFinetuningLoader(OpeaComponentLoader):
    """Loader exposing the fine-tuning API of the currently active component.

    Component discovery/activation is handled entirely by the base
    ``OpeaComponentLoader``; every public method here is a thin
    pass-through to ``self.component``.
    """

    def __init__(self, component_name, **kwargs):
        # Delegate component resolution to the base loader.
        super().__init__(component_name=component_name, **kwargs)

    def invoke(self, *args, **kwargs):
        # The generic invoke() entry point is intentionally a no-op;
        # callers use the dedicated fine-tuning methods below instead.
        pass

    def create_finetuning_jobs(self, *args, **kwargs):
        """Forward job creation to the active component."""
        component = self.component
        return component.create_finetuning_jobs(*args, **kwargs)

    def cancel_finetuning_job(self, *args, **kwargs):
        """Forward job cancellation to the active component."""
        component = self.component
        return component.cancel_finetuning_job(*args, **kwargs)

    def list_finetuning_checkpoints(self, *args, **kwargs):
        """Forward checkpoint listing to the active component."""
        component = self.component
        return component.list_finetuning_checkpoints(*args, **kwargs)

    def list_finetuning_jobs(self, *args, **kwargs):
        """Forward job listing to the active component."""
        component = self.component
        return component.list_finetuning_jobs(*args, **kwargs)

    def retrieve_finetuning_job(self, *args, **kwargs):
        """Forward single-job retrieval to the active component."""
        component = self.component
        return component.retrieve_finetuning_job(*args, **kwargs)

    async def upload_training_files(self, *args, **kwargs):
        """Forward (async) training-file upload to the active component."""
        component = self.component
        return await component.upload_training_files(*args, **kwargs)
36 changes: 15 additions & 21 deletions comps/finetuning/src/opea_finetuning_microservice.py
Original file line number Diff line number Diff line change
@@ -1,56 +1,50 @@
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
import os

from fastapi import BackgroundTasks, Depends

from comps import CustomLogger, opea_microservices, register_microservice
from comps.cores.proto.api_protocol import FineTuningJobIDRequest, UploadFileRequest
from comps.finetuning.src.integrations.finetune_config import FineTuningParams
from comps.finetuning.src.integrations.opea import OpeaFinetuning, upload_file
from comps.finetuning.src.opea_finetuning_loader import OpeaFinetuningLoader

logger = CustomLogger("opea_finetuning_microservice")

# The component to load is configurable via the environment; defaults to the
# registered "OPEA_FINETUNING" component (see the OpeaComponentRegistry decorator).
finetuning_component_name = os.getenv("FINETUNING_COMPONENT_NAME", "OPEA_FINETUNING")
# Initialize the loader; discovery/activation of the component happens inside
# OpeaFinetuningLoader (replacing the old OpeaFinetuningController flow).
loader = OpeaFinetuningLoader(
    finetuning_component_name,
    description=f"OPEA FINETUNING Component: {finetuning_component_name}",
)


@register_microservice(name="opea_service@finetuning", endpoint="/v1/fine_tuning/jobs", host="0.0.0.0", port=8015)
def create_finetuning_jobs(request: FineTuningParams, background_tasks: BackgroundTasks):
    """Create a fine-tuning job; long-running work is scheduled on background_tasks."""
    # The old controller-based call was removed in favor of the component loader.
    return loader.create_finetuning_jobs(request, background_tasks)


@register_microservice(
    name="opea_service@finetuning", endpoint="/v1/fine_tuning/jobs", host="0.0.0.0", port=8015, methods=["GET"]
)
def list_finetuning_jobs():
    """List all fine-tuning jobs known to the active component."""
    return loader.list_finetuning_jobs()


@register_microservice(
    name="opea_service@finetuning", endpoint="/v1/fine_tuning/jobs/retrieve", host="0.0.0.0", port=8015
)
def retrieve_finetuning_job(request: FineTuningJobIDRequest):
    """Retrieve a single fine-tuning job by its ID."""
    job = loader.retrieve_finetuning_job(request)
    return job


@register_microservice(
    name="opea_service@finetuning", endpoint="/v1/fine_tuning/jobs/cancel", host="0.0.0.0", port=8015
)
def cancel_finetuning_job(request: FineTuningJobIDRequest):
    """Cancel a fine-tuning job by its ID."""
    job = loader.cancel_finetuning_job(request)
    return job


@@ -61,15 +55,15 @@ def cancel_finetuning_job(request: FineTuningJobIDRequest):
port=8015,
)
async def upload_training_files(request: UploadFileRequest = Depends(upload_file)):
    """Upload training file(s); returns information about the stored upload."""
    # snake_case local per PEP 8 (was uploadFileInfo).
    upload_file_info = await loader.upload_training_files(request)
    return upload_file_info


@register_microservice(
    name="opea_service@finetuning", endpoint="/v1/finetune/list_checkpoints", host="0.0.0.0", port=8015
)
def list_checkpoints(request: FineTuningJobIDRequest):
    """List checkpoints produced by the given fine-tuning job."""
    checkpoints = loader.list_finetuning_checkpoints(request)
    return checkpoints


0 comments on commit fd0832f

Please sign in to comment.