Skip to content

User experiment migration tool #1724

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Open
wants to merge 12 commits into
base: main
Choose a base branch
from
Original file line number Diff line number Diff line change
@@ -1,12 +1,9 @@
from uuid import uuid4

from django.core.management.base import BaseCommand
from django.db import transaction
from django.db.models import Q

from apps.experiments.helper import convert_non_pipeline_experiment_to_pipeline
from apps.experiments.models import Experiment
from apps.pipelines.models import Pipeline
from apps.pipelines.nodes.nodes import AssistantNode, LLMResponseWithPrompt
from apps.teams.models import Flag


Expand Down Expand Up @@ -104,7 +101,7 @@ def handle(self, *args, **options):
for experiment in experiments_to_convert:
try:
with transaction.atomic():
self._convert_experiment(experiment)
convert_non_pipeline_experiment_to_pipeline(experiment)
converted_count += 1
self.stdout.write(self.style.SUCCESS(f"Success: {experiment.name}"))
except Exception as e:
Expand All @@ -122,79 +119,8 @@ def _get_experiment_type(self, experiment):
return "LLM"
else:
return "Unknown"

def _convert_experiment(self, experiment):
    """Attach a freshly built pipeline to *experiment* and clear its legacy fields."""
    if experiment.assistant:
        new_pipeline = self._create_assistant_pipeline(experiment)
    elif experiment.llm_provider:
        new_pipeline = self._create_llm_pipeline(experiment)
    else:
        raise ValueError(f"Unknown experiment type for experiment {experiment.id}")

    # Swap in the pipeline and null out the now-redundant direct configuration.
    experiment.pipeline = new_pipeline
    experiment.assistant = None
    experiment.llm_provider = None
    experiment.llm_provider_model = None
    experiment.save()

def _get_chatbots_flag_team_ids(self):
    """Return the ids of every team carrying the "flag_chatbots" feature flag."""
    flag = Flag.objects.get(name="flag_chatbots")
    return [team_id for team_id in flag.teams.values_list("id", flat=True)]

def _create_pipeline_with_node(self, experiment, node_type, node_label, node_params):
    """Create a pipeline with start -> custom_node -> end structure."""
    node_config = {
        "id": str(uuid4()),
        "type": "pipelineNode",
        "position": {"x": 400, "y": 200},
        "data": {
            "type": node_type,
            "label": node_label,
            "params": node_params,
        },
    }

    return Pipeline._create_pipeline_with_nodes(
        team=experiment.team,
        name=f"{experiment.name} Pipeline",
        middle_nodes_config=node_config,
    )

def _create_llm_pipeline(self, experiment):
    """Create a start -> LLMResponseWithPrompt -> end nodes pipeline for an LLM experiment."""
    source_material = experiment.source_material
    operations = experiment.custom_action_operations.select_related("custom_action").all()
    params = {
        "name": "llm",
        "llm_provider_id": experiment.llm_provider.id,
        "llm_provider_model_id": experiment.llm_provider_model.id,
        "llm_temperature": experiment.temperature,
        "history_type": "global",
        "history_name": None,
        "history_mode": "summarize",
        "user_max_token_limit": experiment.llm_provider_model.max_token_limit,
        "max_history_length": 10,
        "source_material_id": source_material.id if source_material else None,
        "prompt": experiment.prompt_text or "",
        "tools": list(experiment.tools) if experiment.tools else [],
        "custom_actions": [op.get_model_id(False) for op in operations],
        "built_in_tools": [],
        "tool_config": {},
    }

    return self._create_pipeline_with_node(
        experiment=experiment,
        node_type=LLMResponseWithPrompt.__name__,
        node_label="LLM",
        node_params=params,
    )

def _create_assistant_pipeline(self, experiment):
    """Create a start -> AssistantNode -> end nodes pipeline for an assistant experiment."""
    assistant_params = {
        "name": "assistant",
        "assistant_id": str(experiment.assistant.id),
        "citations_enabled": experiment.citations_enabled,
        "input_formatter": experiment.input_formatter or "",
    }

    return self._create_pipeline_with_node(
        experiment=experiment,
        node_type=AssistantNode.__name__,
        node_label="OpenAI Assistant",
        node_params=assistant_params,
    )
8 changes: 5 additions & 3 deletions apps/experiments/views/experiment.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,6 @@
from django.contrib.auth.decorators import permission_required
from django.contrib.auth.mixins import PermissionRequiredMixin
from django.core.exceptions import PermissionDenied, ValidationError
from django.core.management import call_command
from django.db import transaction
from django.db.models import Case, Count, IntegerField, When
from django.http import FileResponse, Http404, HttpResponse, HttpResponseForbidden, HttpResponseRedirect
Expand Down Expand Up @@ -1505,14 +1504,17 @@ def get_release_status_badge(request, team_slug: str, experiment_id: int):


def migrate_experiment_view(request, team_slug, experiment_id):
from apps.pipelines.helper import convert_non_pipeline_experiment_to_pipeline

experiment = get_object_or_404(Experiment, id=experiment_id, team__slug=team_slug)
failed_url = reverse(
"experiments:single_experiment_home",
kwargs={"team_slug": team_slug, "experiment_id": experiment_id},
)
try:
experiment = Experiment.objects.get(id=experiment_id)
call_command("migrate_nonpipeline_to_pipeline_experiments", experiment_id=experiment_id, skip_confirmation=True)
with transaction.atomic():
experiment = Experiment.objects.get(id=experiment_id)
convert_non_pipeline_experiment_to_pipeline(experiment)
messages.success(request, f'Successfully migrated experiment "{experiment.name}" to chatbot!')
return redirect("chatbots:single_chatbot_home", team_slug=team_slug, experiment_id=experiment_id)
except Exception as e:
Expand Down
80 changes: 80 additions & 0 deletions apps/pipelines/helper.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,3 +35,83 @@ def duplicate_pipeline_with_new_ids(pipeline_data):
edge["target"] = new_target_id

return new_data, old_to_new_node_ids


def convert_non_pipeline_experiment_to_pipeline(experiment):
    """Convert a legacy (assistant- or LLM-backed) experiment into a pipeline-backed one.

    Builds the appropriate pipeline for the experiment, attaches it, and clears
    the legacy configuration fields before saving.

    Raises:
        ValueError: if the experiment already has a pipeline attached, or its
            type cannot be determined (neither assistant nor llm_provider set).
    """
    if experiment.pipeline:
        # Guard against double-migration: converting an already-migrated
        # experiment would silently replace its existing pipeline.
        raise ValueError(f"Experiment already has a pipeline attached: {experiment.id}")
    elif experiment.assistant:
        pipeline = _create_assistant_pipeline(experiment)
    elif experiment.llm_provider:
        pipeline = _create_llm_pipeline(experiment)
    else:
        raise ValueError(f"Unknown experiment type for experiment {experiment.id}")

    # Swap in the pipeline and null out the now-redundant legacy fields.
    experiment.pipeline = pipeline
    experiment.assistant = None
    experiment.llm_provider = None
    experiment.llm_provider_model = None
    experiment.save()


def _create_pipeline_with_node(experiment, node_type, node_label, node_params):
    """Create a pipeline with start -> custom_node -> end structure."""
    # NOTE(review): imported inside the function, presumably to avoid a
    # circular import between helper and models — confirm before hoisting.
    from .models import Pipeline

    pipeline_name = f"{experiment.name} Pipeline"
    middle_node_config = {
        "id": str(uuid4()),
        "type": "pipelineNode",
        "position": {"x": 400, "y": 200},
        "data": {"type": node_type, "label": node_label, "params": node_params},
    }

    return Pipeline._create_pipeline_with_nodes(
        team=experiment.team, name=pipeline_name, middle_nodes_config=middle_node_config
    )


def _create_llm_pipeline(experiment):
    """Create a start -> LLMResponseWithPrompt -> end nodes pipeline for an LLM experiment."""
    # NOTE(review): imported inside the function, presumably to avoid a
    # circular import — confirm before hoisting to module level.
    from apps.pipelines.nodes.nodes import LLMResponseWithPrompt

    llm_params = {
        "name": "llm",
        "llm_provider_id": experiment.llm_provider.id,
        "llm_provider_model_id": experiment.llm_provider_model.id,
        "llm_temperature": experiment.temperature,
        "history_type": "global",
        "history_name": None,
        "history_mode": "summarize",
        "user_max_token_limit": experiment.llm_provider_model.max_token_limit,
        "max_history_length": 10,
        "source_material_id": experiment.source_material.id if experiment.source_material else None,
        "prompt": experiment.prompt_text or "",
        "tools": list(experiment.tools) if experiment.tools else [],
        "custom_actions": [
            op.get_model_id(False) for op in experiment.custom_action_operations.select_related("custom_action").all()
        ],
        "built_in_tools": [],
        "tool_config": {},
    }

    return _create_pipeline_with_node(
        experiment=experiment, node_type=LLMResponseWithPrompt.__name__, node_label="LLM", node_params=llm_params
    )


def _create_assistant_pipeline(experiment):
    """Create a start -> AssistantNode -> end nodes pipeline for an assistant experiment."""
    # NOTE(review): imported inside the function, presumably to avoid a
    # circular import — confirm before hoisting to module level.
    from apps.pipelines.nodes.nodes import AssistantNode

    assistant_params = {
        "name": "assistant",
        "assistant_id": str(experiment.assistant.id),
        "citations_enabled": experiment.citations_enabled,
        "input_formatter": experiment.input_formatter or "",
    }

    return _create_pipeline_with_node(
        experiment=experiment,
        node_type=AssistantNode.__name__,
        node_label="OpenAI Assistant",
        node_params=assistant_params,
    )