Commit c8e458d ("deployment-1")
Parent(s): f611bc7
This view is limited to 50 files because the commit contains too many changes; see the raw diff for the complete change set.
- .gradio/certificate.pem +31 -0
- app.py +95 -0
- erp_core/Tools/__pycache__/customer_relationship_management.cpython-311.pyc +0 -0
- erp_core/Tools/__pycache__/finalcial_management.cpython-311.pyc +0 -0
- erp_core/Tools/__pycache__/human_resource.cpython-311.pyc +0 -0
- erp_core/Tools/__pycache__/project_management.cpython-311.pyc +0 -0
- erp_core/Tools/__pycache__/supply_chain_management.cpython-311.pyc +0 -0
- erp_core/Tools/customer_relationship_management.py +14 -0
- erp_core/Tools/finalcial_management.py +26 -0
- erp_core/Tools/human_resource.py +27 -0
- erp_core/Tools/project_management.py +14 -0
- erp_core/Tools/supply_chain_management.py +14 -0
- erp_core/__pycache__/_event.cpython-311.pyc +0 -0
- erp_core/__pycache__/_llm.cpython-311.pyc +0 -0
- erp_core/__pycache__/asr_and_tts.cpython-311.pyc +0 -0
- erp_core/__pycache__/assistant_class.cpython-311.pyc +0 -0
- erp_core/__pycache__/config.cpython-311.pyc +0 -0
- erp_core/__pycache__/entry_node.cpython-311.pyc +0 -0
- erp_core/__pycache__/node_builder.cpython-311.pyc +0 -0
- erp_core/__pycache__/state_definer.cpython-311.pyc +0 -0
- erp_core/_event.py +39 -0
- erp_core/_llm.py +12 -0
- erp_core/asr_and_tts.py +62 -0
- erp_core/assistant_class.py +59 -0
- erp_core/config.py +2 -0
- erp_core/display_image.py +14 -0
- erp_core/entry_node.py +23 -0
- erp_core/node_builder.py +297 -0
- erp_core/node_builder/customer_relationship_management_node.py +0 -0
- erp_core/node_builder/finalcial_management_node.py +41 -0
- erp_core/node_builder/graph_builder_node.py +50 -0
- erp_core/node_builder/human_resource_node.py +42 -0
- erp_core/node_builder/primary_assistant_node.py +70 -0
- erp_core/node_builder/project_management_node.py +0 -0
- erp_core/node_builder/supply_chain_management_node.py +0 -0
- erp_core/runnable/__pycache__/crm_prompt.cpython-311.pyc +0 -0
- erp_core/runnable/__pycache__/fm_prompt.cpython-311.pyc +0 -0
- erp_core/runnable/__pycache__/hr_prompt.cpython-311.pyc +0 -0
- erp_core/runnable/__pycache__/pm_prompt.cpython-311.pyc +0 -0
- erp_core/runnable/__pycache__/primary_assistant_prompt.cpython-311.pyc +0 -0
- erp_core/runnable/__pycache__/scm_prompt.cpython-311.pyc +0 -0
- erp_core/runnable/crm_prompt.py +31 -0
- erp_core/runnable/fm_prompt.py +36 -0
- erp_core/runnable/hr_prompt.py +37 -0
- erp_core/runnable/pm_prompt.py +36 -0
- erp_core/runnable/primary_assistant_prompt.py +61 -0
- erp_core/runnable/scm_prompt.py +38 -0
- erp_core/state_definer.py +32 -0
- erp_core/tool_binder/__pycache__/tool_binder.cpython-311.pyc +0 -0
- erp_core/tool_binder/tool_binder.py +36 -0
.gradio/certificate.pem
ADDED
@@ -0,0 +1,31 @@
-----BEGIN CERTIFICATE-----
MIIFazCCA1OgAwIBAgIRAIIQz7DSQONZRGPgu2OCiwAwDQYJKoZIhvcNAQELBQAw
TzELMAkGA1UEBhMCVVMxKTAnBgNVBAoTIEludGVybmV0IFNlY3VyaXR5IFJlc2Vh
cmNoIEdyb3VwMRUwEwYDVQQDEwxJU1JHIFJvb3QgWDEwHhcNMTUwNjA0MTEwNDM4
WhcNMzUwNjA0MTEwNDM4WjBPMQswCQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJu
ZXQgU2VjdXJpdHkgUmVzZWFyY2ggR3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBY
MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK3oJHP0FDfzm54rVygc
h77ct984kIxuPOZXoHj3dcKi/vVqbvYATyjb3miGbESTtrFj/RQSa78f0uoxmyF+
0TM8ukj13Xnfs7j/EvEhmkvBioZxaUpmZmyPfjxwv60pIgbz5MDmgK7iS4+3mX6U
A5/TR5d8mUgjU+g4rk8Kb4Mu0UlXjIB0ttov0DiNewNwIRt18jA8+o+u3dpjq+sW
T8KOEUt+zwvo/7V3LvSye0rgTBIlDHCNAymg4VMk7BPZ7hm/ELNKjD+Jo2FR3qyH
B5T0Y3HsLuJvW5iB4YlcNHlsdu87kGJ55tukmi8mxdAQ4Q7e2RCOFvu396j3x+UC
B5iPNgiV5+I3lg02dZ77DnKxHZu8A/lJBdiB3QW0KtZB6awBdpUKD9jf1b0SHzUv
KBds0pjBqAlkd25HN7rOrFleaJ1/ctaJxQZBKT5ZPt0m9STJEadao0xAH0ahmbWn
OlFuhjuefXKnEgV4We0+UXgVCwOPjdAvBbI+e0ocS3MFEvzG6uBQE3xDk3SzynTn
jh8BCNAw1FtxNrQHusEwMFxIt4I7mKZ9YIqioymCzLq9gwQbooMDQaHWBfEbwrbw
qHyGO0aoSCqI3Haadr8faqU9GY/rOPNk3sgrDQoo//fb4hVC1CLQJ13hef4Y53CI
rU7m2Ys6xt0nUW7/vGT1M0NPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV
HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR5tFnme7bl5AFzgAiIyBpY9umbbjANBgkq
hkiG9w0BAQsFAAOCAgEAVR9YqbyyqFDQDLHYGmkgJykIrGF1XIpu+ILlaS/V9lZL
ubhzEFnTIZd+50xx+7LSYK05qAvqFyFWhfFQDlnrzuBZ6brJFe+GnY+EgPbk6ZGQ
3BebYhtF8GaV0nxvwuo77x/Py9auJ/GpsMiu/X1+mvoiBOv/2X/qkSsisRcOj/KK
NFtY2PwByVS5uCbMiogziUwthDyC3+6WVwW6LLv3xLfHTjuCvjHIInNzktHCgKQ5
ORAzI4JMPJ+GslWYHb4phowim57iaztXOoJwTdwJx4nLCgdNbOhdjsnvzqvHu7Ur
TkXWStAmzOVyyghqpZXjFaH3pO3JLF+l+/+sKAIuvtd7u+Nxe5AW0wdeRlN8NwdC
jNPElpzVmbUq4JUagEiuTDkHzsxHpFKVK7q4+63SM1N95R1NbdWhscdCb+ZAJzVc
oyi3B43njTOQ5yOf+1CceWxG1bQVs5ZufpsMljq4Ui0/1lvh+wjChP4kqKOJ2qxq
4RgqsahDYVvTH9w7jXbyLeiNdd8XM2w9U/t7y0Ff/9yi0GE44Za4rF2LN9d11TPA
mRGunUHBcnWEvgJBQl9nJEiU0Zsnvgc/ubhPgXRR4Xq37Z0j4r7g1SgEEzwxA57d
emyPxgcYxn/eR44/KJ4EBs+lVDR3veyJm+kXQ99b21/+jh5Xos1AnX5iItreGCc=
-----END CERTIFICATE-----

app.py
ADDED
@@ -0,0 +1,95 @@
from erp_core.node_builder import compile_graph
from erp_core._event import _print_event
from erp_core.asr_and_tts import transcribe, tts
import gradio as gr
import time

# Function to initialize a new chat state
def new_chat():
    thread_id = int(time.time() * 1000)
    graph = compile_graph()
    message_history = []
    tool_output = None
    print("New Chat Initialized")
    return {
        "thread_id": thread_id,
        "graph": graph,
        "message_history": message_history,
        "tool_output": tool_output,
        "assistant_state": "primary_assistant",
        "previous_state": "primary_assistant",
        "tts_audio": None,
    }, []

# Main processing function
def run(audio, state):
    try:
        if audio is None:
            return state["assistant_state"], state["message_history"], state["tts_audio"], None, state["tool_output"]

        user_input = transcribe(audio)
        print("User:", user_input)

        for event in state["graph"].stream(
            {"messages": ("user", user_input)},
            config={"configurable": {"thread_id": state["thread_id"]}},
        ):
            for value in event.values():
                if "messages" in value:
                    _printed = set()
                    assistant_states, assistant_messages = _print_event(value, _printed)
                    assistant_message = assistant_messages.content
                    print("State:", assistant_states)
                    print("Message:", assistant_messages)
                    if assistant_states is None:
                        state["assistant_state"] = state["previous_state"]
                    else:
                        state["previous_state"] = assistant_states
                        state["assistant_state"] = assistant_states
                    if assistant_states is None and "tool_call_id" not in assistant_messages:
                        state["tts_audio"] = tts(assistant_message)
                    if assistant_message == "" and assistant_states is None:
                        # print("\u001b[31mTool Call ID:\u001b[0m", assistant_messages.additional_kwargs)
                        state["tool_output"] = assistant_messages.additional_kwargs["tool_calls"]

        state["message_history"].append({"role": "user", "content": user_input})
        state["message_history"].append({"role": "assistant", "content": assistant_message})

        return (
            state["assistant_state"],
            state["message_history"],
            None,  # Clear audio input
            None,
            state["tool_output"],
        )
    except Exception as e:
        print(e)
        return None, [], None, None, None  # Clear audio input on error

# Gradio interface
with gr.Blocks() as demo:
    chatbot_state = gr.State(new_chat)  # Initialize new state per session

    with gr.Row():
        with gr.Column():
            assistant_state_output = gr.Textbox(label="Current Assistant", interactive=False)
            tool_output = gr.Textbox(label="Tool Output", interactive=False)
            tts_output = gr.Audio(type="filepath", label="Assistant Voice Output", autoplay=True)
        with gr.Column():
            chatbot = gr.Chatbot(label="Conversation", type="messages")

    audio_input = gr.Audio(sources="microphone", type="numpy", label="Speak", streaming=False)

    audio_input.change(
        fn=run,
        inputs=[audio_input, chatbot_state],  # Pass state as input
        outputs=[assistant_state_output, chatbot, tts_output, audio_input, tool_output],
    )

    button = gr.Button("Start Chat/New Chat")
    button.click(
        fn=new_chat,
        outputs=[chatbot_state, chatbot]  # Reset state
    )

demo.launch(share=True)

erp_core/Tools/__pycache__/customer_relationship_management.cpython-311.pyc
ADDED
Binary file (662 Bytes)

erp_core/Tools/__pycache__/finalcial_management.cpython-311.pyc
ADDED
Binary file (957 Bytes)

erp_core/Tools/__pycache__/human_resource.cpython-311.pyc
ADDED
Binary file (973 Bytes)

erp_core/Tools/__pycache__/project_management.cpython-311.pyc
ADDED
Binary file (720 Bytes)

erp_core/Tools/__pycache__/supply_chain_management.cpython-311.pyc
ADDED
Binary file (763 Bytes)

erp_core/Tools/customer_relationship_management.py
ADDED
@@ -0,0 +1,14 @@
from langchain_core.tools import tool

@tool
def customer_support(user_info: str):
    """Provide customer support."""
    return {
        "dialog_state": ["Customer_Relationship_Management"],
        "messages": [
            {
                "type": "text",
                "content": "Providing customer support"
            }
        ]
    }

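Because each of these department tools is wrapped with LangChain's @tool decorator, it can be exercised on its own before it is wired into the graph. A minimal sketch, assuming langchain_core is installed and the snippet is run from the repository root (the invocation below is illustrative and not part of the commit):

    from erp_core.Tools.customer_relationship_management import customer_support

    # @tool wraps the function in a structured tool; invoke it with a dict of arguments.
    result = customer_support.invoke({"user_info": "customer #42 reports a billing issue"})
    print(customer_support.name)      # "customer_support"
    print(result["dialog_state"])     # ["Customer_Relationship_Management"]
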
erp_core/Tools/finalcial_management.py
ADDED
@@ -0,0 +1,26 @@
from langchain_core.tools import tool

@tool
def register_purchase_request(user_info: str):
    """Register a purchase request."""
    return {
        "dialog_state": ["Financial_Management"],
        "messages": [
            {
                "type": "text",
                "content": "Registering a purchase request"
            }
        ]
    }

@tool
def view_expense_report(user_info: str):
    """View an expense report."""
    return {
        "dialog_state": ["Financial_Management"],
        "messages": [
            {
                "type": "text",
                "content": "Viewing an expense report"
            }
        ]
    }

erp_core/Tools/human_resource.py
ADDED
@@ -0,0 +1,27 @@
from langchain_core.tools import tool

@tool
def employee_database_access(user_info: str):
    """Access the employee database."""
    return {
        "dialog_state": ["Human_Resource"],
        "messages": [
            {
                "type": "text",
                "content": "Accessing the employee database"
            }
        ]
    }

@tool
def leave_management(user_info: str):
    """Enter the leave management department."""
    return {
        "dialog_state": ["Human_Resource"],
        "messages": [
            {
                "type": "text",
                "content": "Entering the leave management department"
            }
        ]
    }

erp_core/Tools/project_management.py
ADDED
@@ -0,0 +1,14 @@
from langchain_core.tools import tool

@tool
def project_status_check(project_name: str, status: str) -> dict:
    """Check the status of a project."""
    return {
        "dialog_state": ["Project_Management"],
        "messages": [
            {
                "type": "text",
                "content": f"The status of {project_name} is {status}."
            }
        ]
    }

erp_core/Tools/supply_chain_management.py
ADDED
@@ -0,0 +1,14 @@
from langchain_core.tools import tool

@tool
def product_quantity_check(product_name: str, quantity: int) -> dict:
    """Check the quantity of a product in the supply chain."""
    return {
        "dialog_state": ["Supply_Chain_Management"],
        "messages": [
            {
                "type": "text",
                "content": f"The quantity of {product_name} is {quantity}."
            }
        ]
    }

erp_core/__pycache__/_event.cpython-311.pyc
ADDED
Binary file (2.42 kB)

erp_core/__pycache__/_llm.cpython-311.pyc
ADDED
Binary file (621 Bytes)

erp_core/__pycache__/asr_and_tts.cpython-311.pyc
ADDED
Binary file (2.93 kB)

erp_core/__pycache__/assistant_class.cpython-311.pyc
ADDED
Binary file (3.34 kB)

erp_core/__pycache__/config.cpython-311.pyc
ADDED
Binary file (206 Bytes)

erp_core/__pycache__/entry_node.cpython-311.pyc
ADDED
Binary file (1.75 kB)

erp_core/__pycache__/node_builder.cpython-311.pyc
ADDED
Binary file (15 kB)

erp_core/__pycache__/state_definer.cpython-311.pyc
ADDED
Binary file (1.48 kB)

erp_core/_event.py
ADDED
@@ -0,0 +1,39 @@
from langchain_core.messages import ToolMessage
from langchain_core.runnables import RunnableLambda

from langgraph.prebuilt import ToolNode


def handle_tool_error(state) -> dict:
    error = state.get("error")
    tool_calls = state["messages"][-1].tool_calls
    return {
        "messages": [
            ToolMessage(
                content=f"Error: {repr(error)}\n please fix your mistakes.",
                tool_call_id=tc["id"],
            )
            for tc in tool_calls
        ]
    }

def create_tool_node_with_fallback(tools: list) -> dict:
    return ToolNode(tools).with_fallbacks(
        [RunnableLambda(handle_tool_error)], exception_key="error"
    )

def _print_event(event: dict, _printed: set, max_length=1500):
    current_state = event.get("dialog_state")
    # if current_state:
    #     print("Currently in: ", current_state)
    message = event.get("messages")
    if message:
        if isinstance(message, list):
            message = message[-1]
        if message.id not in _printed:
            msg_repr = message.pretty_repr(html=True)
            if len(msg_repr) > max_length:
                msg_repr = msg_repr[:max_length] + " ... (truncated)"
            # print(msg_repr)
            _printed.add(message.id)
    return current_state, message

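_print_event only needs an event dict with optional "dialog_state" and "messages" keys, so it can be smoke-tested without running the graph. A rough sketch, assuming langchain_core is available (the sample message is made up for illustration and is not part of the commit):

    from langchain_core.messages import AIMessage
    from erp_core._event import _print_event

    printed = set()
    event = {
        "dialog_state": ["financial_management"],
        "messages": [AIMessage(content="How can I help with your expense report?", id="msg-1")],
    }
    state, message = _print_event(event, printed)
    print(state)            # ["financial_management"]
    print(message.content)  # the assistant text; the `printed` set deduplicates repeated messages
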
erp_core/_llm.py
ADDED
@@ -0,0 +1,12 @@
# from dotenv import load_dotenv
# from langchain_anthropic import ChatAnthropic
from langchain_openai import ChatOpenAI
import erp_core.config as cfg
import os


# load_dotenv(override=True)
api_key = os.getenv('OPENAI_API_KEY')

# llm = ChatAnthropic(model=cfg.anthropic_model_name, temperature=1)
llm = ChatOpenAI(model=cfg.model_name, temperature=0)

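The module builds the shared ChatOpenAI client at import time, so OPENAI_API_KEY must be present in the environment before anything imports erp_core._llm. A quick check, not part of the commit, with a placeholder key shown purely for illustration:

    import os
    os.environ.setdefault("OPENAI_API_KEY", "sk-...")  # placeholder; export a real key instead

    from erp_core._llm import llm
    print(llm.invoke("Reply with the single word: ready").content)
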
erp_core/asr_and_tts.py
ADDED
@@ -0,0 +1,62 @@
import os
# from dotenv import load_dotenv
import tempfile
import scipy.io.wavfile as wavfile
from openai import OpenAI
from elevenlabs import ElevenLabs, VoiceSettings, play, stream

# Load API keys from .env file
# load_dotenv(override=True)
openai_api_key = os.getenv('OPENAI_API_KEY')
elevenlabs_api_key = os.getenv('ELEVENLABS_API_KEY')

# Initialize clients
openai_client = OpenAI()
elevenlabs_client = ElevenLabs(api_key=elevenlabs_api_key)

# Function to transcribe audio using OpenAI Whisper API
def transcribe(audio):
    if audio is None:
        return "No audio provided.", None

    # Audio is received as a tuple (sample_rate, audio_data)
    sample_rate, audio_data = audio

    # Save the audio data to a temporary file
    with tempfile.NamedTemporaryFile(suffix=".wav", delete=False) as temp_file:
        wavfile.write(temp_file.name, sample_rate, audio_data)
        temp_file_path = temp_file.name

    # Transcribe the audio file using OpenAI Whisper API
    with open(temp_file_path, "rb") as audio_file:
        transcription_response = openai_client.audio.transcriptions.create(
            model="whisper-1",
            file=audio_file,
            language="en",
        )

    transcription_text = transcription_response.text
    return transcription_text

def tts(response_text):
    # Now, use ElevenLabs to convert the transcription text to speech
    tts_response = elevenlabs_client.text_to_speech.convert(
        voice_id="CwhRBWXzGAHq8TQ4Fs17",
        optimize_streaming_latency="0",
        output_format="mp3_22050_32",
        text=response_text,
        voice_settings=VoiceSettings(
            stability=0.1,
            similarity_boost=0.3,
            style=0.2,
        ),
    )

    audio_file_path = "output_audio.mp3"
    with open(audio_file_path, "wb") as audio_file:
        for chunk in tts_response:
            audio_file.write(chunk)

    return audio_file_path

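transcribe expects the (sample_rate, numpy_array) tuple that Gradio's numpy audio component produces, so it can be driven headlessly with a synthetic buffer. A sketch, not part of the commit, assuming numpy is installed and a valid OPENAI_API_KEY is set (one second of silence stands in for microphone input):

    import numpy as np
    from erp_core.asr_and_tts import transcribe, tts

    sample_rate = 16000
    audio_data = np.zeros(sample_rate, dtype=np.int16)   # 1 s of silence as a stand-in recording
    text = transcribe((sample_rate, audio_data))          # Whisper API call; needs OPENAI_API_KEY
    print(text)

    # tts() additionally needs ELEVENLABS_API_KEY and writes output_audio.mp3:
    # audio_path = tts("Your purchase request has been registered.")
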
erp_core/assistant_class.py
ADDED
@@ -0,0 +1,59 @@
from langchain_anthropic import ChatAnthropic
from langchain_openai.chat_models import ChatOpenAI
from langchain_community.tools.tavily_search import TavilySearchResults
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.pydantic_v1 import BaseModel, Field
from langchain_core.runnables import Runnable, RunnableConfig
from langgraph.checkpoint.sqlite import SqliteSaver
from erp_core.state_definer import State
import time
from datetime import datetime
import getpass

class Assistant:
    """
    Assistant class to handle the conversation with the user.
    """
    def __init__(self, runnable: Runnable):
        self.runnable = runnable

    def __call__(self, state: State, config: RunnableConfig):
        while True:
            result = self.runnable.invoke(state)

            if not result.tool_calls and (
                not result.content
                or isinstance(result.content, list)
                and not result.content[0].get("text")
            ):
                messages = state["messages"] + [("user", "Respond with a real output.")]
                state = {**state, "messages": messages}
                messages = state["messages"] + [("user", "Respond with a real output.")]
                state = {**state, "messages": messages}
            else:
                break
        return {"messages": result}


class CompleteOrEscalate(BaseModel):
    """A tool to mark the current task as completed and/or to escalate control of the dialog to the main assistant,
    who can re-route the dialog based on the user's needs."""

    cancel: bool = True
    reason: str

    class Config:
        schema_extra = {
            "example": {
                "cancel": True,
                "reason": "User changed their mind about the current task.",
            },
            "example 2": {
                "cancel": True,
                "reason": "I have fully completed the task.",
            },
            "example 3": {
                "cancel": False,
                "reason": "I need to search the user's emails or calendar for more information.",
            },
        }

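CompleteOrEscalate is a plain Pydantic model; the routing functions in node_builder.py only compare tool-call names against CompleteOrEscalate.__name__, so it is worth seeing what the model looks like on its own. A small sketch, not part of the commit (the values are illustrative):

    from erp_core.assistant_class import CompleteOrEscalate

    escalation = CompleteOrEscalate(cancel=True, reason="User changed their mind about the current task.")
    print(CompleteOrEscalate.__name__)  # "CompleteOrEscalate" - the name the routers match on
    print(escalation.dict())            # {'cancel': True, 'reason': '...'}
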
erp_core/config.py
ADDED
@@ -0,0 +1,2 @@
model_name = "gpt-4o-mini"
#anthropic_model_name = "claude-3-5-sonnet-20240620"

erp_core/display_image.py
ADDED
@@ -0,0 +1,14 @@
from erp_core.node_builder import graph

try:
    image_path = "output_image.png"
    # Get the image bytes
    image_data = graph.get_graph(xray=True).draw_mermaid_png()

    # Save bytes to file
    with open(image_path, 'wb') as f:
        f.write(image_data)

    print(f"Image saved at {image_path}")
except Exception as e:
    print(f"An error occurred: {e}")

erp_core/entry_node.py
ADDED
@@ -0,0 +1,23 @@
from typing import Callable

from langchain_core.messages import ToolMessage
from erp_core.state_definer import State

def create_entry_node(assistant_name: str, new_dialog_state: str) -> Callable:
    def entry_node(state: State) -> dict:
        tool_call_id = state["messages"][-1].tool_calls[0]["id"]
        return {
            "messages": [
                ToolMessage(
                    content=f"The assistant is now the {assistant_name}. Reflect on the above conversation between the host assistant and the user."
                    f" The user's intent is unsatisfied. Use the provided tools to assist the user. Remember, you are {assistant_name},"
                    " and the booking, update, or other action is not complete until after you have successfully invoked the appropriate tool."
                    " If the user changes their mind or needs help for other tasks, call the CompleteOrEscalate function to let the primary host assistant take control."
                    " Do not mention who you are - just act as the proxy for the assistant.",
                    tool_call_id=tool_call_id,
                )
            ],
            "dialog_state": new_dialog_state,
        }

    return entry_node

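create_entry_node only reads the last message's first tool call, so the handoff message it produces can be inspected with a hand-built state. A sketch, not part of the commit, assuming a recent langchain_core where AIMessage accepts a tool_calls list (the tool call below is fabricated for illustration):

    from langchain_core.messages import AIMessage
    from erp_core.entry_node import create_entry_node

    enter_hr = create_entry_node("Human Resource Assistant", "human_resource")
    fake_state = {
        "messages": [
            AIMessage(
                content="",
                tool_calls=[{"name": "ToHumanResourceDepartment", "args": {}, "id": "call_1"}],
            )
        ]
    }
    handoff = enter_hr(fake_state)
    print(handoff["dialog_state"])         # "human_resource"
    print(handoff["messages"][0].content)  # the delegation instructions for the sub-assistant
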
erp_core/node_builder.py
ADDED
@@ -0,0 +1,297 @@
from typing import Literal

from erp_core.state_definer import State
from langchain_core.messages import ToolMessage
from erp_core._event import create_tool_node_with_fallback
from erp_core.assistant_class import Assistant, CompleteOrEscalate
from erp_core.entry_node import create_entry_node
from langgraph.graph import StateGraph
from langgraph.prebuilt import tools_condition
from langgraph.graph import END, StateGraph, START
from operator import __and__
from langgraph.checkpoint.memory import MemorySaver
# from langgraph.checkpoint.sqlite import SqliteSaver

from erp_core.runnable.fm_prompt import financial_management_runnable, financial_management_tools
from erp_core.runnable.scm_prompt import supply_chain_management_runnable, supply_chain_management_tools
from erp_core.runnable.hr_prompt import human_resource_runnable, human_resource_tools
from erp_core.runnable.pm_prompt import project_management_runnable, project_management_tools
from erp_core.runnable.crm_prompt import customer_relationship_management_runnable, customer_relationship_management_tools
from erp_core.runnable.primary_assistant_prompt import assistant_runnable, primary_assistant_tools

from erp_core.tool_binder.tool_binder import ToHumanResourceDepartment, ToFinancialManagementDepartment, ToSupplyChainManagementDepartment, ToProjectManagementDepartment, ToCustomerRelationshipManagementDepartment
builder = StateGraph(State)


# fetch user info
# ........................................................................
def user_info(state: State):
    return {"user_info": ""}

builder.add_node("fetch_user_info", user_info)
builder.add_edge(START, "fetch_user_info")


# financial management assistant
# ........................................................................

builder.add_node("enter_financial_management", create_entry_node("Financial Management Assistant", "financial_management"))
builder.add_node("financial_management", Assistant(financial_management_runnable))
builder.add_edge("enter_financial_management", "financial_management")
builder.add_node("financial_management_tools", create_tool_node_with_fallback(financial_management_tools))

def route_financial_management(
    state: State,
) -> Literal[
    "financial_management_tools",
    "leave_skill",
    "__end__",
]:
    route = tools_condition(state)
    if route == END:
        return END
    tool_calls = state["messages"][-1].tool_calls
    did_cancel = any(tc["name"] == CompleteOrEscalate.__name__ for tc in tool_calls)
    if did_cancel:
        return "leave_skill"
    safe_toolnames = [t.name for t in financial_management_tools]
    if all(tc["name"] in safe_toolnames for tc in tool_calls):
        return "financial_management_tools"
    return "financial_management_tools"

builder.add_edge("financial_management_tools", "financial_management")
builder.add_conditional_edges("financial_management", route_financial_management)

# supply chain management assistant
# ........................................................................

builder.add_node("enter_supply_chain_management", create_entry_node("Supply Chain Management Assistant", "supply_chain_management"))
builder.add_node("supply_chain_management", Assistant(supply_chain_management_runnable))
builder.add_edge("enter_supply_chain_management", "supply_chain_management")
builder.add_node("supply_chain_management_tools", create_tool_node_with_fallback(supply_chain_management_tools))

def route_supply_chain_management(
    state: State,
) -> Literal[
    "supply_chain_management_tools",
    "leave_skill",
    "__end__",
]:
    route = tools_condition(state)
    if route == END:
        return END
    tool_calls = state["messages"][-1].tool_calls
    did_cancel = any(tc["name"] == CompleteOrEscalate.__name__ for tc in tool_calls)
    if did_cancel:
        return "leave_skill"
    safe_toolnames = [t.name for t in supply_chain_management_tools]
    if all(tc["name"] in safe_toolnames for tc in tool_calls):
        return "supply_chain_management_tools"
    return "supply_chain_management_tools"

builder.add_edge("supply_chain_management_tools", "supply_chain_management")
builder.add_conditional_edges("supply_chain_management", route_supply_chain_management)


# human resource assistant
# ........................................................................

builder.add_node("enter_human_resource", create_entry_node("Human Resource Assistant", "human_resource"))
builder.add_node("human_resource", Assistant(human_resource_runnable))
builder.add_edge("enter_human_resource", "human_resource")
builder.add_node("human_resource_tools", create_tool_node_with_fallback(human_resource_tools))

def route_human_resource(
    state: State,
) -> Literal[
    "human_resource_tools",
    "leave_skill",
    "__end__",
]:
    route = tools_condition(state)
    if route == END:
        return END  # end the graph
    tool_calls = state["messages"][-1].tool_calls
    did_cancel = any(tc["name"] == CompleteOrEscalate.__name__ for tc in tool_calls)
    if did_cancel:
        return "leave_skill"

    safe_toolnames = [t.name for t in human_resource_tools]
    if all(tc["name"] in safe_toolnames for tc in tool_calls):
        return "human_resource_tools"
    return "human_resource_tools"

builder.add_edge("human_resource_tools", "human_resource")
builder.add_conditional_edges("human_resource", route_human_resource)


# Project management assistant
# ........................................................................

builder.add_node("enter_project_management", create_entry_node("Project Management Assistant", "project_management"))
builder.add_node("project_management", Assistant(project_management_runnable))
builder.add_edge("enter_project_management", "project_management")
builder.add_node("project_management_tools", create_tool_node_with_fallback(project_management_tools))

def route_project_management(
    state: State,
) -> Literal[
    "project_management_tools",
    "leave_skill",
    "__end__",
]:
    route = tools_condition(state)
    if route == END:
        return END
    tool_calls = state["messages"][-1].tool_calls
    did_cancel = any(tc["name"] == CompleteOrEscalate.__name__ for tc in tool_calls)
    if did_cancel:
        return "leave_skill"
    safe_toolnames = [t.name for t in project_management_tools]
    if all(tc["name"] in safe_toolnames for tc in tool_calls):
        return "project_management_tools"
    return "project_management_tools"

builder.add_edge("project_management_tools", "project_management")
builder.add_conditional_edges("project_management", route_project_management)


# customer relationship management assistant
# ........................................................................
builder.add_node("enter_customer_relationship_management", create_entry_node("Customer Relationship Management Assistant", "customer_relationship_management"))
builder.add_node("customer_relationship_management", Assistant(customer_relationship_management_runnable))
builder.add_edge("enter_customer_relationship_management", "customer_relationship_management")
builder.add_node("customer_relationship_management_tools", create_tool_node_with_fallback(customer_relationship_management_tools))

def route_customer_relationship_management(
    state: State,
) -> Literal[
    "customer_relationship_management_tools",
    "leave_skill",
    "__end__",
]:
    route = tools_condition(state)
    if route == END:
        return END
    tool_calls = state["messages"][-1].tool_calls
    did_cancel = any(tc["name"] == CompleteOrEscalate.__name__ for tc in tool_calls)
    if did_cancel:
        return "leave_skill"
    safe_toolnames = [t.name for t in customer_relationship_management_tools]
    if all(tc["name"] in safe_toolnames for tc in tool_calls):
        return "customer_relationship_management_tools"
    return "customer_relationship_management_tools"

builder.add_edge("customer_relationship_management_tools", "customer_relationship_management")
builder.add_conditional_edges("customer_relationship_management", route_customer_relationship_management)


# leave skill
# ........................................................................

def pop_dialog_state(state: State) -> dict:
    """Pop the dialog stack and return to the main assistant.

    This lets the full graph explicitly track the dialog flow and delegate control
    to specific sub-graphs.
    """
    messages = []
    if state["messages"][-1].tool_calls:
        # Note: Doesn't currently handle the edge case where the llm performs parallel tool calls
        messages.append(
            ToolMessage(
                content="Resuming dialog with the host assistant. Please reflect on the past conversation and assist the user as needed.",
                tool_call_id=state["messages"][-1].tool_calls[0]["id"],
            )
        )
    return {
        "dialog_state": "pop",
        "messages": messages,
    }

builder.add_node("leave_skill", pop_dialog_state)
builder.add_edge("leave_skill", "primary_assistant")


# primary assistant
# ........................................................................

builder.add_node("primary_assistant", Assistant(assistant_runnable))
builder.add_node("primary_assistant_tools", create_tool_node_with_fallback(primary_assistant_tools))

def route_primary_assistant(
    state: State,
) -> Literal[
    "primary_assistant_tools",
    "enter_human_resource",
    "enter_financial_management",
    "enter_supply_chain_management",
    "enter_project_management",
    "enter_customer_relationship_management",
    "__end__",
]:
    route = tools_condition(state)
    if route == END:
        return END
    tool_calls = state["messages"][-1].tool_calls
    if tool_calls:
        if tool_calls[0]["name"] == ToHumanResourceDepartment.__name__:
            return "enter_human_resource"
        elif tool_calls[0]["name"] == ToFinancialManagementDepartment.__name__:
            return "enter_financial_management"
        elif tool_calls[0]["name"] == ToSupplyChainManagementDepartment.__name__:
            return "enter_supply_chain_management"
        elif tool_calls[0]["name"] == ToProjectManagementDepartment.__name__:
            return "enter_project_management"
        elif tool_calls[0]["name"] == ToCustomerRelationshipManagementDepartment.__name__:
            return "enter_customer_relationship_management"
        return "primary_assistant_tools"
    raise ValueError("Invalid route")


# The assistant can route to one of the delegated assistants,
# directly use a tool, or directly respond to the user
builder.add_conditional_edges(
    "primary_assistant",
    route_primary_assistant,
    {
        "enter_human_resource": "enter_human_resource",
        "enter_financial_management": "enter_financial_management",
        "enter_supply_chain_management": "enter_supply_chain_management",
        "enter_project_management": "enter_project_management",
        "enter_customer_relationship_management": "enter_customer_relationship_management",
        "primary_assistant_tools": "primary_assistant_tools",
        END: END,
    },
)
builder.add_edge("primary_assistant_tools", "primary_assistant")


# Each delegated workflow can directly respond to the user
# When the user responds, we want to return to the currently active workflow
def route_to_workflow(
    state: State,
) -> Literal[
    "primary_assistant",
    "human_resource",
    "financial_management",
    "supply_chain_management",
    "project_management",
    "customer_relationship_management",
]:
    """If we are in a delegated state, route directly to the appropriate assistant."""
    dialog_state = state.get("dialog_state")
    if not dialog_state:
        return "primary_assistant"
    return dialog_state[-1]


builder.add_conditional_edges("fetch_user_info", route_to_workflow)


# Compile graph
def compile_graph():
    memory = MemorySaver()
    graph = builder.compile(checkpointer=memory)
    return graph

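compile_graph() is the same entry point app.py uses; outside Gradio, the compiled graph can be streamed directly with a thread_id so the MemorySaver checkpointer keeps per-conversation state. A text-only sketch, not part of the commit, requiring the same API keys as the app (the sample query is illustrative):

    from erp_core.node_builder import compile_graph

    graph = compile_graph()
    config = {"configurable": {"thread_id": "demo-thread-1"}}

    # Stream one user turn through the primary assistant and print whatever each node emits.
    for event in graph.stream({"messages": ("user", "I want to check my leave balance")}, config=config):
        for value in event.values():
            if "messages" in value:
                print(value["messages"])
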
erp_core/node_builder/customer_relationship_management_node.py
ADDED
File without changes

erp_core/node_builder/finalcial_management_node.py
ADDED
@@ -0,0 +1,41 @@
from typing import Literal

from erp_core.state_definer import State
from langchain_core.messages import ToolMessage
from erp_core._event import create_tool_node_with_fallback
from erp_core.assistant_class import Assistant, CompleteOrEscalate
from erp_core.entry_node import create_entry_node
from langgraph.graph import StateGraph
from langgraph.prebuilt import tools_condition
from langgraph.graph import END, StateGraph, START

from erp_core.runnable.fm_prompt import financial_management_runnable, financial_management_tools

builder = StateGraph(State)

builder.add_node("enter_financial_management", create_entry_node("Financial Management Assistant", "financial_management"))
builder.add_node("financial_management", Assistant(financial_management_runnable))
builder.add_edge("enter_financial_management", "financial_management")
builder.add_node("financial_management_tools", create_tool_node_with_fallback(financial_management_tools))

def route_financial_management(
    state: State,
) -> Literal[
    "financial_management_tools",
    "leave_skill",
    "__end__",
]:
    route = tools_condition(state)
    if route == END:
        return END
    tool_calls = state["messages"][-1].tool_calls
    did_cancel = any(tc["name"] == CompleteOrEscalate.__name__ for tc in tool_calls)
    if did_cancel:
        return "leave_skill"
    safe_toolnames = [t.name for t in financial_management_tools]
    if all(tc["name"] in safe_toolnames for tc in tool_calls):
        return "financial_management_tools"
    return "financial_management_tools"

builder.add_edge("financial_management_tools", "financial_management")
builder.add_conditional_edges("financial_management", route_financial_management)

erp_core/node_builder/graph_builder_node.py
ADDED
@@ -0,0 +1,50 @@
from typing import Literal

from erp_core.state_definer import State
from langchain_core.messages import ToolMessage
from erp_core._event import create_tool_node_with_fallback
from erp_core.assistant_class import Assistant, CompleteOrEscalate
from erp_core.entry_node import create_entry_node
from langgraph.graph import StateGraph
from langgraph.prebuilt import tools_condition
from langgraph.graph import END, StateGraph, START

from erp_core.runnable.fm_prompt import financial_management_runnable, financial_management_tools
from erp_core.runnable.scm_prompt import supply_chain_management_runnable, supply_chain_management_tools
from erp_core.runnable.hr_prompt import human_resource_runnable, human_resource_tools
from erp_core.runnable.pm_prompt import project_management_runnable, project_management_tools
from erp_core.runnable.crm_prompt import customer_relationship_management_runnable, customer_relationship_management_tools


builder = StateGraph(State)


def user_info(state: State):
    return {"user_info": "Kamal Ahmed, mobile number: 1234567890"}


builder.add_node("fetch_user_info", user_info)
builder.add_edge(START, "fetch_user_info")

def pop_dialog_state(state: State) -> dict:
    """Pop the dialog stack and return to the main assistant.

    This lets the full graph explicitly track the dialog flow and delegate control
    to specific sub-graphs.
    """
    messages = []
    if state["messages"][-1].tool_calls:
        # Note: Doesn't currently handle the edge case where the llm performs parallel tool calls
        messages.append(
            ToolMessage(
                content="Resuming dialog with the host assistant. Please reflect on the past conversation and assist the user as needed.",
                tool_call_id=state["messages"][-1].tool_calls[0]["id"],
            )
        )
    return {
        "dialog_state": "pop",
        "messages": messages,
    }

builder.add_node("leave_skill", pop_dialog_state)
builder.add_edge("leave_skill", "primary_assistant")

erp_core/node_builder/human_resource_node.py
ADDED
@@ -0,0 +1,42 @@
from typing import Literal

from erp_core.state_definer import State
from langchain_core.messages import ToolMessage
from erp_core._event import create_tool_node_with_fallback
from erp_core.assistant_class import Assistant, CompleteOrEscalate
from erp_core.entry_node import create_entry_node
from langgraph.graph import StateGraph
from langgraph.prebuilt import tools_condition
from langgraph.graph import END, StateGraph, START

from erp_core.runnable.hr_prompt import human_resource_runnable, human_resource_tools

builder = StateGraph(State)

builder.add_node("enter_human_resource_management", create_entry_node("Human Resource Management Assistant", "human_resource_management"))
builder.add_node("human_resource_management", Assistant(human_resource_runnable))
builder.add_edge("enter_human_resource_management", "human_resource_management")
builder.add_node("human_resource_management_tools", create_tool_node_with_fallback(human_resource_tools))

def route_human_resource_management(
    state: State,
) -> Literal[
    "human_resource_management_tools",
    "leave_skill",
    "__end__",
]:
    route = tools_condition(state)
    if route == END:
        return END  # end the graph
    tool_calls = state["messages"][-1].tool_calls
    did_cancel = any(tc["name"] == CompleteOrEscalate.__name__ for tc in tool_calls)
    if did_cancel:
        return "leave_skill"

    safe_toolnames = [t.name for t in human_resource_tools]
    if all(tc["name"] in safe_toolnames for tc in tool_calls):
        return "human_resource_management_tools"
    return "human_resource_management_tools"

builder.add_edge("human_resource_management_tools", "human_resource_management")
builder.add_conditional_edges("human_resource_management", route_human_resource_management)

erp_core/node_builder/primary_assistant_node.py
ADDED
@@ -0,0 +1,70 @@
from operator import __and__
from langgraph.checkpoint.sqlite import SqliteSaver

# Primary assistant
builder.add_node("primary_assistant", Assistant(assistant_runnable))
builder.add_node(
    "primary_assistant_tools", create_tool_node_with_fallback(primary_assistant_tools)
)


def route_primary_assistant(
    state: State,
) -> Literal[
    "primary_assistant_tools",
    "enter_internet_problem",
    "enter_outgoing_call_problem",
    "__end__",
]:
    route = tools_condition(state)
    if route == END:
        return END
    tool_calls = state["messages"][-1].tool_calls
    if tool_calls:
        if tool_calls[0]["name"] == ToInternetProblem.__name__:
            return "enter_internet_problem"
        elif tool_calls[0]["name"] == ToOutgoingCallProblem.__name__:
            return "enter_outgoing_call_problem"
        return "primary_assistant_tools"
    raise ValueError("Invalid route")


# The assistant can route to one of the delegated assistants,
# directly use a tool, or directly respond to the user
builder.add_conditional_edges(
    "primary_assistant",
    route_primary_assistant,
    {
        "enter_internet_problem": "enter_internet_problem",
        "enter_outgoing_call_problem": "enter_outgoing_call_problem",
        "primary_assistant_tools": "primary_assistant_tools",
        END: END,
    },
)
builder.add_edge("primary_assistant_tools", "primary_assistant")


# Each delegated workflow can directly respond to the user
# When the user responds, we want to return to the currently active workflow
def route_to_workflow(
    state: State,
) -> Literal[
    "primary_assistant",
    "internet_problem",
    "outgoing_call_problem",
]:
    """If we are in a delegated state, route directly to the appropriate assistant."""
    dialog_state = state.get("dialog_state")
    if not dialog_state:
        return "primary_assistant"
    return dialog_state[-1]


builder.add_conditional_edges("fetch_user_info", route_to_workflow)

# Compile graph
memory = SqliteSaver.from_conn_string(":memory:")
graph = builder.compile(
    checkpointer=memory,
    # Let the user approve or deny the use of sensitive tools
)

erp_core/node_builder/project_management_node.py
ADDED
File without changes

erp_core/node_builder/supply_chain_management_node.py
ADDED
File without changes

erp_core/runnable/__pycache__/crm_prompt.cpython-311.pyc
ADDED
Binary file (2.03 kB)

erp_core/runnable/__pycache__/fm_prompt.cpython-311.pyc
ADDED
Binary file (2.14 kB)

erp_core/runnable/__pycache__/hr_prompt.cpython-311.pyc
ADDED
Binary file (1.97 kB)

erp_core/runnable/__pycache__/pm_prompt.cpython-311.pyc
ADDED
Binary file (2.05 kB)

erp_core/runnable/__pycache__/primary_assistant_prompt.cpython-311.pyc
ADDED
Binary file (3.2 kB)

erp_core/runnable/__pycache__/scm_prompt.cpython-311.pyc
ADDED
Binary file (2.3 kB)

erp_core/runnable/crm_prompt.py
ADDED
@@ -0,0 +1,31 @@
from datetime import datetime
from langchain_core.prompts import ChatPromptTemplate

from erp_core.Tools.customer_relationship_management import customer_support
from erp_core.assistant_class import CompleteOrEscalate
from erp_core._llm import llm
customer_relationship_management_prompt = ChatPromptTemplate.from_messages(
    [
        (
            "system",
            "You are a specialized assistant for handling customer relationship management issues. "
            "The primary assistant delegates work to you whenever the user needs help with their customer relationship management problems. "
            "Introduce yourself as a customer relationship management assistant"
            "Start conversation respectfully."
            "Diagnose the user query based on the user's input"
            "If any information is missing to call proper tool, ask the user for clarification."
            "While ready to call tool ask the user for confirmation once again by repeating the user's query."
            "If the user confirms that it is correct only then call proper tool to solve user query. It is very important."
            "Remember that an issue isn't resolved until the relevant tool or method has successfully been used."
            "\n\nCurrent time: {time}."
            "\n\nIf the user needs help, and none of your tools are appropriate for it, then"
            ' "CompleteOrEscalate" the dialog to the host assistant. Do not waste the user\'s time. Do not make up invalid tools or functions.',
        ),
        ("placeholder", "{messages}"),
    ]
).partial(time=datetime.now())

customer_relationship_management_tools = [customer_support]
customer_relationship_management_runnable = customer_relationship_management_prompt | llm.bind_tools(
    customer_relationship_management_tools + [CompleteOrEscalate]
)

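Each *_runnable is just prompt | llm.bind_tools(...), so it can be invoked directly with a {"messages": [...]} dict to see whether the model answers in text or proposes a tool call. A sketch, not part of the commit, requiring OPENAI_API_KEY (the user message is made up):

    from erp_core.runnable.crm_prompt import customer_relationship_management_runnable

    result = customer_relationship_management_runnable.invoke(
        {"messages": [("user", "A customer is unhappy with their last order. Can you help?")]}
    )
    # Either a plain text reply or a proposed call to customer_support / CompleteOrEscalate
    print(result.tool_calls or result.content)
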
erp_core/runnable/fm_prompt.py
ADDED
@@ -0,0 +1,36 @@
from datetime import datetime
from langchain_core.prompts import ChatPromptTemplate

from erp_core.Tools.finalcial_management import register_purchase_request, view_expense_report
from erp_core.assistant_class import CompleteOrEscalate
from erp_core._llm import llm
financial_management_prompt = ChatPromptTemplate.from_messages(
    [
        (
            "system",
            "You are a specialized assistant for handling financial management issues. "
            "The primary assistant delegates work to you whenever the user needs help with their financial management problems. "
            "Introduce yourself as a financial management assistant"
            "Start conversation respectfully."
            "Diagnose the user query based on the user's input"
            "If any information is missing to call proper tool, ask the user for clarification."
            "While ready to call tool ask the user for confirmation once again by repeating the user's query. This is very important"
            "If the user confirms that it is correct only then call proper tool to solve user query. It is very important."
            "Remember that an issue isn't resolved until the relevant tool or method has successfully been used."
            "\nCurrent time: {time}."
            '\n\nIf the user needs help, and none of your tools are appropriate for it, then "CompleteOrEscalate" the dialog to the host assistant.'
            "Do not make up invalid tools or functions."
            "\n\nSome examples for which you should CompleteOrEscalate:\n"
            " - 'what's the weather like this time of year?'\n"
            " - 'nevermind I think I'll try again later'\n"
            " - 'Financial management issue resolved'",
        ),
        ("placeholder", "{messages}"),
    ]

).partial(time=datetime.now())

financial_management_tools = [register_purchase_request, view_expense_report]
financial_management_runnable = financial_management_prompt | llm.bind_tools(
    financial_management_tools + [CompleteOrEscalate]
)

erp_core/runnable/hr_prompt.py
ADDED
@@ -0,0 +1,37 @@
+from datetime import datetime
+from langchain_core.prompts import ChatPromptTemplate
+
+from erp_core.Tools.human_resource import employee_database_access, leave_management
+from erp_core.assistant_class import CompleteOrEscalate
+from erp_core._llm import llm
+
+
+human_resource_prompt = ChatPromptTemplate.from_messages(
+    [
+        (
+            "system",
+            "You are a specialized assistant for handling human resource issues. "
+            "The primary assistant delegates work to you whenever the user needs help with their human resource problems. "
+            "Introduce yourself as a human resource assistant. "
+            "Start the conversation respectfully. "
+            "Diagnose the user's query based on their input. "
+            "If any information needed to call the proper tool is missing, ask the user for clarification. "
+            "When you are ready to call a tool, ask the user for confirmation once again by repeating their query. "
+            "Only if the user confirms it is correct should you call the proper tool to resolve the query. This is very important. "
+            "Remember that an issue isn't resolved until the relevant tool or method has successfully been used."
+            "\n\nCurrent user human resource information:\n\n{user_info}\n"
+            "\nCurrent time: {time}."
+            "\n\nIf the user needs help, and none of your tools are appropriate for it, then"
+            ' "CompleteOrEscalate" the dialog to the host assistant. Do not make up invalid tools or functions.',
+        ),
+        ("placeholder", "{messages}"),
+    ]
+).partial(time=datetime.now())
+
+human_resource_tools = [
+    employee_database_access,
+    leave_management
+]
+human_resource_runnable = human_resource_prompt | llm.bind_tools(
+    human_resource_tools + [CompleteOrEscalate]
+)
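
Unlike the CRM and finance prompts, this one also references {user_info}; since only {time} is pre-filled via .partial, the caller has to supply user_info together with the messages at invoke time. A short sketch under that assumption; the employee details shown are invented:

# Invocation sketch; the user_info value is a hypothetical placeholder.
from langchain_core.messages import HumanMessage

from erp_core.runnable.hr_prompt import human_resource_runnable

ai_message = human_resource_runnable.invoke(
    {
        "messages": [HumanMessage(content="How many leave days do I have left this year?")],
        "user_info": "employee_id=E-1024, name=Jane Doe, department=Engineering",
    }
)
print(ai_message.tool_calls)  # may contain a leave_management or CompleteOrEscalate call
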
erp_core/runnable/pm_prompt.py
ADDED
@@ -0,0 +1,36 @@
+from datetime import datetime
+from langchain_core.prompts import ChatPromptTemplate
+
+from erp_core.Tools.project_management import project_status_check
+from erp_core.assistant_class import CompleteOrEscalate
+from erp_core._llm import llm
+
+project_management_prompt = ChatPromptTemplate.from_messages(
+    [
+        (
+            "system",
+            "You are a specialized assistant for handling project management issues. "
+            "The primary assistant delegates work to you whenever the user needs help troubleshooting issues with project management. "
+            "Introduce yourself as a project management assistant. "
+            "Start the conversation respectfully. "
+            "Diagnose the user's query based on their input. "
+            "If any information needed to call the proper tool is missing, ask the user for clarification. "
+            "When you are ready to call a tool, ask the user for confirmation once again by repeating their query. "
+            "Only if the user confirms it is correct should you call the proper tool to resolve the query. This is very important. "
+            "Remember that an issue isn't resolved until the relevant tool or method has successfully been used."
+            "\nCurrent time: {time}."
+            '\n\nIf the user needs help, and none of your tools are appropriate for it, then "CompleteOrEscalate" the dialog to the host assistant. '
+            "Do not make up invalid tools or functions."
+            "\n\nSome examples for which you should CompleteOrEscalate:\n"
+            " - 'what's the weather like this time of year?'\n"
+            " - 'nevermind I think I'll try again later'\n",
+        ),
+        ("placeholder", "{messages}"),
+    ]
+
+).partial(time=datetime.now())
+
+project_management_tools = [project_status_check]
+project_management_runnable = project_management_prompt | llm.bind_tools(
+    project_management_tools + [CompleteOrEscalate]
+)
erp_core/runnable/primary_assistant_prompt.py
ADDED
@@ -0,0 +1,61 @@
+from langchain_core.prompts import ChatPromptTemplate
+from datetime import datetime
+
+from erp_core.tool_binder.tool_binder import (
+    ToCustomerRelationshipManagementDepartment,
+    ToFinancialManagementDepartment,
+    ToHumanResourceDepartment,
+    ToProjectManagementDepartment,
+    ToSupplyChainManagementDepartment
+)
+from erp_core._llm import llm
+
+primary_assistant_prompt = ChatPromptTemplate.from_messages(
+    [
+        (
+            "system",
+            "You are an intelligent ERP support assistant, designed to assist users in navigating various departments within the ERP system and resolving their queries. "
+            "Your primary goal is to guide the user to the right department or help them complete specific tasks using the ERP tools at your disposal. "
+            "No matter how the user starts the conversation, always begin respectfully. "
+            "Introduce yourself as an ERP support assistant. "
+            "Start the conversation respectfully. Greet the user with salam, saying 'Assalamu Alaikum'. Do not say 'Wa Alaikum Assalam'. "
+            "Do not repeat the salam every turn; give it only once per conversation. "
+            "The user will speak either English or Arabic; in most cases, English. "
+            "Detect the language and respond in the same language. "
+            "Do not speak any language other than English or Arabic. This is very important. "
+            "For department-specific issues, route the user's request to the appropriate department tool based on their needs. "
+            "Listen carefully to the user's input, identify their requirement, and confirm the department or action needed. "
+            "For registering a purchase request or getting a financial report, go to the financial management department. "
+            "For a project status check, go to the project management department. "
+            "For managing customer support, go to the customer relationship management department. "
+            "For employee database access and leave management, go to the human resource management department. "
+            "For a product quantity check, go to the supply chain management department. "
+            "If the user's request doesn't align with any of the available departments, simply say 'I'm sorry, I don't know how to help with that.' "
+            "Be efficient and direct; avoid unnecessary steps or delays. "
+            "Ensure the user is directed to the right department or help within the ERP system."
+            "\n\nCurrent user information:\n\n{user_info}\n"
+            "\nCurrent time: {time}."
+            '\n\nIf the user\'s request is outside the scope of the ERP tools, or they change their mind, use "CompleteOrEscalate" to return to the main assistant. '
+            "Do not waste the user's time. Do not make up invalid tools or functions.",
+        ),
+        ("placeholder", "{messages}"),
+    ]
+
+).partial(time=datetime.now())
+primary_assistant_tools = [
+    ToFinancialManagementDepartment,
+    ToProjectManagementDepartment,
+    ToCustomerRelationshipManagementDepartment,
+    ToHumanResourceDepartment,
+    ToSupplyChainManagementDepartment
+]
+assistant_runnable = primary_assistant_prompt | llm.bind_tools(
+    primary_assistant_tools
+    + [
+        ToFinancialManagementDepartment,
+        ToProjectManagementDepartment,
+        ToCustomerRelationshipManagementDepartment,
+        ToHumanResourceDepartment,
+        ToSupplyChainManagementDepartment
+    ]
+)
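
The five To...Department classes bound above are not executable tools; they act as routing signals that a graph can dispatch on. A hedged sketch of how such a router could map the emitted tool-call name onto node names (the actual routing is done by the graph-building code in the repository, which is not shown here and may differ; the node names follow the dialog_state literals in erp_core/state_definer.py):

# Routing sketch (illustrative only).
def route_primary_assistant(state: dict) -> str:
    ai_message = state["messages"][-1]
    tool_calls = getattr(ai_message, "tool_calls", None)
    if not tool_calls:
        return "__end__"  # the assistant answered directly, no delegation needed
    return {
        "ToFinancialManagementDepartment": "Financial_Management",
        "ToProjectManagementDepartment": "Project_Management",
        "ToCustomerRelationshipManagementDepartment": "Customer_Relationship_Management",
        "ToHumanResourceDepartment": "Human_Resource",
        "ToSupplyChainManagementDepartment": "Supply_Chain_Management",
    }.get(tool_calls[0]["name"], "__end__")
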
erp_core/runnable/scm_prompt.py
ADDED
@@ -0,0 +1,38 @@
+from datetime import datetime
+from langchain_core.prompts import ChatPromptTemplate
+
+from erp_core.Tools.supply_chain_management import product_quantity_check
+from erp_core.assistant_class import CompleteOrEscalate
+from erp_core._llm import llm
+supply_chain_management_prompt = ChatPromptTemplate.from_messages(
+    [
+        (
+            "system",
+            "You are a specialized assistant for handling supply chain management issues. "
+            "The primary assistant delegates work to you whenever the user needs help troubleshooting issues with supply chain management. "
+            "Introduce yourself as a supply chain management assistant. "
+            "Start the conversation respectfully. "
+            "Diagnose the problem based on the user's input and confirm the troubleshooting steps with the customer. "
+            "If any information needed to call the proper tool is missing, ask the user for clarification. "
+            "When you are ready to call a tool, ask the user for confirmation once again by repeating their query. "
+            "Only if the user confirms it is correct should you call the proper tool to resolve the query. This is very important. "
+            "Remember that an issue isn't resolved until the relevant tool or method has successfully been used."
+            "\nCurrent time: {time}."
+            '\n\nIf the user needs help, and none of your tools are appropriate for it, then "CompleteOrEscalate" the dialog to the host assistant.'
+            " Do not waste the user's time. Do not make up invalid tools or functions."
+            "\n\nSome examples for which you should CompleteOrEscalate:\n"
+            " - 'what's the weather like this time of year?'\n"
+            " - 'nevermind I think I'll try again later'\n"
+            " - 'I need help with another issue instead'\n"
+            " - 'Oh wait, I think the problem resolved itself'\n"
+            " - 'Call issue resolved'",
+        ),
+        ("placeholder", "{messages}"),
+    ]
+
+).partial(time=datetime.now())
+
+supply_chain_management_tools = [product_quantity_check]
+supply_chain_management_runnable = supply_chain_management_prompt | llm.bind_tools(
+    supply_chain_management_tools + [CompleteOrEscalate]
+)
erp_core/state_definer.py
ADDED
@@ -0,0 +1,32 @@
+from typing import Annotated, Literal, Optional
+
+from typing_extensions import TypedDict
+
+from langgraph.graph.message import AnyMessage, add_messages
+
+
+def update_dialog_stack(left: list[str], right: Optional[str]) -> list[str]:
+    """Push or pop the state."""
+    if right is None:
+        return left
+    if right == "pop":
+        return left[:-1]
+    return left + [right]
+
+
+class State(TypedDict):
+    messages: Annotated[list[AnyMessage], add_messages]
+    user_info: str
+    dialog_state: Annotated[
+        list[
+            Literal[
+                "assistant",
+                "Human_Resource",
+                "Financial_Management",
+                "Supply_Chain_Management",
+                "Project_Management",
+                "Customer_Relationship_Management",
+            ]
+        ],
+        update_dialog_stack,
+    ]
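
The update_dialog_stack reducer is what lets a delegation push a department onto dialog_state and a later escalation pop it off again. A quick worked example of its behavior (values chosen for illustration):

# Worked example of the dialog-stack reducer above.
from erp_core.state_definer import update_dialog_stack

stack = ["assistant"]
stack = update_dialog_stack(stack, "Financial_Management")  # push -> ["assistant", "Financial_Management"]
stack = update_dialog_stack(stack, None)                    # None -> unchanged
stack = update_dialog_stack(stack, "pop")                   # pop  -> ["assistant"]
assert stack == ["assistant"]
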
erp_core/tool_binder/__pycache__/tool_binder.cpython-311.pyc
ADDED
Binary file (2.97 kB)
erp_core/tool_binder/tool_binder.py
ADDED
@@ -0,0 +1,36 @@
+from langchain_core.pydantic_v1 import BaseModel, Field
+
+class ToFinancialManagementDepartment(BaseModel):
+    """Transfers work to a specialized assistant to handle financial management department issues."""
+
+    request: str = Field(
+        description="Any necessary follow-up questions the financial management department assistant should clarify before proceeding."
+    )
+
+class ToProjectManagementDepartment(BaseModel):
+    """Transfers work to a specialized assistant to handle project management issues."""
+
+    request: str = Field(
+        description="Any necessary follow-up questions the project management department assistant should clarify before proceeding."
+    )
+
+class ToCustomerRelationshipManagementDepartment(BaseModel):
+    """Transfers work to a specialized assistant to handle customer relationship management issues."""
+
+    request: str = Field(
+        description="Any necessary follow-up questions the customer relationship management assistant should clarify before proceeding."
+    )
+
+class ToHumanResourceDepartment(BaseModel):
+    """Transfers work to a specialized assistant to handle human resource issues."""
+
+    request: str = Field(
+        description="Any necessary follow-up questions the human resource department assistant should clarify before proceeding."
+    )
+
+class ToSupplyChainManagementDepartment(BaseModel):
+    """Transfers work to a specialized assistant to handle supply chain issues."""
+
+    request: str = Field(
+        description="Any necessary follow-up questions the supply chain department assistant should clarify before proceeding."
+    )
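
In the LangChain convention, when these Pydantic models are passed to llm.bind_tools, each class name becomes the tool name and request its single string argument, so a tool call emitted by the model can be validated back into the schema. A small sketch under that assumption; the request text is invented:

# Validation sketch for a routing tool call (arguments are hypothetical).
from erp_core.tool_binder.tool_binder import ToFinancialManagementDepartment

tool_args = {"request": "Register a purchase request for 10 laptops."}
routing = ToFinancialManagementDepartment(**tool_args)
print(routing.request)
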