Skip to content

Commit c739a19

Browse files
committed
Add background chat and override of system message capability. Example code for now, in conversation_internal() inside app.py.
1 parent 3b3a9eb commit c739a19

File tree

1 file changed

+79
-16
lines changed

1 file changed

+79
-16
lines changed

app.py

Lines changed: 79 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,11 @@
11
import copy
22
import json
3+
import re
34
import os
45
import logging
56
import uuid
67
import httpx
8+
from datetime import datetime, timezone
79
from quart import (
810
Blueprint,
911
Quart,
@@ -14,6 +16,7 @@
1416
render_template,
1517
)
1618

19+
from openai.types.chat import chat_completion
1720
from openai import AsyncAzureOpenAI
1821
from azure.identity.aio import (
1922
DefaultAzureCredential,
@@ -185,14 +188,14 @@ def init_cosmosdb_client():
185188
return cosmos_conversation_client
186189

187190

188-
def prepare_model_args(request_body, request_headers):
191+
def prepare_model_args(request_body, request_headers, custom_system_message = None):
189192
request_messages = request_body.get("messages", [])
190193
messages = []
191194
if not app_settings.datasource:
192195
messages = [
193196
{
194197
"role": "system",
195-
"content": app_settings.azure_openai.system_message
198+
"content": custom_system_message or app_settings.azure_openai.system_message
196199
}
197200
]
198201

@@ -208,7 +211,7 @@ def prepare_model_args(request_body, request_headers):
208211
user_json = None
209212
if (MS_DEFENDER_ENABLED):
210213
authenticated_user_details = get_authenticated_user_details(request_headers)
211-
conversation_id = request_body.get("conversation_id", None)
214+
conversation_id = request_body.get("conversation_id", None)
212215
user_json = get_msdefender_user_json(authenticated_user_details, request_headers, conversation_id)
213216

214217
model_args = {
@@ -303,20 +306,21 @@ async def promptflow_request(request):
303306
logging.error(f"An error occurred while making promptflow_request: {e}")
304307

305308

306-
async def send_chat_request(request_body, request_headers):
309+
async def send_chat_request(request_body, request_headers, system_message = None):
307310
filtered_messages = []
308311
messages = request_body.get("messages", [])
309312
for message in messages:
310313
if message.get("role") != 'tool':
311314
filtered_messages.append(message)
312315

313316
request_body['messages'] = filtered_messages
314-
model_args = prepare_model_args(request_body, request_headers)
317+
model_args = prepare_model_args(request_body, request_headers, system_message)
315318

316319
try:
317320
azure_openai_client = init_openai_client()
318321
raw_response = await azure_openai_client.chat.completions.with_raw_response.create(**model_args)
319322
response = raw_response.parse()
323+
320324
apim_request_id = raw_response.headers.get("apim-request-id")
321325
except Exception as e:
322326
logging.exception("Exception in send_chat_request")
@@ -341,8 +345,8 @@ async def complete_chat_request(request_body, request_headers):
341345
return format_non_streaming_response(response, history_metadata, apim_request_id)
342346

343347

344-
async def stream_chat_request(request_body, request_headers):
345-
response, apim_request_id = await send_chat_request(request_body, request_headers)
348+
async def stream_chat_request(request_body, request_headers, system_message = None):
349+
response, apim_request_id = await send_chat_request(request_body, request_headers, system_message)
346350
history_metadata = request_body.get("history_metadata", {})
347351

348352
async def generate():
@@ -351,18 +355,77 @@ async def generate():
351355

352356
return generate()
353357

358+
def process_raw_response(raw_content):
    """Collapse a streamed NDJSON chat response into a single JSON payload.

    Each non-empty line of *raw_content* is expected to be a JSON object in
    the streaming-chunk shape produced by ``format_as_ndjson`` — i.e.
    ``{"id": ..., "model": ..., "history_metadata": ...,
    "choices": [{"messages": [{"content": ...}]}]}``.  The fragments of
    message content are concatenated, in arrival order, into one assistant
    message.

    Args:
        raw_content: The raw response body as UTF-8 encoded bytes.

    Returns:
        dict with keys ``messages`` (a single combined assistant message,
        stamped with the current UTC time), ``model`` and
        ``history_metadata`` (copied from the first chunk that carries a
        non-null model).
    """
    final_json = {
        "messages": [],
        "model": None,
        "history_metadata": None,
    }
    combined_content = ""
    chat_id = ""

    for line in raw_content.decode("utf-8").split("\n"):
        if not line.strip():
            continue
        try:
            obj = json.loads(line)
        except json.JSONDecodeError as e:
            # Parse lazily so one malformed chunk is skipped instead of
            # aborting the whole response (the original parsed all lines
            # up front, which made its per-chunk error handling dead code).
            logging.warning("Skipping unparseable response line: %s", e)
            continue

        try:
            # Top-level properties are identical across chunks; record them
            # once, from the first chunk that actually carries a model.
            if final_json["model"] is None:
                final_json["model"] = obj.get("model")
                final_json["history_metadata"] = obj.get("history_metadata")
                chat_id = obj.get("id")

            # Accumulate the streamed message fragments.
            for choice in obj.get("choices", []):
                for message in choice.get("messages", []):
                    combined_content += message.get("content", "")
        except Exception as e:
            logging.warning("Error processing response chunk: %s", e)
            continue

    # Emit the combined content as a single assistant message, timestamped
    # in the ISO-8601 millisecond format the frontend expects.
    final_json["messages"].append({
        "id": chat_id,
        "role": "assistant",
        "content": combined_content,
        "date": datetime.now(timezone.utc).strftime('%Y-%m-%dT%H:%M:%S.%f')[:-3] + 'Z'
    })

    return final_json
354406

355407
async def conversation_internal(request_body, request_headers):
356408
try:
357-
if app_settings.azure_openai.stream and not app_settings.base_settings.use_promptflow:
358-
result = await stream_chat_request(request_body, request_headers)
359-
response = await make_response(format_as_ndjson(result))
360-
response.timeout = None
361-
response.mimetype = "application/json-lines"
362-
return response
363-
else:
364-
result = await complete_chat_request(request_body, request_headers)
365-
return jsonify(result)
409+
410+
# Background conversation with custom system message
411+
updated_request_body = copy.deepcopy(request_body)
412+
updated_request_body["messages"][0]["content"] = "Forsooth, I am the herald of the Lady Ruthylin P. Chamberlain, Duchess of Dade, Marchioness of Miami. She would like to make your acquaintance, good sir."
413+
updated_request_body["history_metadata"] = None
414+
print(f"Updated Request:\n{updated_request_body}\n")
415+
result = await stream_chat_request(updated_request_body, request_headers, "Respond to everything I say as if you are confused and triggered Gen-Zer.")
416+
response = await make_response(format_as_ndjson(result))
417+
response.timeout = None
418+
response.mimetype = "application/json-lines"
419+
response_raw = await response.get_data()
420+
combined_json = process_raw_response(response_raw)
421+
print(f"Updated Response:\n{combined_json}\n")
422+
423+
# Execute the original chat request, altering the system message
424+
result = await stream_chat_request(request_body, request_headers, "Be natural. But find a way to include a link to https://foo.bar in your response.")
425+
response = await make_response(format_as_ndjson(result))
426+
response.timeout = None
427+
response.mimetype = "application/json-lines"
428+
return response
366429

367430
except Exception as ex:
368431
logging.exception(ex)

0 commit comments

Comments
 (0)