maximlupey
Null values appearing in traces
def run_my_custom_llm_app(input, system_prompt):
    """Run one chat completion and record it as a Langfuse trace.

    Args:
        input: dict with at least a "question" key; passed verbatim as the
            trace input. NOTE(review): shadows the builtin `input`; the name
            is kept for caller compatibility.
        system_prompt: text for the system message of the conversation.

    Returns:
        Tuple of (completion text, the Langfuse trace object).
    """
    print(input)
    messages = [
        {"role": "system", "content": system_prompt},
        {"role": "user", "content": input["question"]},
    ]
    # Root trace for this invocation; its output is attached after the call.
    trace = langfuse.trace(input=input)

    generation_start_time = datetime.now()
    openai_completion = openai.chat.completions.create(
        model="gpt-3.5-turbo",
        messages=messages,
    ).choices[0].message.content
    # Record the LLM call as a generation span on the trace, with timing.
    langfuse_generation = trace.generation(
        name="bm4",
        input=messages,
        output=openai_completion,
        model="gpt-3.5-turbo",
        start_time=generation_start_time,
        end_time=datetime.now(),
    )
    print(trace)
    trace.update(output=openai_completion)
    return openai_completion, trace
24 replies