Skip to content

Commit edd8a90

Browse files
test(litellm): Replace mocks with httpx types in nonstreaming tests
1 parent 50d0b1f commit edd8a90

1 file changed

Lines changed: 34 additions & 17 deletions

File tree

tests/integrations/litellm/test_litellm.py

Lines changed: 34 additions & 17 deletions
Original file line number | Diff line number | Diff line change
@@ -163,7 +163,13 @@ def model_dump(self):
163163
],
164164
)
165165
def test_nonstreaming_chat_completion(
166-
sentry_init, capture_events, send_default_pii, include_prompts
166+
reset_litellm_executor,
167+
sentry_init,
168+
capture_events,
169+
send_default_pii,
170+
include_prompts,
171+
get_model_response,
172+
nonstreaming_chat_completions_model_response,
167173
):
168174
sentry_init(
169175
integrations=[LiteLLMIntegration(include_prompts=include_prompts)],
@@ -173,31 +179,42 @@ def test_nonstreaming_chat_completion(
173179
events = capture_events()
174180

175181
messages = [{"role": "user", "content": "Hello!"}]
176-
mock_response = MockCompletionResponse()
177182

178-
with start_transaction(name="litellm test"):
179-
# Simulate what litellm does: call input callback, then success callback
180-
kwargs = {
181-
"model": "gpt-3.5-turbo",
182-
"messages": messages,
183-
}
183+
client = OpenAI(api_key="z")
184184

185-
_input_callback(kwargs)
186-
_success_callback(
187-
kwargs,
188-
mock_response,
189-
datetime.now(),
190-
datetime.now(),
191-
)
185+
model_response = get_model_response(
186+
nonstreaming_chat_completions_model_response,
187+
serialize_pydantic=True,
188+
request_headers={"X-Stainless-Raw-Response": "True"},
189+
)
190+
191+
with mock.patch.object(
192+
client.completions._client._client,
193+
"send",
194+
return_value=model_response,
195+
):
196+
with start_transaction(name="litellm test"):
197+
litellm.completion(
198+
model="gpt-3.5-turbo",
199+
messages=messages,
200+
client=client,
201+
)
202+
203+
litellm_utils.executor.shutdown(wait=True)
192204

193205
assert len(events) == 1
194206
(event,) = events
195207

196208
assert event["type"] == "transaction"
197209
assert event["transaction"] == "litellm test"
198210

199-
assert len(event["spans"]) == 1
200-
(span,) = event["spans"]
211+
chat_spans = list(
212+
x
213+
for x in event["spans"]
214+
if x["op"] == OP.GEN_AI_CHAT and x["origin"] == "auto.ai.litellm"
215+
)
216+
assert len(chat_spans) == 1
217+
span = chat_spans[0]
201218

202219
assert span["op"] == OP.GEN_AI_CHAT
203220
assert span["description"] == "chat gpt-3.5-turbo"

0 commit comments

Comments (0)