Skip to content

Commit

Permalink
feat(api): api update (#183)
Browse files Browse the repository at this point in the history
  • Loading branch information
stainless-app[bot] authored Jan 29, 2025
1 parent 78a3faf commit 813fb7e
Show file tree
Hide file tree
Showing 4 changed files with 14 additions and 1 deletion.
2 changes: 1 addition & 1 deletion .stats.yml
Original file line number Diff line number Diff line change
@@ -1,2 +1,2 @@
configured_endpoints: 7
openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/groqcloud%2Fgroqcloud-9502f284ff5064eb110a9b293f379ca91401cc2e3fcea6a9e5979a8af530c9b6.yml
openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/groqcloud%2Fgroqcloud-bc652d6aad32c27ff44cc2403b2f4e072ee694bdc1d5242cea30e98f506f55b2.yml
8 changes: 8 additions & 0 deletions src/groq/resources/chat/completions.py
Original file line number Diff line number Diff line change
Expand Up @@ -65,6 +65,7 @@ def create(
n: Optional[int] | NotGiven = NOT_GIVEN,
parallel_tool_calls: Optional[bool] | NotGiven = NOT_GIVEN,
presence_penalty: Optional[float] | NotGiven = NOT_GIVEN,
reasoning_format: Optional[Literal["hidden", "raw", "parsed"]] | NotGiven = NOT_GIVEN,
response_format: Optional[completion_create_params.ResponseFormat] | NotGiven = NOT_GIVEN,
seed: Optional[int] | NotGiven = NOT_GIVEN,
service_tier: Optional[Literal["auto", "on_demand", "flex"]] | NotGiven = NOT_GIVEN,
Expand Down Expand Up @@ -136,6 +137,8 @@ def create(
whether they appear in the text so far, increasing the model's likelihood to
talk about new topics.
reasoning_format: Specifies how to output reasoning tokens
response_format: An object specifying the format that the model must output.
Setting to `{ "type": "json_object" }` enables JSON mode, which guarantees the
Expand Down Expand Up @@ -219,6 +222,7 @@ def create(
"n": n,
"parallel_tool_calls": parallel_tool_calls,
"presence_penalty": presence_penalty,
"reasoning_format": reasoning_format,
"response_format": response_format,
"seed": seed,
"service_tier": service_tier,
Expand Down Expand Up @@ -275,6 +279,7 @@ async def create(
n: Optional[int] | NotGiven = NOT_GIVEN,
parallel_tool_calls: Optional[bool] | NotGiven = NOT_GIVEN,
presence_penalty: Optional[float] | NotGiven = NOT_GIVEN,
reasoning_format: Optional[Literal["hidden", "raw", "parsed"]] | NotGiven = NOT_GIVEN,
response_format: Optional[completion_create_params.ResponseFormat] | NotGiven = NOT_GIVEN,
seed: Optional[int] | NotGiven = NOT_GIVEN,
service_tier: Optional[Literal["auto", "on_demand", "flex"]] | NotGiven = NOT_GIVEN,
Expand Down Expand Up @@ -346,6 +351,8 @@ async def create(
whether they appear in the text so far, increasing the model's likelihood to
talk about new topics.
reasoning_format: Specifies how to output reasoning tokens
response_format: An object specifying the format that the model must output.
Setting to `{ "type": "json_object" }` enables JSON mode, which guarantees the
Expand Down Expand Up @@ -429,6 +436,7 @@ async def create(
"n": n,
"parallel_tool_calls": parallel_tool_calls,
"presence_penalty": presence_penalty,
"reasoning_format": reasoning_format,
"response_format": response_format,
"seed": seed,
"service_tier": service_tier,
Expand Down
3 changes: 3 additions & 0 deletions src/groq/types/chat/completion_create_params.py
Original file line number Diff line number Diff line change
Expand Up @@ -95,6 +95,9 @@ class CompletionCreateParams(TypedDict, total=False):
far, increasing the model's likelihood to talk about new topics.
"""

reasoning_format: Optional[Literal["hidden", "raw", "parsed"]]
"""Specifies how to output reasoning tokens"""

response_format: Optional[ResponseFormat]
"""An object specifying the format that the model must output.
Expand Down
2 changes: 2 additions & 0 deletions tests/api_resources/chat/test_completions.py
Original file line number Diff line number Diff line change
Expand Up @@ -57,6 +57,7 @@ def test_method_create_with_all_params(self, client: Groq) -> None:
n=1,
parallel_tool_calls=True,
presence_penalty=-2,
reasoning_format="hidden",
response_format={"type": "text"},
seed=0,
service_tier="auto",
Expand Down Expand Up @@ -160,6 +161,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncGroq) -> N
n=1,
parallel_tool_calls=True,
presence_penalty=-2,
reasoning_format="hidden",
response_format={"type": "text"},
seed=0,
service_tier="auto",
Expand Down

0 comments on commit 813fb7e

Please sign in to comment.