
Commit 2676d25

sasha-gitg authored and copybara-github committed

fix: Correct logit_bias type annotation to accept keys as strings

PiperOrigin-RevId: 655305894

1 parent 6fceebf · commit 2676d25

1 file changed: +5 -5 lines changed


vertexai/language_models/_language_models.py

Lines changed: 5 additions & 5 deletions
@@ -1354,7 +1354,7 @@ def predict(
         logprobs: Optional[int] = None,
         presence_penalty: Optional[float] = None,
         frequency_penalty: Optional[float] = None,
-        logit_bias: Optional[Dict[int, float]] = None,
+        logit_bias: Optional[Dict[str, float]] = None,
         seed: Optional[int] = None,
     ) -> "MultiCandidateTextGenerationResponse":
         """Gets model response for a single prompt.
@@ -1443,7 +1443,7 @@ async def predict_async(
         logprobs: Optional[int] = None,
         presence_penalty: Optional[float] = None,
         frequency_penalty: Optional[float] = None,
-        logit_bias: Optional[Dict[int, float]] = None,
+        logit_bias: Optional[Dict[str, float]] = None,
         seed: Optional[int] = None,
     ) -> "MultiCandidateTextGenerationResponse":
         """Asynchronously gets model response for a single prompt.
@@ -1524,7 +1524,7 @@ def predict_streaming(
         logprobs: Optional[int] = None,
         presence_penalty: Optional[float] = None,
         frequency_penalty: Optional[float] = None,
-        logit_bias: Optional[Dict[int, float]] = None,
+        logit_bias: Optional[Dict[str, float]] = None,
         seed: Optional[int] = None,
     ) -> Iterator[TextGenerationResponse]:
         """Gets a streaming model response for a single prompt.
@@ -1610,7 +1610,7 @@ async def predict_streaming_async(
         logprobs: Optional[int] = None,
         presence_penalty: Optional[float] = None,
         frequency_penalty: Optional[float] = None,
-        logit_bias: Optional[Dict[int, float]] = None,
+        logit_bias: Optional[Dict[str, float]] = None,
         seed: Optional[int] = None,
     ) -> AsyncIterator[TextGenerationResponse]:
         """Asynchronously gets a streaming model response for a single prompt.
@@ -1702,7 +1702,7 @@ def _create_text_generation_prediction_request(
         logprobs: Optional[int] = None,
         presence_penalty: Optional[float] = None,
         frequency_penalty: Optional[float] = None,
-        logit_bias: Optional[Dict[int, int]] = None,
+        logit_bias: Optional[Dict[str, float]] = None,
         seed: Optional[int] = None,
     ) -> "_PredictionRequest":
         """Prepares the text generation request for a single prompt.

0 commit comments
