fix: ark token usage is none (#8351)

This commit is contained in:
sino 2024-09-13 14:19:24 +08:00 committed by GitHub
parent 80a322aaa2
commit a45ac6ab98
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
2 changed files with 7 additions and 11 deletions

View File

@ -208,11 +208,9 @@ class ArkClientV3:
presence_penalty=presence_penalty,
top_p=top_p,
temperature=temperature,
stream_options={"include_usage": True},
)
for chunk in chunks:
if not chunk.choices:
continue
yield chunk
yield from chunks
def embeddings(self, texts: list[str]) -> CreateEmbeddingResponse:
    """Create embeddings for *texts* in a single Ark API call.

    Thin delegation to the underlying SDK client: ``self.endpoint_id`` is
    passed as the model identifier and the SDK's response object is
    returned unchanged.
    """
    return self.ark.embeddings.create(model=self.endpoint_id, input=texts)

View File

@ -239,16 +239,14 @@ class VolcengineMaaSLargeLanguageModel(LargeLanguageModel):
def _handle_stream_chat_response(chunks: Generator[ChatCompletionChunk]) -> Generator:
for chunk in chunks:
if not chunk.choices:
continue
choice = chunk.choices[0]
yield LLMResultChunk(
model=model,
prompt_messages=prompt_messages,
delta=LLMResultChunkDelta(
index=choice.index,
message=AssistantPromptMessage(content=choice.delta.content, tool_calls=[]),
index=0,
message=AssistantPromptMessage(
content=chunk.choices[0].delta.content if chunk.choices else "", tool_calls=[]
),
usage=self._calc_response_usage(
model=model,
credentials=credentials,
@ -257,7 +255,7 @@ class VolcengineMaaSLargeLanguageModel(LargeLanguageModel):
)
if chunk.usage
else None,
finish_reason=choice.finish_reason,
finish_reason=chunk.choices[0].finish_reason if chunk.choices else None,
),
)