Skip to content
This repository has been archived by the owner on Oct 19, 2023. It is now read-only.

Commit

Permalink
fix: add tokens number
Browse files Browse the repository at this point in the history
  • Loading branch information
zac-li committed Jun 15, 2023
1 parent a814ad5 commit 9ed3708
Show file tree
Hide file tree
Showing 2 changed files with 4 additions and 7 deletions.
7 changes: 4 additions & 3 deletions lcserve/backend/langchain_helper.py
Original file line number Diff line number Diff line change
Expand Up @@ -39,6 +39,7 @@ class TraceInfo:
action: str
prompts: Optional[List[str]] = None
outputs: str = ""
tokens: Optional[int] = None
cost: Optional[float] = None


Expand All @@ -48,6 +49,7 @@ def __init__(self, tracer: Tracer, parent_span: Span):
self.tracer = tracer
self.parent_span = parent_span
self.logger = get_tracing_logger()
self.total_tokens = 0
self.total_cost = 0

def _register_span(self, run_id, span):
Expand Down Expand Up @@ -118,6 +120,7 @@ def on_llm_end(self, response: LLMResult, *, run_id: UUID, **kwargs: Any) -> Non
span=span_context.span_id,
action="on_llm_end",
outputs=texts,
tokens=round(self.total_tokens, 3) if self.total_tokens else None,
cost=round(self.total_cost, 3) if self.total_cost else None,
)
self.logger.info(json.dumps(trace_info.__dict__))
Expand Down Expand Up @@ -182,8 +185,6 @@ def on_agent_action(
if not self.tracer:
return

operation = "langchain.agent"

try:
span = self._current_span(run_id)
span.add_event(
Expand Down Expand Up @@ -238,7 +239,7 @@ class TracingCallbackHandler(TracingCallbackHandlerMixin):

class OpenAITracingCallbackHandler(TracingCallbackHandlerMixin, OpenAICallbackHandler):
def on_llm_end(self, response: LLMResult, *, run_id: UUID, **kwargs: Any) -> None:
# Set the computed total cost first with OpenAICallbackHandler and then handle the tracing
# Set the computed total tokens used and total cost first with OpenAICallbackHandler, then handle the tracing
OpenAICallbackHandler.on_llm_end(self, response, run_id=run_id, **kwargs)
TracingCallbackHandlerMixin.on_llm_end(self, response, run_id=run_id, **kwargs)

Expand Down
4 changes: 0 additions & 4 deletions requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,3 @@ jina-hubble-sdk
nest-asyncio
textual
toml
# Below libs caused issue https://github.com/hwchase17/langchain/issues/5113,
# can unpin once it's resolved
typing-inspect==0.8.0
typing_extensions==4.5.0

0 comments on commit 9ed3708

Please sign in to comment.