Skip to content

show thinking text and input/output token usage #108

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

Merged
merged 1 commit into from
May 5, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions pkgs/dart_mcp/CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,3 +1,8 @@
## 0.2.1-wip

- Update workflow example to show thinking spinner and input and output token
usage.

## 0.2.0

- Support protocol version 2025-03-26.
Expand Down
103 changes: 69 additions & 34 deletions pkgs/dart_mcp/example/workflow_client.dart
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@ import 'dart:io';

import 'package:args/args.dart';
import 'package:async/async.dart';
import 'package:cli_util/cli_logging.dart';
import 'package:dart_mcp/client.dart';
import 'package:google_generative_ai/google_generative_ai.dart' as gemini;

Expand All @@ -23,6 +24,7 @@ void main(List<String> args) {

final parsedArgs = argParser.parse(args);
final serverCommands = parsedArgs['server'] as List<String>;
final logger = Logger.standard();
runZonedGuarded(
() {
WorkflowClient(
Expand All @@ -31,10 +33,11 @@ void main(List<String> args) {
verbose: parsedArgs.flag('verbose'),
dtdUri: parsedArgs.option('dtd'),
persona: parsedArgs.flag('dash') ? _dashPersona : null,
logger: logger,
);
},
(e, s) {
stderr.writeln('$e\n$s');
logger.stderr('$e\n$s');
},
);
}
Expand All @@ -58,11 +61,16 @@ final argParser =
);

final class WorkflowClient extends MCPClient with RootsSupport {
final Logger logger;
int totalInputTokens = 0;
int totalOutputTokens = 0;

WorkflowClient(
this.serverCommands, {
required String geminiApiKey,
String? dtdUri,
this.verbose = false,
required this.logger,
String? persona,
}) : model = gemini.GenerativeModel(
model: 'gemini-2.5-pro-preview-03-25',
Expand Down Expand Up @@ -120,11 +128,10 @@ final class WorkflowClient extends MCPClient with RootsSupport {

// Introduce yourself.
_addToHistory('Please introduce yourself and explain how you can help.');
final introResponse =
(await model.generateContent(
chatHistory,
tools: serverTools,
)).candidates.single.content;
final introResponse = await _generateContent(
context: chatHistory,
tools: serverTools,
);
_handleModelResponse(introResponse);

while (true) {
Expand All @@ -139,7 +146,7 @@ final class WorkflowClient extends MCPClient with RootsSupport {
case gemini.TextPart():
_chatToUser(part.text);
default:
print('Unrecognized response type from the model $response');
logger.stderr('Unrecognized response type from the model $response');
}
}
}
Expand All @@ -159,11 +166,10 @@ final class WorkflowClient extends MCPClient with RootsSupport {
'plan.';
_addToHistory(planPrompt);

final planResponse =
(await model.generateContent(
chatHistory,
tools: serverTools,
)).candidates.single.content;
final planResponse = await _generateContent(
context: chatHistory,
tools: serverTools,
);
_handleModelResponse(planResponse);

final userResponse = await _waitForInputAndAddToHistory();
Expand All @@ -189,11 +195,10 @@ final class WorkflowClient extends MCPClient with RootsSupport {
final nextMessage = continuation ?? await stdinQueue.next;
continuation = null;
_addToHistory(nextMessage);
final modelResponse =
(await model.generateContent(
chatHistory,
tools: serverTools,
)).candidates.single.content;
final modelResponse = await _generateContent(
context: chatHistory,
tools: serverTools,
);

for (var part in modelResponse.parts) {
switch (part) {
Expand All @@ -212,7 +217,9 @@ final class WorkflowClient extends MCPClient with RootsSupport {
'$result\n. Please proceed to the next step of the plan.';

default:
print('Unrecognized response type from the model: $modelResponse.');
logger.stderr(
'Unrecognized response type from the model: $modelResponse.',
);
}
}
}
Expand All @@ -232,34 +239,62 @@ final class WorkflowClient extends MCPClient with RootsSupport {
/// previous action.
Future<bool> _analyzeSentiment(String message) async {
if (message == 'y' || message == 'yes') return true;
final sentimentResult =
(await model.generateContent([
gemini.Content.text(
'Analyze the sentiment of the following response. If the response '
'indicates a need for any changes, then this is not an approval. '
'If you are highly confident that the user approves of running the '
'previous action then respond with a single character "y".',
),
gemini.Content.text(message),
])).candidates.single.content;
final sentimentResult = await _generateContent(
context: [
gemini.Content.text(
'Analyze the sentiment of the following response. If the response '
'indicates a need for any changes, then this is not an approval. '
'If you are highly confident that the user approves of running the '
'previous action then respond with a single character "y".',
),
gemini.Content.text(message),
],
);
final response = StringBuffer();
for (var part in sentimentResult.parts.whereType<gemini.TextPart>()) {
response.write(part.text.trim());
}
return response.toString() == 'y';
}

/// Sends [context] to the model and returns the single candidate's content.
///
/// Displays a "thinking" progress spinner while the request is in flight.
/// On completion, updates [totalInputTokens] / [totalOutputTokens] and
/// finishes the spinner with a per-call and cumulative token-usage summary;
/// if the request throws before a response is received, finishes with
/// 'failed' and rethrows.
Future<gemini.Content> _generateContent({
  required Iterable<gemini.Content> context,
  List<gemini.Tool>? tools,
}) async {
  final progress = logger.progress('thinking');
  gemini.GenerateContentResponse? response;
  try {
    response = await model.generateContent(context, tools: tools);
    return response.candidates.single.content;
  } finally {
    if (response != null) {
      // usageMetadata (and its counts) may be absent; default missing counts
      // to zero so the summary never renders "(+null)" and the printed
      // deltas always match what was added to the running totals.
      final inputTokens = response.usageMetadata?.promptTokenCount ?? 0;
      final outputTokens = response.usageMetadata?.candidatesTokenCount ?? 0;
      totalInputTokens += inputTokens;
      totalOutputTokens += outputTokens;
      progress.finish(
        message:
            '(input token usage: $totalInputTokens (+$inputTokens), output '
            'token usage: $totalOutputTokens (+$outputTokens))',
        showTiming: true,
      );
    } else {
      progress.finish(message: 'failed', showTiming: true);
    }
  }
}

/// Prints `text` and adds it to the chat history
void _chatToUser(String text) {
final content = gemini.Content.text(text);
final dashText = StringBuffer();
for (var part in content.parts.whereType<gemini.TextPart>()) {
dashText.write(part.text);
}
print('\n$dashText\n');
chatHistory.add(
gemini.Content.model([gemini.TextPart(dashText.toString())]),
);
logger.stdout('\n$dashText');
// Add the non-personalized text to the context as it might lose some
// useful info.
chatHistory.add(gemini.Content.model([gemini.TextPart(text)]));
}

/// Handles a function call response from the model.
Expand Down Expand Up @@ -310,7 +345,7 @@ final class WorkflowClient extends MCPClient with RootsSupport {
),
);
if (result.protocolVersion != ProtocolVersion.latestSupported) {
print(
logger.stderr(
'Protocol version mismatch, expected '
'${ProtocolVersion.latestSupported}, got ${result.protocolVersion}, '
'disconnecting from server',
Expand All @@ -336,7 +371,7 @@ final class WorkflowClient extends MCPClient with RootsSupport {
),
);
connection.onLog.listen((event) {
print(
logger.stdout(
'Server Log(${event.level.name}): '
'${event.logger != null ? '[${event.logger}] ' : ''}${event.data}',
);
Expand Down
3 changes: 2 additions & 1 deletion pkgs/dart_mcp/pubspec.yaml
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
name: dart_mcp
version: 0.2.0
version: 0.2.1-wip
description: A package for making MCP servers and clients.
repository: https://github.com/dart-lang/ai/tree/main/pkgs/dart_mcp
issue_tracker: https://github.com/dart-lang/ai/issues?q=is%3Aissue+is%3Aopen+label%3Apackage%3Adart_mcp
Expand All @@ -17,6 +17,7 @@ dependencies:

dev_dependencies:
args: ^2.7.0
cli_util: ^0.4.2
dart_flutter_team_lints: ^3.2.1
google_generative_ai: ^0.4.6
test: ^1.25.15
1 change: 1 addition & 0 deletions pkgs/dart_tooling_mcp_server/lib/src/utils/cli_utils.dart
Original file line number Diff line number Diff line change
Expand Up @@ -171,6 +171,7 @@ ListSchema rootsSchema({bool supportsPaths = false}) => Schema.list(
title:
'Paths to run this tool on. Must resolve to a path that is '
'within the "root".',
items: Schema.string(),
),
},
required: [ParameterNames.root],
Expand Down