
Commit ee2641e

feat: refactor the logging system and introduce a unified logger utility
Co-authored-by: aider (gemini/gemini-2.5-pro) <[email protected]>
1 parent 102624e commit ee2641e

File tree

5 files changed: +133 -99 lines changed


packages/mcp-server/src/bridge/bridge.ts

Lines changed: 40 additions & 49 deletions
@@ -21,15 +21,7 @@ import {
   type Content,
 } from '@google/genai';
 import { randomUUID } from 'node:crypto';
-
-const LOG_PREFIX = '[MCP SERVER]';
-
-const requestLogger = (debugMode: boolean) => (req: Request, res: Response, next: NextFunction) => {
-  if (debugMode) {
-    console.log(`${LOG_PREFIX} ${req.method} ${req.url}`);
-  }
-  next();
-};
+import { logger } from '../utils/logger.js';
 
 export class GcliMcpBridge {
   private readonly config: Config;
@@ -62,11 +54,6 @@ export class GcliMcpBridge {
   }
 
   public async start(app: Application) {
-
-    if (this.debugMode) {
-      app.use(requestLogger(this.debugMode));
-    }
-
     app.all('/mcp', async (req: Request, res: Response) => {
       const sessionId = req.headers['mcp-session-id'] as string | undefined;
 
@@ -75,23 +62,21 @@ export class GcliMcpBridge {
 
       if (!session) {
         if (isInitializeRequest(req.body)) {
-          if (this.debugMode) {
-            console.log(
-              `${LOG_PREFIX} Creating new session and transport for initialize request`,
-            );
-          }
+          logger.debug(
+            this.debugMode,
+            'Creating new session and transport for initialize request',
+          );
 
           try {
             // **Change 6: create a dedicated McpServer and Transport for the new session**
             const newMcpServer = await this.createNewMcpServer();
             const newTransport = new StreamableHTTPServerTransport({
               sessionIdGenerator: () => randomUUID(),
-              onsessioninitialized: (newSessionId) => {
-                if (this.debugMode) {
-                  console.log(
-                    `${LOG_PREFIX} Session initialized: ${newSessionId}`,
-                  );
-                }
+              onsessioninitialized: newSessionId => {
+                logger.debug(
+                  this.debugMode,
+                  `Session initialized: ${newSessionId}`,
+                );
                 // Store the new session object
                 this.sessions[newSessionId] = {
                   mcpServer: newMcpServer,
@@ -103,11 +88,10 @@ export class GcliMcpBridge {
             newTransport.onclose = () => {
               const sid = newTransport.sessionId;
               if (sid && this.sessions[sid]) {
-                if (this.debugMode) {
-                  console.log(
-                    `${LOG_PREFIX} Session ${sid} closed, removing session object.`,
-                  );
-                }
+                logger.debug(
+                  this.debugMode,
+                  `Session ${sid} closed, removing session object.`,
+                );
                 delete this.sessions[sid];
               }
             };
@@ -118,15 +102,15 @@ export class GcliMcpBridge {
             session = { mcpServer: newMcpServer, transport: newTransport };
           } catch (e) {
            // Handle errors during server creation
-            console.error(`${LOG_PREFIX} Error creating new MCP session:`, e);
+            logger.error('Error creating new MCP session:', e);
            if (!res.headersSent) {
              res.status(500).json({ error: 'Failed to create session' });
            }
            return;
          }
        } else {
-          console.error(
-            `${LOG_PREFIX} Bad Request: Missing session ID for non-initialize request.`,
+          logger.error(
+            'Bad Request: Missing session ID for non-initialize request.',
          );
          res.status(400).json({
            jsonrpc: '2.0',
@@ -138,17 +122,18 @@ export class GcliMcpBridge {
          });
          return;
        }
-      } else if (this.debugMode) {
-        console.log(
-          `${LOG_PREFIX} Reusing transport and server for session: ${sessionId}`,
+      } else {
+        logger.debug(
+          this.debugMode,
+          `Reusing transport and server for session: ${sessionId}`,
        );
      }
 
      try {
        // **Change 7: handle the request with the session-specific transport**
        await session.transport.handleRequest(req, res, req.body);
      } catch (e) {
-        console.error(`${LOG_PREFIX} Error handling request:`, e);
+        logger.error('Error handling request:', e);
        if (!res.headersSent) {
          res.status(500).end();
        }
@@ -173,11 +158,10 @@ export class GcliMcpBridge {
 
      // If a dedicated model is configured for these tools, create a new config and tool instance
      if (toolModel) {
-        if (this.debugMode) {
-          console.log(
-            `[MCP SERVER] Using custom model "${toolModel}" for tool "${tool.name}"`,
-          );
-        }
+        logger.debug(
+          this.debugMode,
+          `Using custom model "${toolModel}" for tool "${tool.name}"`,
+        );
 
        // Step 1: create a proxy around this.config.
        // The proxy object intercepts calls to the getModel method.
@@ -212,22 +196,29 @@
        args: Record<string, unknown>,
        extra: { signal: AbortSignal },
      ) => {
+        const startTime = Date.now();
+        logger.info('MCP tool call started', { toolName: tool.name, args });
        try {
          // *** Key point: all tools now execute through this unified path ***
          // toolInstanceForExecution is either the original tool or a new instance with a custom model config
          const result = await toolInstanceForExecution.execute(
            args,
            extra.signal,
          );
+          const durationMs = Date.now() - startTime;
+          logger.info('MCP tool call finished', {
+            toolName: tool.name,
+            status: 'success',
+            durationMs,
+          });
          return this.convertGcliResultToMcpResult(result);
        } catch (e) {
-          const errorMessage = e instanceof Error ? e.message : String(e);
-          console.error(
-            `${LOG_PREFIX} Error executing tool '${tool.name}': ${errorMessage}`,
-          );
-          throw new Error(
-            `Error executing tool '${tool.name}': ${errorMessage}`,
-          );
+          const durationMs = Date.now() - startTime;
+          logger.error('MCP tool call failed', e as Error, {
+            toolName: tool.name,
+            durationMs,
+          });
+          throw e; // Rethrow so the MCP SDK handles the error
        }
      },
    );
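
Note on the logger itself: every file in this commit imports `logger` from `packages/mcp-server/src/utils/logger.js`, which is presumably the fifth changed file in the tree but is not reproduced in this diff view. Inferring only from the call sites in these diffs, its surface would look roughly like the sketch below; the method signatures are implied by usage, while the prefix and formatting details are assumptions rather than the actual implementation.

// Hypothetical sketch of packages/mcp-server/src/utils/logger.ts — signatures
// inferred from the call sites in this commit; formatting details are assumed.
const PREFIX = '[MCP SERVER]';

const formatMeta = (meta?: Record<string, unknown>): string =>
  meta ? ` ${JSON.stringify(meta)}` : '';

export const logger = {
  // debug takes the caller's debugMode flag first, so call sites no longer
  // need their own `if (debugMode) { ... }` wrappers.
  debug(debugMode: boolean, message: string, meta?: Record<string, unknown>): void {
    if (debugMode) {
      console.log(`${PREFIX} [DEBUG] ${message}${formatMeta(meta)}`);
    }
  },
  info(message: string, meta?: Record<string, unknown>): void {
    console.log(`${PREFIX} [INFO] ${message}${formatMeta(meta)}`);
  },
  warn(message: string, meta?: Record<string, unknown>, error?: unknown): void {
    console.warn(`${PREFIX} [WARN] ${message}${formatMeta(meta)}`, error ?? '');
  },
  error(message: string, error?: unknown, meta?: Record<string, unknown>): void {
    console.error(`${PREFIX} [ERROR] ${message}${formatMeta(meta)}`, error ?? '');
  },
};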

packages/mcp-server/src/bridge/openai.ts

Lines changed: 28 additions & 7 deletions
@@ -4,19 +4,30 @@ import { createOpenAIStreamTransformer } from './stream-transformer.js';
 import { GeminiApiClient } from '../gemini-client.js'; // <-- import the new class
 import { type OpenAIChatCompletionRequest } from '../types.js'; // <-- import the new type
 import { mapErrorToOpenAIError } from '../utils/error-mapper.js';
+import { logger } from '../utils/logger.js';
+import { randomUUID } from 'node:crypto';
 
 export function createOpenAIRouter(config: Config, debugMode = false): Router {
   const router = Router();
 
+  // Middleware: attach a requestId to every request
+  router.use((req, res, next) => {
+    (req as any).requestId = randomUUID();
+    next();
+  });
+
   router.post('/chat/completions', async (req: Request, res: Response) => {
+    const requestId = (req as any).requestId;
+    const startTime = Date.now();
    try {
      const body = req.body as OpenAIChatCompletionRequest;
-      if (debugMode) {
-        console.log(
-          '[OpenAI Bridge] Received /chat/completions request:',
-          JSON.stringify(body, null, 2),
-        );
-      }
+
+      logger.info('OpenAI bridge request received', {
+        requestId,
+        model: body.model,
+        stream: body.stream,
+      });
+      logger.debug(debugMode, 'Request body:', { requestId, body });
      const stream = body.stream !== false;
 
      if (!stream) {
@@ -75,9 +86,19 @@ export function createOpenAIRouter(config: Config, debugMode = false): Router {
      }
      // --- End of fix ---
 
+      const durationMs = Date.now() - startTime;
+      logger.info('OpenAI bridge request finished', {
+        requestId,
+        status: 'success',
+        durationMs,
+      });
      res.end();
    } catch (e: unknown) {
-      console.error('[OpenAI Bridge] Error:', e);
+      const durationMs = Date.now() - startTime;
+      logger.error('OpenAI bridge request failed', e as Error, {
+        requestId,
+        durationMs,
+      });
 
      // Call the new error-mapping function
      const { openAIError, statusCode } = mapErrorToOpenAIError(e);
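
Aside on the requestId middleware above: it writes and reads the ID through `(req as any)` casts. A conventional, fully typed alternative (not part of this commit; shown only as an illustrative sketch) is to augment Express's `Request` interface once via declaration merging:

// Hypothetical alternative to the `(req as any)` casts — not in this commit.
// With @types/express installed, a one-time module augmentation makes
// requestId a typed property on every Request.
import { Router } from 'express';
import { randomUUID } from 'node:crypto';

declare global {
  namespace Express {
    interface Request {
      requestId?: string;
    }
  }
}

const router = Router();

router.use((req, _res, next) => {
  req.requestId = randomUUID(); // typed; no cast needed
  next();
});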

packages/mcp-server/src/gemini-client.ts

Lines changed: 16 additions & 22 deletions
@@ -20,6 +20,7 @@ import {
   type StreamChunk,
   type ReasoningData,
 } from './types.js';
+import { logger } from './utils/logger.js';
 
 /**
  * Recursively removes fields from a JSON schema that are not supported by the
@@ -140,8 +141,11 @@ export class GeminiApiClient {
         },
       });
     } catch (e) {
-      console.error(
-        '[GeminiApiClient] Error parsing tool call arguments:',
+      logger.warn(
+        'Failed to parse tool call arguments',
+        {
+          arguments: toolCall.function.arguments,
+        },
        e,
      );
    }
@@ -238,22 +242,14 @@ export class GeminiApiClient {
   }): Promise<AsyncGenerator<StreamChunk>> {
    const history = messages.map(msg => this.openAIMessageToGemini(msg));
    const lastMessage = history.pop();
-
-    // Always show the model being used
-    console.log(`[GeminiApiClient] Using model: ${model}`);
-
-    // Only show detailed history in debug mode
-    if (this.debugMode) {
-      console.log(
-        '[GeminiApiClient] History:',
-        JSON.stringify(history, null, 2),
-      );
-      console.log(
-        '[GeminiApiClient] Last Message:',
-        JSON.stringify(lastMessage, null, 2),
-      );
-    }
-
+
+    logger.info('Calling Gemini API', { model });
+
+    logger.debug(this.debugMode, 'Sending request to Gemini', {
+      historyLength: history.length,
+      lastMessage,
+    });
+
    if (!lastMessage) {
      throw new Error('No message to send.');
    }
@@ -291,10 +287,8 @@
       },
     });
 
-    if (this.debugMode) {
-      console.log('[GeminiApiClient] Got stream from Gemini.');
-    }
-
+    logger.debug(this.debugMode, 'Got stream from Gemini.');
+
    // Transform the event stream to a simpler StreamChunk stream
    return (async function* (): AsyncGenerator<StreamChunk> {
      for await (const response of geminiStream) {

packages/mcp-server/src/index.ts

Lines changed: 14 additions & 21 deletions
@@ -16,12 +16,7 @@ import { loadServerConfig } from './config/config.js';
 import { GcliMcpBridge } from './bridge/bridge.js';
 import { createOpenAIRouter } from './bridge/openai.js';
 import express from 'express';
-
-// Simple console logger for now
-const logger = {
-  // eslint-disable-next-line @typescript-eslint/no-explicit-any
-  warn: (...args: any[]) => console.warn('[WARN]', ...args),
-};
+import { logger } from './utils/logger.js';
 
 function mergeMcpServers(
   settings: Settings,
@@ -64,11 +59,13 @@ async function startMcpServer() {
   const debugMode = args.includes('--debug');
 
   if (isNaN(port)) {
-    console.error('Invalid port number provided. Use --port=<number> or set GEMINI_MCP_PORT environment variable.');
+    logger.error(
+      'Invalid port number provided. Use --port=<number> or set GEMINI_MCP_PORT environment variable.',
+    );
    process.exit(1);
  }
 
-  console.log('🚀 Starting Gemini CLI MCP Server...');
+  logger.info('Starting Gemini CLI MCP Server...');
 
  // 2. Reuse the core of the config loading, but construct Config manually
  const workspaceRoot = process.cwd();
@@ -86,19 +83,17 @@ async function startMcpServer() {
   // Initialize Auth - this is critical to initialize the tool registry and gemini client
   let selectedAuthType = settings.merged.selectedAuthType;
   if (!selectedAuthType && !process.env.GEMINI_API_KEY) {
-    console.error(
+    logger.error(
      'Auth missing: Please set `selectedAuthType` in .gemini/settings.json or set the GEMINI_API_KEY environment variable.',
    );
    process.exit(1);
  }
  selectedAuthType = selectedAuthType || AuthType.USE_GEMINI;
  await config.refreshAuth(selectedAuthType);
-  if (debugMode) {
-    console.log(`Using authentication method: ${selectedAuthType}`);
-  }
+  logger.debug(debugMode, `Using authentication method: ${selectedAuthType}`);
 
  // Log the model being used for tools. This is now set in loadServerConfig.
-  console.log(`⚙️ Using model for tools: ${config.getModel()}`);
+  logger.debug(debugMode, `Using model for tools: ${config.getModel()}`);
 
  // 4. Initialize and start the MCP bridge service and the OpenAI service
  const mcpBridge = new GcliMcpBridge(config, cliVersion, debugMode);
@@ -114,17 +109,15 @@ async function startMcpServer() {
   app.use('/v1', openAIRouter);
 
   app.listen(port, () => {
-    console.log(
-      `🚀 Gemini CLI MCP Server and OpenAI Bridge are running on port ${port}`,
-    );
-    console.log(` - MCP transport listening on http://localhost:${port}/mcp`);
-    console.log(
-      ` - OpenAI-compatible endpoints available at http://localhost:${port}/v1`,
-    );
+    logger.info('Server running', {
+      port,
+      mcpUrl: `http://localhost:${port}/mcp`,
+      openAIUrl: `http://localhost:${port}/v1`,
+    });
  });
 }
 
 startMcpServer().catch(error => {
-  console.error('Failed to start Gemini CLI MCP Bridge:', error);
+  logger.error('Failed to start Gemini CLI MCP Bridge:', error);
  process.exit(1);
 });
