import type {
  AiSearchParams,
  AiSearchResponse,
  AiSearchResult,
  ChatParams,
  ChatResponse,
  Choice,
  ChoiceMessage,
  ImageParams,
  ImageResult,
  RerankParams,
  RerankResponse,
  Usage,
  VideoParams,
  VideoTaskResult,
} from './models'

export class SkillsClient {
|
||
private readonly apiKey: string;
|
||
private readonly baseUrl: string;
|
||
|
||
constructor(options: { apiKey: string; baseUrl: string }) {
|
||
this.apiKey = options.apiKey;
|
||
this.baseUrl = options.baseUrl;
|
||
}
|
||
|
||
private headers(): Record<string, string> {
|
||
return {
|
||
Authorization: `Bearer ${this.apiKey}`,
|
||
'Content-Type': 'application/json',
|
||
};
|
||
}
|
||
|
||
private url(path: string): string {
|
||
return `${this.baseUrl}/skills/v1/${path}`;
|
||
}
|
||
|
||
private async post<T>(path: string, body: unknown): Promise<T> {
|
||
const resp = await fetch(this.url(path), {
|
||
method: 'POST',
|
||
headers: this.headers(),
|
||
body: JSON.stringify(body),
|
||
});
|
||
if (!resp.ok) {
|
||
throw new Error(`HTTP ${resp.status}: ${await resp.text()}`);
|
||
}
|
||
return resp.json() as Promise<T>;
|
||
}
|
||
|
||
// ────────── Chat ──────────
|
||
|
||
private buildChatBody(params: ChatParams): Record<string, unknown> {
|
||
const body: Record<string, unknown> = {
|
||
model: params.model ?? 'gpt-5.1',
|
||
messages: params.messages,
|
||
stream: params.stream ?? false,
|
||
};
|
||
if (params.maxTokens != null) body.maxTokens = params.maxTokens;
|
||
if (params.temperature != null) body.temperature = params.temperature;
|
||
if (params.topP != null) body.topP = params.topP;
|
||
if (params.responseFormat != null) body.responseFormat = params.responseFormat;
|
||
if (params.tools != null) body.tools = params.tools;
|
||
if (params.toolChoice != null) body.toolChoice = params.toolChoice;
|
||
return body;
|
||
}
|
||
|
||
/** 非流式聊天 */
|
||
async chat(params: ChatParams): Promise<ChatResponse> {
|
||
const resp = await fetch(this.url('chat/completions'), {
|
||
method: 'POST',
|
||
headers: this.headers(),
|
||
body: JSON.stringify(this.buildChatBody({ ...params, stream: false })),
|
||
});
|
||
if (!resp.ok) throw new Error(`HTTP ${resp.status}: ${await resp.text()}`);
|
||
return parseChatResponse(await resp.json() as Record<string, unknown>);
|
||
}
|
||
|
||
/** 流式聊天 (SSE) */
|
||
async *chatStream(params: ChatParams): AsyncGenerator<ChatResponse> {
|
||
const resp = await fetch(this.url('chat/completions'), {
|
||
method: 'POST',
|
||
headers: this.headers(),
|
||
body: JSON.stringify(this.buildChatBody({ ...params, stream: true })),
|
||
});
|
||
if (!resp.ok) throw new Error(`HTTP ${resp.status}: ${await resp.text()}`);
|
||
if (!resp.body) throw new Error('Response body is null');
|
||
|
||
yield* parseSSEStream(resp.body, parseChatResponse);
|
||
}
|
||
|
||
// ────────── Image ──────────
|
||
|
||
/** 图片生成 */
|
||
async imageGenerate(params: ImageParams): Promise<ImageResult> {
|
||
return this.post<ImageResult>('image/generate', {
|
||
model: params.model ?? 'Nano Banana Pro',
|
||
...params,
|
||
});
|
||
}
|
||
|
||
// ────────── Video ──────────
|
||
|
||
/** 创建视频生成任务 */
|
||
async videoCreateTask(params: VideoParams): Promise<VideoTaskResult> {
|
||
return this.post<VideoTaskResult>('video/tasks', {
|
||
model: params.model ?? 'Doubao-Seedance-1.5-pro',
|
||
...params,
|
||
});
|
||
}
|
||
|
||
/** 查询视频生成任务状态 */
|
||
async videoGetTask(params: Pick<VideoParams, 'taskId' | 'model'>): Promise<VideoTaskResult> {
|
||
return this.post<VideoTaskResult>('video/tasks/query', {
|
||
model: params.model ?? 'Doubao-Seedance-1.5-pro',
|
||
taskId: params.taskId,
|
||
});
|
||
}
|
||
|
||
// ────────── AI Search ──────────
|
||
|
||
/** AI 搜索(非流式) */
|
||
async aiSearch(params: AiSearchParams): Promise<AiSearchResult> {
|
||
return this.post<AiSearchResult>('aiSearch', {
|
||
model: params.model ?? 'aiSearch',
|
||
...params,
|
||
stream: false,
|
||
});
|
||
}
|
||
|
||
/** AI 搜索(流式 SSE) */
|
||
async *aiSearchStream(params: AiSearchParams): AsyncGenerator<AiSearchResponse> {
|
||
const resp = await fetch(this.url('aiSearch'), {
|
||
method: 'POST',
|
||
headers: this.headers(),
|
||
body: JSON.stringify({
|
||
model: params.model ?? 'aiSearch',
|
||
...params,
|
||
stream: true,
|
||
}),
|
||
});
|
||
if (!resp.ok) throw new Error(`HTTP ${resp.status}: ${await resp.text()}`);
|
||
if (!resp.body) throw new Error('Response body is null');
|
||
|
||
yield* parseSSEStream(resp.body, (data) => (data.data ?? data) as AiSearchResponse);
|
||
}
|
||
|
||
// ────────── Rerank ──────────
|
||
|
||
/** 重排 */
|
||
async rerank(params: RerankParams): Promise<RerankResponse> {
|
||
return this.post<RerankResponse>('rerank', {
|
||
model: params.model ?? 'qwen3-vl-rerank',
|
||
...params,
|
||
});
|
||
}
|
||
}

// ────────── helpers ──────────

async function* parseSSEStream<T>(
|
||
body: ReadableStream<Uint8Array>,
|
||
parse: (data: Record<string, unknown>) => T,
|
||
): AsyncGenerator<T> {
|
||
const decoder = new TextDecoder();
|
||
const reader = body.getReader();
|
||
let buffer = '';
|
||
|
||
try {
|
||
while (true) {
|
||
const { done, value } = await reader.read();
|
||
if (done) break;
|
||
buffer += decoder.decode(value, { stream: true });
|
||
const lines = buffer.split('\n');
|
||
buffer = lines.pop() ?? '';
|
||
for (const line of lines) {
|
||
const trimmed = line.trim();
|
||
if (!trimmed || !trimmed.startsWith('data:')) continue;
|
||
const dataStr = trimmed.slice(5).trim();
|
||
if (dataStr === '[DONE]') return;
|
||
try {
|
||
yield parse(JSON.parse(dataStr) as Record<string, unknown>);
|
||
} catch { /* skip malformed lines */ }
|
||
}
|
||
}
|
||
} finally {
|
||
reader.releaseLock();
|
||
}
|
||
}
function parseChatResponse(data: Record<string, unknown>): ChatResponse {
|
||
const rawChoices = (data.choices as Record<string, unknown>[] | undefined) ?? [];
|
||
const choices: Choice[] = rawChoices.map((c) => {
|
||
const msgData = c.message as Record<string, unknown> | undefined;
|
||
const deltaData = c.delta as Record<string, unknown> | undefined;
|
||
const toMsg = (d: Record<string, unknown>): ChoiceMessage => ({
|
||
role: d.role as string | undefined,
|
||
content: d.content as string | undefined,
|
||
reasoningContent: d.reasoningContent as string | undefined,
|
||
toolCalls: d.toolCalls as unknown[] | undefined,
|
||
});
|
||
return {
|
||
message: msgData ? toMsg(msgData) : undefined,
|
||
delta: deltaData ? toMsg(deltaData) : undefined,
|
||
finishReason: (c.finishReason ?? c.finish_reason) as string | undefined,
|
||
};
|
||
});
|
||
|
||
const usageData = data.usage as Record<string, unknown> | undefined;
|
||
const usage: Usage | undefined = usageData
|
||
? {
|
||
promptTokens: (usageData.promptTokens ?? usageData.prompt_tokens) as number | undefined,
|
||
completionTokens: (usageData.completionTokens ?? usageData.completion_tokens) as number | undefined,
|
||
totalTokens: (usageData.totalTokens ?? usageData.total_tokens) as number | undefined,
|
||
}
|
||
: undefined;
|
||
|
||
return {
|
||
id: data.id as string | undefined,
|
||
model: data.model as string | undefined,
|
||
success: data.success as boolean | undefined,
|
||
errorMessage: (data.errorMessage ?? data.error_message) as string | undefined,
|
||
choices,
|
||
usage,
|
||
};
|
||
} |