import React, { useState, useEffect } from 'react';
import { Card, Form, Input, InputNumber, Button, Space, Divider, Select, Spin, Tag, App } from 'antd';
import { ApiOutlined, SaveOutlined, SyncOutlined, CheckCircleOutlined } from '@ant-design/icons';
import { OllamaService } from '../utils/ollama';
import { useOllama } from '../contexts/OllamaContext';

const { Option } = Select;

interface ModelInfo {
  name: string;
  size?: number;
  modified?: string;
}

const ModelSettings: React.FC = () => {
  const [form] = Form.useForm();
  const [loading, setLoading] = useState(false);
  const [testing, setTesting] = useState(false);
  const [detectingModels, setDetectingModels] = useState(false);
  const [availableModels, setAvailableModels] = useState<ModelInfo[]>([]);
  const [connectionStatus, setConnectionStatus] = useState<'unknown' | 'success' | 'error'>('unknown');
  const [currentModel, setCurrentModel] = useState('');
  const { status, refreshModels } = useOllama();
  const { message: messageApi } = App.useApp();

  useEffect(() => {
    const loadConfig = async () => {
      // Load the saved config from localStorage, falling back to defaults
      const configData = localStorage.getItem('ai_system_config');
      const config = configData
        ? JSON.parse(configData)
        : {
            ollamaUrl: 'http://localhost:11434',
            model: '',
            temperature: 0.7,
            topP: 0.9,
            maxTokens: 2000,
          };
      form.setFieldsValue(config);
      setCurrentModel(config.model || '');

      // If the service is already connected and models are known, reflect that state immediately
      if (status.isConnected && status.availableModels.length > 0) {
        setAvailableModels(status.availableModels);
        setConnectionStatus('success');
      }
    };
    loadConfig();
  }, [form, status]);

  const handleDetectModels = async () => {
    setDetectingModels(true);
    setConnectionStatus('unknown');
    try {
      const values = await form.validateFields(['ollamaUrl']);
      const ollamaService = new OllamaService(values);

      // Make sure the service is reachable before asking for models
      const isConnected = await ollamaService.testConnection();
      if (!isConnected) {
        setConnectionStatus('error');
        messageApi.open({
          type: 'error',
          content: '无法连接到 Ollama 服务,请检查服务地址和状态',
        });
        setAvailableModels([]);
        return;
      }

      // Fetch the list of locally installed models
      const models = await ollamaService.getAvailableModelsWithInfo();
      setAvailableModels(models);
      setConnectionStatus('success');

      if (models.length === 0) {
        messageApi.open({
          type: 'warning',
          content: '未检测到已安装的模型,请先使用 ollama pull 命令安装模型',
        });
      } else {
        messageApi.open({
          type: 'success',
          content: `成功检测到 ${models.length} 个已安装模型`,
        });
        // If the currently selected model is no longer installed, clear the selection
        if (currentModel && !models.find(m => m.name === currentModel)) {
          form.setFieldValue('model', undefined);
          setCurrentModel('');
          messageApi.open({
            type: 'warning',
            content: '当前选择的模型未在本地安装,请重新选择',
          });
        }
      }

      // Refresh the global Ollama status
      await refreshModels();
    } catch (error) {
      setConnectionStatus('error');
      messageApi.open({
        type: 'error',
        content: '模型检测失败,请检查 Ollama 服务状态',
      });
      setAvailableModels([]);
    } finally {
      setDetectingModels(false);
    }
  };

  const handleTestConnection = async () => {
    setTesting(true);
    setConnectionStatus('unknown');
    try {
      const values = await form.validateFields();
      const ollamaService = new OllamaService(values);
      const isConnected = await ollamaService.testConnection();
      if (isConnected) {
        setConnectionStatus('success');
        messageApi.open({
          type: 'success',
          content: 'Ollama 服务连接成功!',
        });
        await refreshModels();
      } else {
        setConnectionStatus('error');
        messageApi.open({
          type: 'error',
          content: 'Ollama 服务连接失败,请检查服务地址',
        });
      }
    } catch (error) {
      setConnectionStatus('error');
      messageApi.open({
        type: 'error',
        content: '连接测试失败,请检查配置',
      });
    } finally {
      setTesting(false);
    }
  };

  const handleModelChange = (value: string) => {
    setCurrentModel(value);
  };

  const handleSave = async () => {
    setLoading(true);
    try {
      const values = await form.validateFields();

      // Only allow saving a model that is actually installed locally
      if (currentModel && availableModels.length > 0) {
        const modelExists = availableModels.find(m => m.name === currentModel);
        if (!modelExists) {
          messageApi.open({
            type: 'error',
            content: '请选择本地已安装的模型,或点击"检测模型"刷新列表',
          });
          setLoading(false);
          return;
        }
      }

      // Make sure the saved config carries the currently selected model
      const configToSave = { ...values, model: currentModel || values.model };

      // Persist the config to localStorage
      localStorage.setItem('ai_system_config', JSON.stringify(configToSave));

      messageApi.open({
        type: 'success',
        content: '配置保存成功!',
      });

      // Update the displayed current model and refresh the global status
      setCurrentModel(configToSave.model);
      await refreshModels();
    } catch (error) {
      messageApi.open({
        type: 'error',
        content: '配置保存失败,请检查输入',
      });
    } finally {
      setLoading(false);
    }
  };

  const formatModelSize = (bytes?: number) => {
    if (!bytes) return '未知';
    const gb = bytes / (1024 * 1024 * 1024);
    return `${gb.toFixed(2)} GB`;
  };

  return (
    <Card title={<Space><ApiOutlined />模型设置</Space>}>
      <div style={{ marginBottom: 16 }}>配置本地 Ollama 服务和 AI 模型参数</div>

      <Space style={{ marginBottom: 16 }}>
        <Tag
          icon={connectionStatus === 'success' ? <CheckCircleOutlined /> : undefined}
          color={connectionStatus === 'success' ? 'success' : connectionStatus === 'error' ? 'error' : 'default'}
        >
          {connectionStatus === 'success' ? '服务正常' : connectionStatus === 'error' ? '服务异常' : '未检测'}
        </Tag>
        <Tag color={availableModels.length > 0 ? 'blue' : 'default'}>
          {availableModels.length > 0 ? `已安装 ${availableModels.length} 个模型` : connectionStatus === 'success' ? '未安装模型' : '请先连接服务'}
        </Tag>
      </Space>

      <Form form={form} layout="vertical">
        <Divider orientation="left">服务配置</Divider>
        <Form.Item label={<span>Ollama 服务地址</span>} name="ollamaUrl" rules={[{ required: true, message: '请输入Ollama服务地址' }]}>
          <Input placeholder="http://localhost:11434" />
        </Form.Item>
        <Space style={{ marginBottom: 16 }}>
          <Button icon={<SyncOutlined />} loading={testing} onClick={handleTestConnection}>测试连接</Button>
          <Button icon={<SyncOutlined />} loading={detectingModels} onClick={handleDetectModels}>检测模型</Button>
        </Space>

        <Divider orientation="left">模型选择</Divider>
        <Form.Item
          label={<span>AI 模型 {availableModels.length > 0 && <Tag color="blue">{availableModels.length} 个可用</Tag>}</span>}
          name="model"
          rules={[{ required: true, message: '请选择AI模型' }]}
          tooltip="只能选择本地已安装的模型"
        >
          <Select onChange={handleModelChange} loading={detectingModels} notFoundContent={detectingModels ? <Spin size="small" /> : null}>
            {availableModels.map(model => (
              <Option key={model.name} value={model.name}>
                {model.name} ({formatModelSize(model.size)})
              </Option>
            ))}
          </Select>
        </Form.Item>
        {currentModel && (
          <div style={{ marginBottom: 16 }}>当前使用模型: {currentModel}</div>
        )}

        <Divider orientation="left">生成参数</Divider>
        <Form.Item label={<span>温度</span>} name="temperature" rules={[{ required: true, message: '请输入温度值' }]} tooltip="控制生成文本的随机性">
          <InputNumber step={0.1} style={{ width: '100%' }} />
        </Form.Item>
        <Form.Item label={<span>Top P</span>} name="topP" rules={[{ required: true, message: '请输入Top P值' }]} tooltip="控制生成文本的多样性">
          <InputNumber step={0.1} style={{ width: '100%' }} />
        </Form.Item>
        <Form.Item label={<span>最大 Tokens</span>} name="maxTokens" rules={[{ required: true, message: '请输入最大生成长度' }]}>
          <InputNumber style={{ width: '100%' }} />
        </Form.Item>

        <Button type="primary" icon={<SaveOutlined />} loading={loading} onClick={handleSave}>保存配置</Button>
      </Form>

      <Divider orientation="left">使用说明</Divider>
      <div>
        <div>• 确保 Ollama 服务正在运行（默认端口 11434）</div>
        <div>• 点击"测试连接"验证 Ollama 服务是否正常</div>
        <div>• 点击"检测模型"获取本地已安装的模型列表</div>
        <div>• 模型选择限制为只能使用本地已安装的模型</div>
        <div>• 如需新模型，请使用命令：ollama pull 模型名</div>
        <div>• 推荐：qwen3:8b 起步</div>
        <div>• 温度参数控制生成文本的随机性，建议值为 0.7</div>
      </div>
    </Card>
  );
};

export default ModelSettings;
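
/*
 * Usage sketch (assumption, not code from this component): other parts of the app can
 * rebuild an OllamaService from the configuration saved above. The localStorage key,
 * default values, and OllamaService calls mirror what this file already uses; the
 * helper name `createServiceFromSavedConfig` is hypothetical.
 *
 *   const createServiceFromSavedConfig = (): OllamaService => {
 *     const raw = localStorage.getItem('ai_system_config');
 *     const config = raw
 *       ? JSON.parse(raw)
 *       : { ollamaUrl: 'http://localhost:11434', model: '', temperature: 0.7, topP: 0.9, maxTokens: 2000 };
 *     return new OllamaService(config);
 *   };
 *
 *   // e.g. const reachable = await createServiceFromSavedConfig().testConnection();
 */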