# Registry of supported text-embedding models, keyed by model identifier.
# Each entry describes one model:
#   provider     - backend that serves the model ("openai" or "huggingface")
#   dimension    - length of the embedding vector the model produces
#   max_tokens   - maximum input length, in tokens
#   description  - short human-readable summary (Chinese; runtime data, not translated)
#   cost_per_1k  - cost per 1k tokens (0 for locally hosted Hugging Face models;
#                  currency/unit not stated here — presumably USD, TODO confirm)
#   strengths / weaknesses - qualitative notes (Chinese) for model selection
EMBEDDING_MODELS = {
    "text-embedding-ada-002": {
        "provider": "openai",
        "dimension": 1536,
        "max_tokens": 8191,
        "description": "通用最强模型",
        "cost_per_1k": 0.0001,
        "strengths": ["通用能力强", "稳定性好"],
        "weaknesses": ["需要API调用", "有延迟"],
    },
    "sentence-transformers/all-MiniLM-L6-v2": {
        "provider": "huggingface",
        "dimension": 384,
        "max_tokens": 256,
        "description": "轻量高效模型",
        "cost_per_1k": 0,
        "strengths": ["速度快", "本地部署", "中文还行"],
        "weaknesses": ["精度略低"],
    },
    "sentence-transformers/all-mpnet-base-v2": {
        "provider": "huggingface",
        "dimension": 768,
        "max_tokens": 384,
        "description": "精度最高模型",
        "cost_per_1k": 0,
        "strengths": ["精度最高", "支持多语言"],
        "weaknesses": ["相对较慢"],
    },
    "moka-ai/m3e-base": {
        "provider": "huggingface",
        "dimension": 768,
        "max_tokens": 512,
        "description": "中文优化模型",
        "cost_per_1k": 0,
        "strengths": ["中文优秀", "性价比高"],
        "weaknesses": ["英文较弱"],
    },
    "BAAI/bge-large-zh": {
        "provider": "huggingface",
        "dimension": 1024,
        "max_tokens": 512,
        "description": "中文BGE大模型",
        "cost_per_1k": 0,
        "strengths": ["中文最强", "精度高"],
        "weaknesses": ["占用资源大"],
    },
    "intfloat/e5-base-v2": {
        "provider": "huggingface",
        "dimension": 768,
        "max_tokens": 512,
        "description": "E5通用模型",
        "cost_per_1k": 0,
        "strengths": ["通用性好", "支持多语言"],
        "weaknesses": ["需要前缀"],
    },
}