|
import json
import os

from vikingdb.auth import IAM, APIKey
from vikingdb.knowledge import RerankDataItem, VikingKnowledge
from vikingdb.knowledge.exceptions import VikingKnowledgeException
from vikingdb.knowledge.models.chat import ChatCompletionRequest, ChatMessage
from vikingdb.knowledge.models.search import (
    SearchCollectionRequest,
    SearchKnowledgeRequest,
)
from vikingdb.knowledge.models.service_chat import ServiceChatRequest
| 13 | + |
def init_client():
    """Build a VikingKnowledge client authenticated with IAM credentials.

    Reads the access/secret key pair from the ``VOLC_AK`` and ``VOLC_SK``
    environment variables.

    Returns:
        A ``VikingKnowledge`` client using IAM authentication.

    Raises:
        RuntimeError: if either credential variable is unset — failing fast
            here is clearer than letting the service reject ``None``
            credentials later with an opaque auth error.
    """
    ak = os.getenv("VOLC_AK")
    sk = os.getenv("VOLC_SK")
    if not ak or not sk:
        raise RuntimeError("VOLC_AK and VOLC_SK environment variables must be set")
    return VikingKnowledge(auth=IAM(ak=ak, sk=sk))
| 19 | + |
def init_client_by_apikey():
    """Build a VikingKnowledge client authenticated with a service API key.

    Reads the key from the ``VIKING_SERVICE_API_KEY`` environment variable.

    Returns:
        A ``VikingKnowledge`` client using API-key authentication.

    Raises:
        RuntimeError: if the variable is unset — fail fast instead of
            constructing a client with a ``None`` key that only errors at
            request time.
    """
    api_key = os.getenv("VIKING_SERVICE_API_KEY")
    if not api_key:
        raise RuntimeError("VIKING_SERVICE_API_KEY environment variable must be set")
    return VikingKnowledge(auth=APIKey(api_key=api_key))
| 24 | + |
| 25 | + |
def init_collection(client: VikingKnowledge):
    """Open the knowledge collection described by the environment.

    Environment variables:
        VIKING_COLLECTION_RID:  collection resource id (may be unset).
        VIKING_COLLECTION_NAME: collection name; defaults to "financial_reports".
        VIKING_PROJECT:         project name; defaults to "default".
    """
    # ``or`` (instead of os.getenv's second argument) also covers variables
    # that are set but empty.
    rid = os.getenv("VIKING_COLLECTION_RID")
    name = os.getenv("VIKING_COLLECTION_NAME") or "financial_reports"
    project = os.getenv("VIKING_PROJECT") or "default"
    return client.collection(
        resource_id=rid,
        collection_name=name,
        project_name=project,
    )
| 31 | + |
| 32 | + |
def run_search_collection():
    """Demo: hybrid search against a knowledge collection, reranking disabled."""
    collection = init_collection(init_client())
    request = SearchCollectionRequest(
        query="2025 Q1 revenue growth",
        limit=10,
        dense_weight=0.5,        # balance between dense and sparse retrieval
        rerank_switch=False,     # rerank fields below are ignored while off
        retrieve_count=25,
        endpoint_id=None,
        rerank_model="Doubao-pro-4k-rerank",
        rerank_only_chunk=False,
        query_param=None,
    )
    try:
        response = collection.search_collection(request)
    except VikingKnowledgeException as exc:
        print("search_collection_error:", exc)
    else:
        print("search_collection:", response.model_dump(by_alias=True))
| 53 | + |
| 54 | + |
def run_search_knowledge():
    """Demo: knowledge-level search with default pre/post processing."""
    collection = init_collection(init_client())
    request = SearchKnowledgeRequest(
        query="2025 Q1 revenue growth",
        image_query=None,
        pre_processing=None,     # service defaults
        post_processing=None,    # service defaults
        query_param=None,
        limit=10,
        dense_weight=0.5,
    )
    try:
        response = collection.search_knowledge(request)
    except VikingKnowledgeException as exc:
        print("search_knowledge_error:", exc)
    else:
        print("search_knowledge:", response.model_dump(by_alias=True))
| 73 | + |
| 74 | + |
def run_chat_completion():
    """Demo: non-streaming chat completion against the chat endpoint."""
    client = init_client()
    conversation = [
        ChatMessage(role="system", content="你是一位在线客服,根据<context>中的财报信息回答用户问题"),
        ChatMessage(role="user", content=[{"type": "text", "text": "总结下 2025 Q1 收入表现"}]),
    ]
    request = ChatCompletionRequest(
        model="Doubao-1-5-pro-32k",
        messages=conversation,
        thinking=None,
        max_tokens=4096,
        temperature=0.1,         # low temperature for factual summarization
        return_token_usage=True,
        api_key=os.getenv("VIKING_CHAT_API_KEY"),
        stream=False,
    )
    try:
        response = client.chat_completion(request)
    except VikingKnowledgeException as exc:
        print("chat_completion_error:", exc)
    else:
        print("chat_completion:", response.model_dump(by_alias=True))
| 96 | + |
| 97 | + |
def run_service_chat():
    """Demo: chat through a pre-configured knowledge service (API-key auth)."""
    client = init_client_by_apikey()
    request = ServiceChatRequest(
        service_resource_id=os.getenv("VIKING_SERVICE_RID"),
        messages=[ChatMessage(role="user", content="列举 2025 Q1 财报里的三项亮点")],
        query_param=None,
        stream=False,
    )
    try:
        # Generous timeout: service chat includes retrieval plus generation.
        response = client.service_chat(request, timeout=120)
    except VikingKnowledgeException as exc:
        print("service_chat_error:", exc)
    else:
        print("service_chat:", response.model_dump(by_alias=True))
| 114 | + |
| 115 | + |
def run_rerank_ops():
    """Demo: rerank a small set of candidate passages against one query."""
    client = init_client()
    query = "2025 Q1 revenue growth"
    candidates = [
        RerankDataItem(query=query, content="Revenue grew 12% YoY to $3.4B.", title="Revenue"),
        RerankDataItem(query=query, content="Operating margin improved by 1.5pp to 17%.", title="Margin"),
    ]
    try:
        scores = client.rerank(
            datas=candidates,
            rerank_model="m3-v2-rerank",
            rerank_instruction=os.getenv("VIKING_RERANK_INSTRUCTION"),
            endpoint_id=None,
        )
    except VikingKnowledgeException as exc:
        print("rerank_error:", exc)
    else:
        print("rerank:", scores)
| 133 | + |
| 134 | + |
if __name__ == "__main__":
    # Run every demo scenario in order; each handles its own errors.
    for demo in (
        run_search_collection,
        run_search_knowledge,
        run_chat_completion,
        run_service_chat,
        run_rerank_ops,
    ):
        demo()
0 commit comments