Mirrored from https://github.com/binary-husky/gpt_academic.git
Synced 2025-12-06 14:36:48 +00:00
begin rag project with llama index
@@ -0,0 +1,34 @@
import llama_index


class rag_worker():
    """Skeleton of the RAG worker; every method is still a placeholder."""

    def __init__(self) -> None:
        pass

    def assign_embedding_model(self):
        # Choose / bind the embedding model used by the vector store.
        pass

    def save_to_checkpoint(self):
        # Persist the vector store to disk.
        pass

    def load_from_checkpoint(self):
        # Restore a previously persisted vector store.
        pass

    def add_documents_to_vector_store(self, documents):
        # Embed and insert whole documents.
        pass

    def add_text_to_vector_store(self, text):
        # Embed and insert raw text fragments.
        pass

    def inspect_vector_store(self):
        # Report what is currently stored (for debugging).
        pass

    def retrieve_from_store_with_query(self, query):
        # Return the stored chunks most relevant to the query.
        pass

    @staticmethod
    def build_prompt():
        # Assemble the final prompt from a query and retrieved context.
        # (Declared @staticmethod, so it takes no `self`.)
        pass

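The stubs above only name the intended responsibilities. As a rough, hypothetical illustration (not part of this commit), two of the store-facing methods could be backed by a llama_index VectorStoreIndex along these lines; the class name RagWorkerSketch, the top_k value, and the assumption that an embedding model is already configured via Settings.embed_model are all illustrative choices, not the project's actual design:

    from llama_index.core import Document, VectorStoreIndex


    class RagWorkerSketch:
        """Hypothetical fleshed-out version of two of the stubs above."""

        def __init__(self) -> None:
            # Assumes an embedding model has been configured globally,
            # e.g. via llama_index.core.Settings.embed_model.
            self.index = VectorStoreIndex(nodes=[])

        def add_text_to_vector_store(self, text: str) -> None:
            # Wrap the raw text in a Document and embed/insert it.
            self.index.insert(Document(text=text))

        def retrieve_from_store_with_query(self, query: str):
            # Return the stored chunks most similar to the query.
            retriever = self.index.as_retriever(similarity_top_k=3)
            return retriever.retrieve(query)
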
@@ -0,0 +1,58 @@
from typing import Any, List, Optional

from llama_index.core import VectorStoreIndex
from llama_index.core.callbacks.base import CallbackManager
from llama_index.core.schema import TransformComponent
from llama_index.core.service_context import ServiceContext
from llama_index.core.settings import (
    Settings,
    callback_manager_from_settings_or_context,
    transformations_from_settings_or_context,
)
from llama_index.core.storage.storage_context import StorageContext


class GptacVectorStoreIndex(VectorStoreIndex):

    @classmethod
    def default_vector_store(
        cls,
        storage_context: Optional[StorageContext] = None,
        show_progress: bool = False,
        callback_manager: Optional[CallbackManager] = None,
        transformations: Optional[List[TransformComponent]] = None,
        # deprecated
        service_context: Optional[ServiceContext] = None,
        embed_model=None,
        **kwargs: Any,
    ) -> "GptacVectorStoreIndex":
        """Create an empty index backed by the default vector store.

        Unlike ``VectorStoreIndex.from_documents``, no documents are ingested
        here; nodes can be inserted into the returned index afterwards.
        """
        storage_context = storage_context or StorageContext.from_defaults()
        callback_manager = (
            callback_manager
            or callback_manager_from_settings_or_context(Settings, service_context)
        )
        transformations = transformations or transformations_from_settings_or_context(
            Settings, service_context
        )

        with callback_manager.as_trace("index_construction"):
            return cls(
                nodes=[],
                storage_context=storage_context,
                callback_manager=callback_manager,
                show_progress=show_progress,
                transformations=transformations,
                service_context=service_context,
                embed_model=embed_model,
                **kwargs,
            )
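For context (again not part of the commit), the new classmethod might be exercised roughly as follows; the sample document text, the top_k value, and the assumption that an embedding model is resolved through Settings are placeholders for this sketch:

    from llama_index.core import Document

    # Build an empty index; the embedding model is resolved from Settings
    # (or the deprecated service_context) because embed_model is left as None.
    index = GptacVectorStoreIndex.default_vector_store(show_progress=True)

    # The index starts with nodes=[], so content has to be inserted afterwards.
    index.insert(Document(text="gpt_academic is adding a RAG worker based on llama_index."))

    # Retrieve the single most similar chunk for a query.
    retriever = index.as_retriever(similarity_top_k=1)
    print(retriever.retrieve("What is gpt_academic adding?"))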