Update app.py

app.py CHANGED
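The hunks below use os, streamlit (st), and several llama_index names (CallbackManager, OpenAILike, HuggingFaceEmbedding, Settings, SimpleDirectoryReader, VectorStoreIndex) that are imported above the first hunk and are untouched by this commit. A minimal sketch of what that import block presumably looks like, assuming the post-0.10 llama-index package layout with the openai-like and huggingface extras installed:

# Assumed imports for app.py (not shown in this diff); module paths per llama-index >= 0.10.
import os

import streamlit as st
from llama_index.core import Settings, SimpleDirectoryReader, VectorStoreIndex
from llama_index.core.callbacks import CallbackManager
from llama_index.embeddings.huggingface import HuggingFaceEmbedding
from llama_index.llms.openai_like import OpenAILike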
@@ -9,11 +9,7 @@ callback_manager = CallbackManager()
 
 api_base_url = "https://internlm-chat.intern-ai.org.cn/puyu/api/v1/"
 model = "internlm2.5-latest"
-api_key =
-
-# api_base_url = "https://api.siliconflow.cn/v1"
-# model = "internlm/internlm2_5-7b-chat"
-# api_key = "eyJ0eXBlIjoiSldUIiwiYWxnIjoiSFM1MTIifQ.eyJqdGkiOiI2NDIwNzQyMiIsInJvbCI6IlJPTEVfUkVHSVNURVIiLCJpc3MiOiJPcGVuWExhYiIsImlhdCI6MTczMDgwNTgyNiwiY2xpZW50SWQiOiJlYm1ydm9kNnlvMG5semFlazF5cCIsInBob25lIjoiMTM0NTUxMjAyOTciLCJ1dWlkIjoiYTA4N2JhNjQtYjI0Zi00NGM3LTlhYWQtMjZmNDRkZDljNDBmIiwiZW1haWwiOiIiLCJleHAiOjE3NDYzNTc4MjZ9._5eSNmjRIkWLjoGDCLaL26urtRP45f6x-cWXjZdC0DrN9svKGegFMbUj04v0cz_rXvOBYSgMzRoXGEk2KEoskA"
+api_key = os.getenv('INTERN_KEY')
 
 llm =OpenAILike(model=model, api_base=api_base_url, api_key=api_key, is_chat_model=True,callback_manager=callback_manager)
 
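The change above swaps the hard-coded token for os.getenv('INTERN_KEY'), so the key now has to come from the environment; on a Hugging Face Space that normally means adding INTERN_KEY as a repository secret. A small sketch of the same idea with a guard for the missing-key case (the st.error/st.stop handling is an illustrative addition, not part of this commit):

# Sketch only: read the key from the environment and fail fast if it is absent.
# The st.stop() guard is an assumption for illustration, not committed code.
api_key = os.getenv("INTERN_KEY")
if not api_key:
    st.error("INTERN_KEY is not set; add it as a Space secret or export it locally.")
    st.stop()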
@@ -22,18 +18,21 @@ llm =OpenAILike(model=model, api_base=api_base_url, api_key=api_key, is_chat_mod
 st.set_page_config(page_title="llama_index_demo", page_icon="🦜🔗")
 st.title("llama_index_demo")
 
+#os.system("git lfs install")
+#os.system("git clone https://www.modelscope.cn/Ceceliachenen/paraphrase-multilingual-MiniLM-L12-v2.git")
+
 # Initialize the models
 @st.cache_resource
 def init_models():
     embed_model = HuggingFaceEmbedding(
-        model_name="
+        model_name="paraphrase-multilingual-MiniLM-L12-v2"
     )
     Settings.embed_model = embed_model
 
     # Initialize the llm
     Settings.llm = llm
 
-    documents = SimpleDirectoryReader("
+    documents = SimpleDirectoryReader("./data").load_data()
     index = VectorStoreIndex.from_documents(documents)
     query_engine = index.as_query_engine()
 
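The second hunk ends right after query_engine is created, so the diff does not show how init_models() returns or how the page uses the engine. A minimal sketch of one plausible continuation, assuming init_models() ends with return query_engine (the return value and the input box are assumptions, not part of the commit):

# Sketch only; assumes init_models() finishes with `return query_engine`.
query_engine = init_models()  # cached across Streamlit reruns by @st.cache_resource

question = st.text_input("Ask a question about the documents loaded from ./data")
if question:
    # Retrieval-augmented query: relevant chunks come from the VectorStoreIndex,
    # and the answer is generated by the OpenAILike llm configured above.
    response = query_engine.query(question)
    st.write(str(response))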