Pranjal Gupta committed
Commit 7bd0b80 · 1 Parent(s): 9d39028

gradio text box change

Files changed (1)
  1. app.py +24 -13
app.py CHANGED
@@ -183,22 +183,33 @@ with gr.Blocks(title="Contextual RAG Chatbot on Hugging Face Spaces") as demo:
     gr.Markdown("## Contextual RAG Chatbot")
     gr.Markdown("Please enter your Hugging Face Access Token to access gated models like Llama 3.2. You can generate a token from your [Hugging Face settings](https://huggingface.co/settings/tokens).")
 
-    newToken = ""
-
-    def tokenFunc (value) :
-        newToken = value
-
-    name = gr.Textbox(
+    hf_token_textbox = gr.Textbox(
         label="Hugging Face Access Token",
+        type="password",
+        interactive=True
     )
-
-    print("toeken =---------->[0]", name.value)
 
-    chatbot = gr.ChatInterface(
-        fn=lambda message, history: gradio_rag_wrapper(message, history, name.value),
-        multimodal=True,
-        description="Upload a PDF file to start chatting!",
-        textbox=gr.MultimodalTextbox(file_types=[".pdf"]),
+    # Use gr.Chatbot and gr.MultimodalTextbox for more control
+    chatbot = gr.Chatbot(label="Chatbot")
+
+    msg = gr.MultimodalTextbox(
+        placeholder="Upload a PDF file or enter your query...",
+        file_types=[".pdf"],
+        interactive=True
+    )
+
+    # Submit handler to process user input and update the chatbot
+    def respond(message, chat_history, hf_token_from_textbox):
+        # The wrapper function now correctly receives the token from the text box
+        response = gradio_rag_wrapper(message, chat_history, hf_token_from_textbox)
+        chat_history.append((message, response))
+        return "", chat_history
+
+    # Define the submit event to call the respond function
+    msg.submit(
+        respond,
+        inputs=[msg, chatbot, hf_token_textbox],
+        outputs=[msg, chatbot],
     )
 
 if __name__ == "__main__":
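
For reference, the new wiring can be exercised on its own with a minimal sketch like the one below. The `gradio_rag_wrapper` here is a hypothetical stub standing in for the real RAG pipeline defined earlier in app.py (that code is outside this hunk), and the tuple-style chat history mirrors what the commit uses; treat this as an illustration of the layout under those assumptions, not the actual application code.

```python
import gradio as gr

# Hypothetical stub: the real gradio_rag_wrapper in app.py indexes the uploaded
# PDF and queries the model with the provided Hugging Face token.
def gradio_rag_wrapper(message, history, hf_token):
    files = message.get("files", []) if isinstance(message, dict) else []
    text = message.get("text", "") if isinstance(message, dict) else str(message)
    if not hf_token:
        return "Please enter a Hugging Face access token first."
    return f"Received {len(files)} file(s) and query: {text!r}"

with gr.Blocks(title="Contextual RAG Chatbot on Hugging Face Spaces") as demo:
    gr.Markdown("## Contextual RAG Chatbot")

    hf_token_textbox = gr.Textbox(
        label="Hugging Face Access Token",
        type="password",      # mask the token in the UI
        interactive=True,
    )

    chatbot = gr.Chatbot(label="Chatbot")

    msg = gr.MultimodalTextbox(
        placeholder="Upload a PDF file or enter your query...",
        file_types=[".pdf"],
        interactive=True,
    )

    def respond(message, chat_history, hf_token_from_textbox):
        # MultimodalTextbox submits a dict: {"text": str, "files": [paths]}
        response = gradio_rag_wrapper(message, chat_history, hf_token_from_textbox)
        # Show only the text part of the multimodal message in the history
        chat_history.append((message.get("text", ""), response))
        return None, chat_history  # None clears the input box

    msg.submit(
        respond,
        inputs=[msg, chatbot, hf_token_textbox],
        outputs=[msg, chatbot],
    )

if __name__ == "__main__":
    demo.launch()
```

Typing a query (optionally attaching a PDF) and pressing Enter triggers `msg.submit`, which threads the token from the password box into the wrapper and appends the exchange to the chat history.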