Restore working user message display from previous version
- Reverted to the working user message flow from commit bc17742
- User messages should now appear in blue immediately when typed
- Removed problematic immediate display logic that was causing delays
- Fixed chat history display to show all messages properly
🤖 Generated with [Claude Code](https://claude.ai/code)
Co-Authored-By: Claude <noreply@anthropic.com>
- app.py +4 -15
- new_system_prompt.txt +2 -1
- src.py +1 -0
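For context, here is a minimal sketch of the message flow this commit restores. The names `get_from_user`, `show_custom_response`, and the `responses`/`processing` session-state keys come from the diff below; their bodies, the chat-input wiring, and the `user-message` styling are assumptions for illustration, not the app's actual code.

```python
import streamlit as st

if "responses" not in st.session_state:
    st.session_state.responses = []

def get_from_user(prompt: str) -> dict:
    # Assumed shape: a dict the history renderer understands.
    return {"role": "user", "content": prompt}

def show_custom_response(response: dict):
    # Assumed rendering: user turns appear right-aligned in blue via the
    # 'user-message' CSS class that app.py defines elsewhere.
    if response["role"] == "user":
        st.markdown(
            "<div style='display: flex; justify-content: flex-end;'>"
            f"<div class='user-message'>{response['content']}</div></div>",
            unsafe_allow_html=True,
        )
    else:
        st.markdown(response["content"])

# Chat history: every stored turn, including the just-typed user message,
# is drawn by this one loop on every rerun.
for response_id, response in enumerate(st.session_state.responses):
    show_custom_response(response)

prompt = st.chat_input("Ask a question")  # assumed input widget
if prompt and not st.session_state.get("processing"):
    st.session_state.responses.append(get_from_user(prompt))
    st.session_state.processing = True
    st.rerun()  # rerun so the history loop shows the message immediately
```

On that rerun, the processing branch (sketched after the app.py diff) generates the assistant reply.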
 
    	
app.py CHANGED

@@ -529,13 +529,8 @@ def show_custom_response(response):
 
 
     # Chat history
-    # Display chat history 
+    # Display chat history
     for response_id, response in enumerate(st.session_state.responses):
-        # Skip showing the last user message if we're processing (already shown above)
-        if (st.session_state.get("processing") and
-            response_id == len(st.session_state.responses) - 1 and
-            response["role"] == "user"):
-            continue
         status = show_custom_response(response)
 
         # Show feedback section for assistant responses
@@ -634,15 +629,6 @@ if prompt and not st.session_state.get("processing"):
             prompt = None
 
     if prompt:
-        # Show user message immediately with blue styling
-        st.markdown(f"""
-        <div style='display: flex; justify-content: flex-end; margin: 1rem 0;'>
-            <div class='user-message'>
-                {prompt}
-            </div>
-        </div>
-        """, unsafe_allow_html=True)
-
         # Add user input to chat history
         user_response = get_from_user(prompt)
         st.session_state.responses.append(user_response)
@@ -651,6 +637,9 @@ if prompt and not st.session_state.get("processing"):
         st.session_state.processing = True
         st.session_state.current_model = model_name
         st.session_state.current_question = prompt
+
+        # Rerun to show processing indicator
+        st.rerun()
 
 # Process the question if we're in processing state
 if st.session_state.get("processing"):
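The `st.rerun()` added at the end of the input handler hands control back to the top of the script: the history loop redraws (now including the new user message) and the processing branch generates the reply. That branch appears in this diff only as its first line; the sketch below fills it in under the assumption that `ask_question` from src.py returns a response dict the history renderer can display.

```python
import streamlit as st
from src import ask_question  # assumed import path; ask_question is defined in src.py

if st.session_state.get("processing"):
    with st.spinner("Generating answer..."):
        result = ask_question(
            st.session_state.current_model,
            st.session_state.current_question,
        )
    st.session_state.responses.append(result)  # assumed: a renderable response dict
    st.session_state.processing = False
    st.rerun()  # redraw so the assistant reply joins the chat history
```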
    	
new_system_prompt.txt CHANGED

@@ -34,9 +34,10 @@ DATA SAFETY:
 
 PLOTTING REQUIREMENTS:
 - Create plots for visualization requests: fig, ax = plt.subplots(figsize=(12, 7))
-- Display plots directly: st.pyplot(fig); plt.close()
+- Display plots directly with Streamlit: st.pyplot(fig); plt.close()
 - Store success message: answer = "Plot displayed successfully"
 - For non-plots: answer = "text result"
+- NEVER use plt.savefig() - always use st.pyplot(fig) for direct display
 
 BASIC ERROR PREVENTION:
 - Use try/except for complex operations
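To make the prompt change concrete, this is roughly the plotting pattern the updated instructions ask the model-generated code to follow: render with `st.pyplot(fig)` and close the figure, never write it to disk with `plt.savefig()`. The DataFrame and column names below are placeholders, not part of the repository.

```python
import matplotlib.pyplot as plt
import pandas as pd
import streamlit as st

df = pd.DataFrame({"month": ["Jan", "Feb", "Mar"], "sales": [10, 14, 9]})  # placeholder data

fig, ax = plt.subplots(figsize=(12, 7))  # per PLOTTING REQUIREMENTS
ax.bar(df["month"], df["sales"])
ax.set_title("Sales by month")

st.pyplot(fig)  # display directly in the app; never plt.savefig()
plt.close(fig)  # free the figure once it has been rendered
answer = "Plot displayed successfully"
```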
    	
src.py CHANGED

@@ -268,6 +268,7 @@ def ask_question(model_name, question):
 import pandas as pd
 import matplotlib.pyplot as plt
 import seaborn as sns
+import streamlit as st
 import uuid
 import calendar
 import numpy as np
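The single added import makes streamlit visible where `ask_question` prepares the model-generated code, so the `st.pyplot(fig)` convention required by new_system_prompt.txt can actually resolve at run time. The diff does not show how that code is executed; the exec-based sketch below is only one plausible mechanism, with every name in it hypothetical.

```python
import streamlit as st

# Hypothetical stand-in for a snippet produced by the model under the prompt rules.
generated_code = 'answer = "text result"'

# Assumption: the generated snippet runs with these names in scope, which is
# why streamlit must be imported next to pandas/matplotlib/seaborn.
exec_namespace = {"st": st}
exec(generated_code, exec_namespace)
print(exec_namespace["answer"])  # -> text result
```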