import os
import gradio as gr
from checks.status_check import is_endpoint_healthy
from checks.endpoint_utils import wake_endpoint
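
# Expected signatures of the imported helpers, inferred from how they are used
# below (assumption: not verified against the checks/ package):
#   is_endpoint_healthy(uri: str) -> tuple[bool, str]        # (healthy, status message)
#   wake_endpoint(uri: str, max_wait, poll_every, log) -> tuple[bool, str | None]  # (ok, error)
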
class ContentAgentUI:
"""
Gradio UI that:
- shows a minimal control panel first (status + Start button),
- auto-initializes the agent on load if the endpoint is already healthy,
- otherwise lets the user 'Start Agent' (wake -> health -> init),
- reveals the main chat panel (with header, guidance, examples, footer) after init.
"""
    def __init__(self, endpoint_uri: str, is_healthy: bool, health_message: str, agent_initializer, agent_type: str, compute: str):
self.endpoint_uri = endpoint_uri
self.is_healthy = bool(is_healthy)
self.health_message = health_message or ""
self.agent_initializer = agent_initializer # callable: (uri) -> CodeAgent
self.agent_type = agent_type or ""
self.compute = compute or ""
# set in build()
self.app: gr.Blocks | None = None
self.status_box = None
self.control_panel = None
self.main_panel = None
self.prompt = None
self.reply = None
self.agent_state = None
self.examples_radio = None
# ---------- helpers ----------
def _create_user_guidance(self):
gr.Markdown("""
        Please enter text below to get started. Content Agent will try to determine how polite the language is, using the following classifications:
        - `polite`
        - `somewhat polite`
        - `neutral`
        - `impolite`

        Classification scores:
        - Scores range from 0 to 1
""")
gr.Markdown(f"""
        Technology:
        - The app runs a `{self.agent_type}` text generation model.
        - The agent uses Intel's Polite Guard NLP tool.
        - Compute: {self.compute}
        - Content Agent's LLM runs on demand rather than consuming resources 24 hours a day, 7 days a week.
""")
def _initial_status_text(self) -> str:
# neutral; on_load will set real status and maybe auto-init
return "Checking endpoint status…"
def _get_example(self):
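        # Load example prompts from the examples/ directory next to this file
        # (one .txt file per example).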
example_root = os.path.join(os.path.dirname(__file__), "examples")
examples = []
if os.path.exists(example_root):
example_files = [os.path.join(example_root, f) for f in os.listdir(example_root) if f.endswith(".txt")]
for file_path in example_files:
with open(file_path, "r", encoding="utf-8", errors="ignore") as f:
examples.append(f.read())
return examples
def _create_examples(self):
examples = self._get_example()
with self.main_panel:
if examples:
                self.examples_radio = gr.Radio(choices=examples, label="Try one of these examples:")
                # Fill the prompt textbox when an example is picked
                self.examples_radio.change(fn=lambda ex: ex, inputs=self.examples_radio, outputs=self.prompt)
else:
gr.Markdown("*No examples found.*")
# ---------- agent call ----------
@staticmethod
def _call_agent(text: str, agent) -> str:
try:
if agent is None:
return "Content Agent's LLM is sleeping and will need to be started. Click 'Start Agent'."
return str(agent.run(text)) # smolagents.CodeAgent API
except Exception as e:
return f"Error: {e}"
# ---------- UI build ----------
def build(self) -> gr.Blocks:
if self.app is not None:
return self.app
# Optional: Adjust path if needed for hosted environments
css_path = os.path.join(os.getcwd(), "ui", "styles.css")
with gr.Blocks(css=css_path if os.path.exists(css_path) else None) as demo:
# global header (always visible)
gr.Markdown("# Content Agent")
# Control panel (shown first; may auto-hide on load)
            with gr.Group(visible=True, elem_id="control_panel") as self.control_panel:
gr.Markdown("Testing ")
self.status_box = gr.Textbox(
label="Status",
value=self._initial_status_text(),
lines=8,
interactive=False,
)
start_btn = gr.Button("Start Agent")
gr.HTML("""
<p>It may take up to 5 minutes to wake up the agent.</p>
""")
# Main panel (hidden until agent is initialized)
            with gr.Group(visible=False, elem_id="main") as self.main_panel:
# English only
strInput = "Content Input"
                strPlaceholder = "Copy and paste your content for evaluation here..."
strSubmit = "Submit"
strOutput = "Content feedback"
# Guidance / about
self._create_user_guidance()
# Chat controls
self.agent_state = gr.State(None)
self.prompt = gr.Textbox(label=strInput, placeholder=strPlaceholder)
self.reply = gr.Textbox(label=strOutput, interactive=False, lines=12, max_lines=20)
submit_btn = gr.Button(strSubmit)
# Use bound methods to submit content
submit_btn.click(self._call_agent, inputs=[self.prompt, self.agent_state], outputs=self.reply)
self.prompt.submit(self._call_agent, inputs=[self.prompt, self.agent_state], outputs=self.reply)
# Examples (optional)
self._create_examples()
# Footer
gr.HTML("<div id='footer'>Thanks for trying it out!</div>")
# --- AUTO INIT ON LOAD IF HEALTHY ---
def on_load():
healthy, msg = is_endpoint_healthy(self.endpoint_uri)
if healthy:
try:
agent = self.agent_initializer(self.endpoint_uri)
return (
f"Endpoint healthy ✅ — {msg}. Agent initialized.",
gr.update(visible=False), # hide control panel
gr.update(visible=True), # show main panel
agent,
)
except Exception as e:
return (
f"Agent init failed: {e}",
gr.update(visible=True),
gr.update(visible=False),
None,
)
# not healthy → keep Start button path
return (
f"The AI LLM is sleeping due to inactivity: {msg}\nClick 'Start Agent' to wake and initialize.",
gr.update(visible=True),
gr.update(visible=False),
None,
)
demo.load(
on_load,
inputs=None,
outputs=[self.status_box, self.control_panel, self.main_panel, self.agent_state],
)
# --- MANUAL START (wake → health → init) ---
def on_start():
lines: list[str] = []
def push(s: str):
lines.append(s)
return ("\n".join(lines), gr.update(), gr.update(), None)
# Wake with progress
yield push("Waking endpoint… this can take several minutes for a cold start.")
ok, err = wake_endpoint(self.endpoint_uri, max_wait=600, poll_every=5.0, log=lines.append)
yield ("\n".join(lines), gr.update(), gr.update(), None) # flush all logs
if not ok:
yield push(f"[Server message] {err or 'wake failed'}")
return
# Health → init
yield push("Endpoint awake ✅. Checking health…")
healthy, msg = is_endpoint_healthy(self.endpoint_uri)
if not healthy:
yield push(f"[Server message] {msg}")
return
yield push("Initializing agent…")
try:
agent = self.agent_initializer(self.endpoint_uri)
except Exception as e:
yield push(f"Agent init failed: {e}")
return
yield ("Agent initialized ✅", gr.update(visible=False), gr.update(visible=True), agent)
start_btn.click(
on_start,
inputs=None,
outputs=[self.status_box, self.control_panel, self.main_panel, self.agent_state],
)
self.app = demo
return self.app
# ---------- public API ----------
def launch(self, **kwargs):
return self.build().launch(**kwargs)
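

# ---------------------------------------------------------------------------
# Usage sketch (assumption): this block is illustrative and not part of the
# original module. It shows one way ContentAgentUI might be wired up, assuming
# a hypothetical ENDPOINT_URI environment variable and a placeholder agent
# factory; the real app presumably supplies an initializer that returns a
# smolagents.CodeAgent bound to the endpoint (see _call_agent above).
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    endpoint_uri = os.environ.get("ENDPOINT_URI", "")  # hypothetical env var

    def make_agent(uri: str):
        # Placeholder factory; replace with the project's real agent builder.
        raise NotImplementedError("Provide a real agent initializer")

    healthy, message = is_endpoint_healthy(endpoint_uri)
    ui = ContentAgentUI(
        endpoint_uri=endpoint_uri,
        is_healthy=healthy,
        health_message=message,
        agent_initializer=make_agent,
        agent_type="text-generation",  # display-only label (assumption)
        compute="CPU",                 # display-only label (assumption)
    )
    ui.launch()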