Spaces:
Build error
Update app.py
app.py CHANGED
@@ -53,10 +53,11 @@ TEXT:
 """
 
 IMAGE_PROMPT_TEMPLATE = """
-Based on the following story,
-
-Include setting, mood,
-
+Based on the following story, write %d distinct vivid scene descriptions, one per line.
+Each line should begin with a dash (-) followed by a detailed image-worthy scene.
+Include setting, mood, characters, and visual cues.
+
+Return ONLY the list of scenes, each on its own line.
 
 Story:
 \"\"\"%s\"\"\"
@@ -85,17 +86,20 @@ def extract_entities(text: str):
 # ────────────────────────────────
 # Build visual prompt
 # ────────────────────────────────
-def
+def generate_image_prompts(story_text: str, count=1):
     try:
-        prompt_msg = IMAGE_PROMPT_TEMPLATE % story_text
+        prompt_msg = IMAGE_PROMPT_TEMPLATE % (count, story_text)
         resp = together_client.chat.completions.create(
             model="meta-llama/Llama-4-Maverick-17B-128E-Instruct-FP8",
             messages=[{"role": "user", "content": prompt_msg}],
-            max_tokens=
+            max_tokens=200,
        )
-
+        raw_output = resp.choices[0].message.content.strip()
+        prompts = [line.strip("-• ").strip() for line in raw_output.split("\n") if line.strip()]
+        return prompts[:count]  # just in case LLM gives more than needed
     except Exception as e:
-
+        print("⚠️ LLM scene prompt generation failed:", e)
+        return []
 
 
 
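For context, the sketch below shows how the new helper might be called from elsewhere in app.py. It is illustrative only: it assumes together_client is already configured as in the diff, and the story string and surrounding variable names are made up, not taken from the Space.

# Hypothetical usage of the helper added in this commit.
# Inside the function, IMAGE_PROMPT_TEMPLATE % (count, story_text) fills the
# %d placeholder with the requested number of scenes and %s with the story body.
story_text = "A lighthouse keeper finds a glowing map washed ashore after a storm."
scene_prompts = generate_image_prompts(story_text, count=3)

for i, scene in enumerate(scene_prompts, start=1):
    print(f"Scene {i}: {scene}")

Note that line.strip("-• ").strip() removes leading and trailing dashes, bullets, and spaces character by character, so the parser tolerates both the "- " prefix the prompt asks for and bullet characters the model might return instead.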