Upload app.py with huggingface_hub
app.py CHANGED
@@ -734,7 +734,7 @@ def generate_prompt_with_glm(image_description: str, user_request: str, style: s
 
 TASK: Combine the image description with the requested changes into ONE image prompt.
 
-GLOBAL RULE:
+GLOBAL RULE: Your output text CANNOT exceed 4000 TOKENS. This is a strict limit. Output ONLY the prompt text itself.
 
 ABSOLUTELY FORBIDDEN - NEVER OUTPUT THESE:
 - Planning text: "Add textures:", "Include lighting:", "The key elements are:"
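The new GLOBAL RULE states the 4000-token ceiling directly in the system prompt, matching the `max_tokens` cap added to the API call below. One hypothetical way to keep the two in sync (the constant and names here are illustrative, not taken from app.py):

```python
# Hypothetical sketch: define the limit once so the prompt wording and the
# API-level max_tokens cap cannot drift apart. Not taken from app.py.
MAX_OUTPUT_TOKENS = 4000

GLOBAL_RULE = (
    f"GLOBAL RULE: Your output text CANNOT exceed {MAX_OUTPUT_TOKENS} TOKENS. "
    "This is a strict limit. Output ONLY the prompt text itself."
)
```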
@@ -757,7 +757,7 @@ OUTPUT THE IMAGE PROMPT NOW - NOTHING ELSE:"""
     try:
         response = client.chat.completions.create(
             model="deepseek-reasoner",
-            max_tokens=
+            max_tokens=4000,
             messages=[
                 {"role": "system", "content": system_prompt},
                 {"role": "user", "content": f"ORIGINAL IMAGE: {desc}\n\nREQUESTED CHANGES: {user_request}{style_hint}"}
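This hunk caps the completion server-side by passing `max_tokens=4000` to the OpenAI-compatible client used for `deepseek-reasoner`. A minimal, self-contained sketch of such a call follows; the client construction, `base_url`, and the placeholder inputs are assumptions for illustration, not copied from app.py:

```python
# Minimal sketch of a capped DeepSeek request via the OpenAI-compatible SDK.
# Only the model name and max_tokens value come from the diff; the rest is
# placeholder scaffolding for illustration.
from openai import OpenAI

system_prompt = "..."  # the edited system prompt shown above
desc = "a red bicycle leaning against a brick wall"
user_request = "make it night time, add rain"
style_hint = ""

client = OpenAI(api_key="sk-...", base_url="https://api.deepseek.com")

response = client.chat.completions.create(
    model="deepseek-reasoner",
    max_tokens=4000,  # hard cap on the generated completion length
    messages=[
        {"role": "system", "content": system_prompt},
        {"role": "user", "content": f"ORIGINAL IMAGE: {desc}\n\nREQUESTED CHANGES: {user_request}{style_hint}"},
    ],
)
content = response.choices[0].message.content
```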
@@ -816,8 +816,8 @@ OUTPUT THE IMAGE PROMPT NOW - NOTHING ELSE:"""
         else:
             break
 
-    # Truncate if too long
-    max_words =
+    # Truncate if too long (4000 tokens ~ 3000 words max)
+    max_words = 3000
     words = content.split()
    if len(words) > max_words:
         content = " ".join(words[:max_words])
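The final hunk adds a word-count fallback, on the rough assumption that 4000 tokens corresponds to about 3000 English words. A hedged sketch of the same guard factored into a helper (the function name is illustrative, not from app.py):

```python
# Sketch: whitespace-based word truncation mirroring the diff's safety net.
# The 3000-word cap follows the rough "4000 tokens ~ 3000 words" estimate.
def truncate_words(text: str, max_words: int = 3000) -> str:
    """Trim text to at most max_words whitespace-separated words."""
    words = text.split()
    if len(words) > max_words:
        return " ".join(words[:max_words])
    return text

# Example: cap the model output before it is passed on to the image generator.
content = truncate_words("word " * 5000)  # -> exactly 3000 words
```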