# Pollinations Text Generator — Gradio app for a Hugging Face Space.
import gradio as gr
import requests
import json
def get_available_models():
    """Fetch the list of available text models from the Pollinations API.

    Returns:
        list[str]: Model names reported by ``https://text.pollinations.ai/models``.
        On any failure (network error, timeout, non-200 status, unexpected
        payload shape) a static fallback list is returned so the UI dropdown
        is always populated.
    """
    # Single source of truth for the fallback (the original repeated this
    # list three times).
    fallback = [
        "openai",
        "mistral",
        "mistral-large",
        "claude-3.5-sonnet",
        "llama-3.3-70b",
        "gemini",
    ]
    try:
        # timeout prevents app startup from hanging on a dead endpoint.
        response = requests.get("https://text.pollinations.ai/models", timeout=10)
        if response.status_code != 200:
            return fallback
        models_data = response.json()
        if isinstance(models_data, list):
            # API may return either a list of plain names ...
            if all(isinstance(m, str) for m in models_data):
                return models_data
            # ... or a list of objects; keep only their 'name'/'id' strings.
            if all(isinstance(m, dict) for m in models_data):
                names = []
                for m in models_data:
                    if isinstance(m.get("name"), str):
                        names.append(m["name"])
                    elif isinstance(m.get("id"), str):
                        names.append(m["id"])
                return names or fallback
        # Unrecognized payload shape.
        return fallback
    except Exception:
        # Deliberate best-effort boundary: model discovery must never break
        # startup, so any error (DNS, timeout, bad JSON, ...) falls back.
        # Narrowed from the original bare `except:`, which also swallowed
        # KeyboardInterrupt/SystemExit.
        return fallback
def generate_text(prompt, model, seed, system, temperature, max_tokens, top_p):
    """Generate text with the Pollinations GET endpoint.

    Args:
        prompt: User prompt; a falsy value short-circuits with a usage hint.
        model: Model identifier to pass through to the API.
        seed: Optional random seed (coerced to int when truthy).
        system: Optional system prompt.
        temperature: Sampling temperature; sent whenever not None.
        max_tokens: Maximum response length (coerced to int when truthy).
        top_p: Nucleus-sampling parameter; sent whenever not None.

    Returns:
        str: The API response — pretty-printed as a fenced JSON block when
        the body parses as JSON, otherwise plain text — or an ``Error: ...``
        message on any failure.
    """
    if not prompt:
        return "Please enter a prompt."
    try:
        url = "https://text.pollinations.ai/"
        params = {
            "model": model,
            "prompt": prompt,
        }
        # Only forward optional parameters the caller actually set.  Note
        # that truthiness checks mean seed=0 / max_tokens=0 are dropped,
        # matching the original behavior.
        if seed:
            params["seed"] = int(seed)
        if system:
            params["system"] = system
        if temperature is not None:
            params["temperature"] = temperature
        if max_tokens:
            params["max_tokens"] = int(max_tokens)
        if top_p is not None:
            params["top_p"] = top_p
        # timeout keeps a stuck request from freezing the UI indefinitely.
        response = requests.get(url, params=params, timeout=60)
        if response.status_code != 200:
            return f"Error: API returned status code {response.status_code}\n{response.text}"
        result_text = response.text
        try:
            # Pretty-print JSON payloads inside a fenced code block for the
            # Markdown output component.
            json_result = json.loads(result_text)
            return f"```json\n{json.dumps(json_result, indent=2)}\n```"
        except json.JSONDecodeError:
            # Not JSON — return the body verbatim.  (Narrowed from the
            # original bare `except:`.)
            return result_text
    except Exception as e:
        # Top-level boundary: surface any failure as a message in the UI
        # rather than crashing the Gradio callback.
        return f"Error: {str(e)}"
# Fetch the model list once at startup so the dropdown is populated even if
# the API is unreachable (get_available_models falls back to a static list).
available_models = get_available_models()

# Build the Gradio interface.
with gr.Blocks(title="Pollinations Text Generator") as demo:
    gr.Markdown(
        """
        # 🌸 Pollinations Text Generator
        Generate text using various AI models via the Pollinations API.
        Select a model and provide a prompt to get started!
        """
    )
    with gr.Row():
        # Left column: inputs and generation controls.
        with gr.Column():
            prompt_input = gr.Textbox(
                label="Prompt",
                placeholder="Enter your text prompt here...",
                lines=5
            )
            model_dropdown = gr.Dropdown(
                choices=available_models,
                label="Model",
                value=available_models[0] if available_models else "openai",
                info="Select the AI model to use for text generation"
            )
            with gr.Accordion("Advanced Settings", open=False):
                seed_input = gr.Number(
                    label="Seed (optional)",
                    value=None,
                    precision=0,
                    info="Random seed for reproducible results"
                )
                system_input = gr.Textbox(
                    label="System Prompt (optional)",
                    placeholder="Enter system instructions...",
                    lines=2,
                    info="System-level instructions for the model"
                )
                temperature_slider = gr.Slider(
                    minimum=0,
                    maximum=2,
                    value=0.7,
                    step=0.1,
                    label="Temperature",
                    info="Controls randomness (higher = more creative)"
                )
                max_tokens_slider = gr.Slider(
                    minimum=1,
                    maximum=2048,
                    value=512,
                    step=1,
                    label="Max Tokens",
                    info="Maximum length of the generated text"
                )
                top_p_slider = gr.Slider(
                    minimum=0,
                    maximum=1,
                    value=0.9,
                    step=0.05,
                    label="Top P",
                    info="Nucleus sampling parameter"
                )
            generate_btn = gr.Button("Generate", variant="primary")
        # Right column: rendered output plus a copyable plain-text mirror.
        with gr.Column():
            output_display = gr.Markdown(
                value="_Your generated text will appear here..._",
                label="Generated Text"
            )
            with gr.Accordion("Copy Output (Plain Text)", open=False):
                output_copy = gr.Textbox(
                    label="Copyable Output",
                    lines=15,
                    show_copy_button=True,
                    interactive=False
                )
    gr.Markdown(
        """
        ### About
        This Space uses the [Pollinations API](https://github.com/pollinations/pollinations) for text generation.
        The API supports multiple models and is free to use.
        **Parameters:**
        - **Model**: Choose from available AI models
        - **Seed**: Set a random seed for reproducible outputs
        - **System**: Provide system-level instructions
        - **Temperature**: Control response creativity (0=deterministic, 2=very creative)
        - **Max Tokens**: Set maximum response length
        - **Top P**: Control diversity via nucleus sampling
        """
    )

    def generate_and_display(prompt, model, seed, system, temp, max_tok, top_p):
        """Run generation and feed both output components.

        Returns the same text twice: once for the Markdown display and once
        for the plain-text copy box.
        """
        result = generate_text(prompt, model, seed, system, temp, max_tok, top_p)
        return result, result

    generate_btn.click(
        fn=generate_and_display,
        inputs=[
            prompt_input,
            model_dropdown,
            seed_input,
            system_input,
            temperature_slider,
            max_tokens_slider,
            top_p_slider
        ],
        outputs=[output_display, output_copy]
    )

# Launch the app when run as a script.
if __name__ == "__main__":
    demo.launch()