import streamlit as st
import pandas as pd
import asyncio
import aiohttp
import base64
import logging
import os
import re
import random
import string
from datetime import datetime, timedelta

# Placeholder API keys for OpenAI and Replicate. In a real deployment,
# load these from the environment (e.g. os.environ["OPENAI_API_KEY"])
# instead of hardcoding them.
openai_api_key = "your_openai_api_key_here"
replicate_api_token = "your_replicate_api_token_here"

# Placeholder for websites dictionary
websites = {
    1: {
        "url": "https://example.com/wp-json/wp/v2/",
        "user": "username",
        "password": "password",
    },
    # Add more websites as needed
}

# Setup logging
logging.basicConfig(
    level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s"
)


# Utility functions
def format_url(url):
    if not url.startswith(("http://", "https://")):
        return "https://" + url
    return url


def clean_text(text):
    # Strip markdown heading/bold markers. Longer runs must go first:
    # replacing "##" before "###"/"####" would leave stray "#" characters.
    return (
        text.replace("####", "")
        .replace("###", "")
        .replace("##", "")
        .replace("**", "")
        .strip()
    )


def get_auth_header(user, password):
    auth_string = f"{user}:{password}"
    base64_auth = base64.b64encode(auth_string.encode("utf-8")).decode("utf-8")
    return {"Authorization": f"Basic {base64_auth}", "User-Agent": "Mozilla/5.0"}


def format_permalink(title):
    # Build a URL slug from the first six words of the title.
    permalink = re.sub(r"[^a-zA-Z0-9\s]", "", title)
    words = permalink.lower().split()[:6]
    return "-".join(words)


def generate_random_string(length=4):
    return "".join(random.choices(string.ascii_lowercase + string.digits, k=length))


def get_custom_filename(keyword=None):
    random_suffix = generate_random_string()
    if keyword:
        return f"{keyword}-{random_suffix}.webp"
    return f"{random_suffix}.webp"
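

# Hypothetical helper for the category/tag placeholders in create_post_async
# below — a minimal sketch against the standard WP REST taxonomy endpoints
# ("categories" or "tags"). It assumes the term already exists on the site
# and returns None when it does not.
async def get_term_id(session, website, taxonomy, name):
    headers = get_auth_header(website["user"], website["password"])
    async with session.get(
        f"{website['url']}{taxonomy}", headers=headers, params={"search": name}
    ) as response:
        if response.status == 200:
            terms = await response.json()
            if terms:
                return terms[0]["id"]
    return None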
# Async functions
async def fetch_serper_results_async(keyword, input_country):
    # Serper.dev Google-search API; returns parsed JSON on success,
    # None on any failure.
    url = "https://google.serper.dev/search"
    headers = {
        "X-API-KEY": "your_serper_api_key_here",
        "Content-Type": "application/json",
    }
    payload = {"q": keyword, "gl": input_country}

    async with aiohttp.ClientSession() as session:
        try:
            async with session.post(url, headers=headers, json=payload) as response:
                if response.status == 200:
                    return await response.json()
                st.error(f"Failed to fetch Serper results: {response.status}")
        except Exception as e:
            st.error(f"Error fetching Serper results: {e}")
    return None
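

# A hypothetical sketch of how the static seo_elements placeholder in the
# Content Generation tab could instead be produced with the OpenAI chat
# completions endpoint. The prompt and the single-field output are
# illustrative assumptions, not the app's actual implementation.
async def generate_meta_description_async(keyword, model):
    headers = {
        "Authorization": f"Bearer {openai_api_key}",
        "Content-Type": "application/json",
    }
    payload = {
        "model": model,
        "messages": [
            {
                "role": "user",
                "content": f"Write a meta description under 160 characters for an article about: {keyword}",
            }
        ],
    }
    async with aiohttp.ClientSession() as session:
        try:
            async with session.post(
                "https://api.openai.com/v1/chat/completions",
                headers=headers,
                json=payload,
            ) as response:
                if response.status == 200:
                    data = await response.json()
                    return clean_text(data["choices"][0]["message"]["content"])
        except Exception as e:
            st.error(f"Error generating meta description: {e}")
    return None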
async def generate_image_async(image_prompt, image_output_count, image_ai_model):
    # Simplified placeholder call. The real Replicate API expects a model
    # "version" plus an "input" object and answers 201 with a prediction
    # that must be polled until its "output" is ready; the response shape
    # assumed here is a stand-in. image_ai_model is accepted but unused
    # until that is wired up.
    async with aiohttp.ClientSession() as session:
        try:
            async with session.post(
                "https://api.replicate.com/v1/predictions",
                json={"prompt": image_prompt, "num_outputs": image_output_count},
                headers={"Authorization": f"Token {replicate_api_token}"},
            ) as response:
                if response.status == 200:
                    data = await response.json()
                    return data["outputs"][0]["url"]
                st.error(f"Failed to generate image: {response.status}")
        except Exception as e:
            st.error(f"Error generating image: {e}")
    return None
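

# A minimal sketch of the upload_image_to_wp helper referenced at the
# media_id placeholder in the Content Generation tab. It follows the
# standard WP REST media endpoint; the content type is an assumption for
# the generated .webp images, and get_custom_filename above can supply
# the filename.
async def upload_image_to_wp(website, image_url, filename):
    headers = get_auth_header(website["user"], website["password"])
    async with aiohttp.ClientSession() as session:
        # Download the generated image, then re-upload it to WordPress.
        async with session.get(image_url) as img_response:
            if img_response.status != 200:
                return None
            image_bytes = await img_response.read()
        headers["Content-Disposition"] = f'attachment; filename="{filename}"'
        headers["Content-Type"] = "image/webp"
        async with session.post(
            f"{website['url']}media", headers=headers, data=image_bytes
        ) as response:
            if response.status in (200, 201):
                media = await response.json()
                return media["id"]
    return None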
async def create_post_async(
    content,
    website,
    keyword,
    description,
    media_id,
    category,
    meta_keywords,
    meta_description,
    cached_title,
    tag_names,
    slug_function,
    publish_status,
    post_date_adjustment,
):
    # `description`, `category`, and `tag_names` are accepted for the term
    # lookups but stay unused until those are implemented (see the
    # get_term_id sketch above).
    headers = get_auth_header(website["user"], website["password"])
    slug = slug_function(cached_title)
    category_id = 1  # Placeholder; resolve via get_term_id(..., "categories", category)
    tag_ids = []  # Placeholder; resolve via get_term_id(..., "tags", name) per tag
    post_date = (datetime.now() + timedelta(days=post_date_adjustment)).strftime(
        "%Y-%m-%dT%H:%M:%S"
    )

    data = {
        "title": cached_title,
        "slug": slug,
        "content": content,
        "status": publish_status,
        "categories": [category_id],
        "tags": tag_ids,
        "featured_media": media_id,
        "date": post_date,
        # Rank Math fields are only writable over REST if the site
        # registers them with show_in_rest (e.g. via register_meta).
        "meta": {
            "rank_math_focus_keyword": keyword,
            "rank_math_description": meta_description,
            "rank_math_keywords": meta_keywords,
        },
    }

    async with aiohttp.ClientSession() as session:
        try:
            async with session.post(
                f"{website['url']}posts", headers=headers, json=data
            ) as response:
                response.raise_for_status()
                post_data = await response.json()
                permalink = post_data.get("link", "")
                post_id = post_data["id"]
                st.success(f"Post published successfully. Permalink: {permalink}")
                return permalink, post_id, cached_title
        except Exception as e:
            st.error(f"Failed to publish post: {e}")
    # Return a single None on failure so callers can test truthiness
    # (a (None, None, None) tuple would be truthy).
    return None
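

# Hypothetical helpers for the Post Editing and Post Deletion tabs below.
# Minimal sketches against WP core endpoints: POST /posts/<id> applies a
# partial update; DELETE /posts/<id> moves a post to the trash (adding
# force=true would delete permanently).
async def update_post_async(website, post_id, title=None, content=None):
    headers = get_auth_header(website["user"], website["password"])
    # Send only the fields the user actually filled in.
    data = {k: v for k, v in {"title": title, "content": content}.items() if v}
    async with aiohttp.ClientSession() as session:
        async with session.post(
            f"{website['url']}posts/{post_id}", headers=headers, json=data
        ) as response:
            return response.status == 200


async def delete_post_async(website, post_id):
    headers = get_auth_header(website["user"], website["password"])
    async with aiohttp.ClientSession() as session:
        async with session.delete(
            f"{website['url']}posts/{post_id}", headers=headers
        ) as response:
            return response.status == 200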
# Streamlit UI
st.title("WordPress Content Manager")

# Sidebar for global settings
st.sidebar.header("Global Settings")
# Language and model choices are placeholders until content generation
# is wired to OpenAI.
input_language = st.sidebar.selectbox(
    "Input Language", ["English", "Spanish", "French", "German"]
)
input_openai_model = st.sidebar.selectbox("OpenAI Model", ["gpt-3.5-turbo", "gpt-4"])
input_country = st.sidebar.selectbox("Search Country", ["us", "uk", "ca", "au"])
image_output_count = st.sidebar.number_input(
    "Number of Images to Generate", min_value=1, max_value=5, value=1
)
image_ai_model = st.sidebar.selectbox(
    "Image AI Model", ["stability-ai/stable-diffusion", "midjourney/v4"]
)
# Negative values backdate posts; positive values schedule them ahead.
post_date_adjustment = st.sidebar.number_input("Post Date Adjustment (days)", value=-5)
publish_status = st.sidebar.selectbox(
    "Publish Status", ["publish", "draft", "pending", "private"]
)

# Main content area
tab1, tab2, tab3, tab4 = st.tabs(
    ["Content Generation", "Post Editing", "Post Deletion", "Bulk Operations"]
)

with tab1:
    st.header("Content Generation")

    # Input for website URL and keywords. Note: website_url is collected
    # but not used yet; posts below always go to websites[1].
    website_url = st.text_input("Website URL")
    keywords = st.text_area("Keywords (one per line)")

    if st.button("Generate Content"):
        if website_url and keywords:
            keywords_list = [k.strip() for k in keywords.splitlines() if k.strip()]

            for keyword in keywords_list:
                st.write(f"Processing keyword: {keyword}")

                # Fetch SERP data
                serper_data = asyncio.run(
                    fetch_serper_results_async(keyword, input_country)
                )

                if serper_data:
                    st.write("SERP data fetched successfully")

                    # Static SEO elements for now; see the
                    # generate_meta_description_async sketch above for an
                    # OpenAI-backed alternative.
                    seo_elements = {
                        "meta_keywords": f"{keyword}, related, terms",
                        "meta_description": f"Description for {keyword}",
                        "img_alt": f"Image related to {keyword}",
                        "website_title": f"Title for {keyword}",
                        "ai_img_prompt": f"Generate an image for {keyword}",
                        "website_category": "General",
                    }

                    # Generate content (placeholder)
                    content = f"<h1>Article about {keyword}</h1><p>This is a placeholder for the generated content.</p>"

                    # Generate image
                    image_url = asyncio.run(
                        generate_image_async(
                            seo_elements["ai_img_prompt"],
                            image_output_count,
                            image_ai_model,
                        )
                    )

                    if image_url:
                        st.image(image_url, caption=f"Generated image for {keyword}")

                    # Publish post
                    website = websites[1]  # Using the first website as an example
                    # Placeholder; see the upload_image_to_wp sketch above
                    # for resolving a real attachment ID.
                    media_id = 1

                    result = asyncio.run(
                        create_post_async(
                            content,
                            website,
                            keyword,
                            seo_elements["meta_description"],
                            media_id,
                            seo_elements["website_category"],
                            seo_elements["meta_keywords"],
                            seo_elements["meta_description"],
                            seo_elements["website_title"],
                            [keyword],
                            format_permalink,
                            publish_status,
                            post_date_adjustment,
                        )
                    )

                    # create_post_async returns None on failure, so this
                    # branch only runs for a real post.
                    if result:
                        permalink, post_id, article_title = result
                        st.success(f"Post published: {article_title}")
                        st.write(f"Permalink: {permalink}")
                        st.write(f"Post ID: {post_id}")
                else:
                    st.error(f"Failed to fetch SERP data for {keyword}")
        else:
            st.warning("Please enter a website URL and at least one keyword.")

with tab2:
    st.header("Post Editing")

    edit_post_id = st.number_input("Post ID to Edit", min_value=1)
    edit_website = st.selectbox("Select Website", list(websites.keys()))

    edit_title = st.text_input("New Title (optional)")
    edit_content = st.text_area("New Content (optional)")

    if st.button("Update Post"):
        if edit_post_id and edit_website:
            # Wired to the hedged update_post_async sketch above.
            updated = asyncio.run(
                update_post_async(
                    websites[edit_website], edit_post_id, edit_title, edit_content
                )
            )
            if updated:
                st.success(
                    f"Post {edit_post_id} updated successfully on {websites[edit_website]['url']}"
                )
            else:
                st.error(f"Failed to update post {edit_post_id}.")
        else:
            st.warning("Please enter a Post ID and select a website.")

with tab3:
    st.header("Post Deletion")

    delete_post_id = st.number_input("Post ID to Delete", min_value=1)
    delete_website = st.selectbox("Select Website for Deletion", list(websites.keys()))

    if st.button("Delete Post"):
        if delete_post_id and delete_website:
            # Wired to the hedged delete_post_async sketch above.
            deleted = asyncio.run(
                delete_post_async(websites[delete_website], delete_post_id)
            )
            if deleted:
                st.success(
                    f"Post {delete_post_id} deleted successfully from {websites[delete_website]['url']}"
                )
            else:
                st.error(f"Failed to delete post {delete_post_id}.")
        else:
            st.warning("Please enter a Post ID and select a website.")

with tab4:
    st.header("Bulk Operations")

    uploaded_file = st.file_uploader("Upload CSV file", type="csv")

    if uploaded_file is not None:
        df = pd.read_csv(uploaded_file)
        st.write(df)

        if st.button("Process Bulk Operations"):
            # Placeholder for bulk operations logic
            st.info("Processing bulk operations...")
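            # Hypothetical sketch of the row loop — assumes each CSV row
            # carries an "operation" column naming the action plus the
            # fields that action needs; adapt to your actual schema.
            for _, row in df.iterrows():
                st.write(f"Would process: {row.to_dict()}")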
            # Replace the echo above with real create/update/delete calls
            # once the CSV schema is settled.
            st.success("Bulk operations completed successfully")

# Footer
st.markdown("---")
st.write("WordPress Content Manager v1.0")