TravelContentCreator/utils/tweet/content_generator.py

#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Content generation module.
"""
import json
import logging
from typing import Dict, Any

from core.ai import AIAgent
from core.config import GenerateTopicConfig  # We might need a more specific content config later
from utils.prompts import PromptTemplate
from utils.file_io import OutputManager

logger = logging.getLogger(__name__)


class ContentPromptBuilder:
    """Builds the prompts used to generate tweet content."""

    def __init__(self, system_prompt_path: str):
        self.system_template = PromptTemplate(system_prompt_path)

    def build_prompts(self, topic: Dict[str, Any]) -> Dict[str, str]:
        """
        Build the system and user prompts for a single topic.

        Args:
            topic: Dictionary holding the topic information.

        Returns:
            Dictionary with "system" and "user" prompts.
        """
        # In this simplified version the user prompt is just the topic itself,
        # formatted as a string. More elaborate template logic can be added later.
        user_prompt = "Please generate content based on the following topic information:\n\n"
        for key, value in topic.items():
            user_prompt += f"- {key}: {value}\n"

        system_prompt = self.system_template.render(**topic)
        return {
            "system": system_prompt,
            "user": user_prompt,
        }
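
# Illustration only: the expected shape of build_prompts output. The prompt path
# and the topic keys below ("title", "city") are hypothetical examples, not a
# fixed schema defined by this project.
#
#   builder = ContentPromptBuilder("prompts/content_system.txt")
#   prompts = builder.build_prompts({"title": "48 hours in Kyoto", "city": "Kyoto"})
#   # prompts["system"] -> the system template rendered with the topic fields
#   # prompts["user"]   -> "Please generate content based on the following topic information:\n\n"
#   #                      "- title: 48 hours in Kyoto\n- city: Kyoto\n"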


class ContentGenerator:
    """Generates content for a single topic."""

    def __init__(self, ai_agent: AIAgent, config: GenerateTopicConfig, output_manager: OutputManager):
        self.ai_agent = ai_agent
        self.config = config  # This might need to be a more specific content config
        self.output_manager = output_manager
        # Assuming the content system prompt is defined in the topic config for now
        self.prompt_builder = ContentPromptBuilder(
            system_prompt_path=self.config.content_system_prompt
        )

    def generate(self, topic: Dict[str, Any], topic_index: int, variant_index: int) -> Dict[str, Any]:
        """
        Generate one content variant for a single topic.

        Args:
            topic: Dictionary holding the topic information.
            topic_index: Index of the topic.
            variant_index: Index of the variant.

        Returns:
            Dictionary holding the generated content, or an "error" entry on failure.
        """
        logger.info(f"Generating content variant {variant_index} for topic {topic_index}...")

        # 1. Build the prompts and persist them for traceability.
        prompts = self.prompt_builder.build_prompts(topic)
        variant_dir = self.output_manager.get_variant_dir(topic_index, variant_index)
        self.output_manager.save_text(prompts["system"], "content_system_prompt.txt", subdir=variant_dir.name)
        self.output_manager.save_text(prompts["user"], "content_user_prompt.txt", subdir=variant_dir.name)

        # 2. Call the AI agent.
        try:
            raw_result, _, _, _ = self.ai_agent.work(
                system_prompt=prompts["system"],
                user_prompt=prompts["user"]
            )
            self.output_manager.save_text(raw_result, "content_raw_response.txt", subdir=variant_dir.name)
        except Exception as e:
            logger.critical(f"AI call failed while generating content for {topic_index}_{variant_index}: {e}", exc_info=True)
            return {"error": str(e)}

        # 3. Parse and save the result (simplified: save the JSON directly).
        # A real implementation would need a ContentParser similar to TopicParser.
        try:
            # Simple assumption: the raw result is a JSON string.
            content_data = json.loads(raw_result)
            self.output_manager.save_json(content_data, "article.json", subdir=variant_dir.name)
            logger.info(f"Successfully generated and saved content for {topic_index}_{variant_index}.")
            return content_data
        except json.JSONDecodeError as e:
            logger.error(f"Failed to parse content JSON for {topic_index}_{variant_index}: {e}")
            return {"error": "JSONDecodeError", "raw_content": raw_result}
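

# Minimal usage sketch. The constructor arguments of AIAgent, GenerateTopicConfig
# and OutputManager are placeholders here (their real signatures live elsewhere in
# the project); the only requirements this module relies on are the attributes and
# methods used above: config.content_system_prompt, output.get_variant_dir/save_text/
# save_json, and agent.work(system_prompt=..., user_prompt=...).
#
#   agent = AIAgent(...)                      # however the project builds its agent
#   config = GenerateTopicConfig(...)         # must expose .content_system_prompt
#   output = OutputManager(...)               # must expose the save/get_variant_dir methods
#   generator = ContentGenerator(agent, config, output)
#   result = generator.generate(
#       topic={"title": "48 hours in Kyoto", "tone": "casual"},  # hypothetical topic fields
#       topic_index=0,
#       variant_index=0,
#   )
#   if "error" in result:
#       logger.warning("Content generation failed: %s", result["error"])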