diff --git a/scripts/news_digest/README.md b/scripts/news_digest/README.md
index 0d5396e..c434a31 100644
--- a/scripts/news_digest/README.md
+++ b/scripts/news_digest/README.md
@@ -33,7 +33,7 @@ Edit `config.json` to add feeds and adjust settings:
   },
   "ollama": {
     "model": "kamekichi128/qwen3-4b-instruct-2507",
-    "prompt": "Summarize the following news article in 2-3 concise sentences:"
+    "prompt": "Summarize the following news article in 2-3 concise sentences (around 50 words):"
   },
   "feeds": [
     {
diff --git a/scripts/news_digest/config.json b/scripts/news_digest/config.json
index 1ec2396..977dccb 100644
--- a/scripts/news_digest/config.json
+++ b/scripts/news_digest/config.json
@@ -6,7 +6,7 @@
   },
   "ollama": {
     "model": "kamekichi128/qwen3-4b-instruct-2507:latest",
-    "prompt": "Summarize the following news article in 2-3 concise sentences:"
+    "prompt": "Summarize the following news article in 2-3 concise sentences (around 50 words):"
   },
   "feeds": [
     {
diff --git a/scripts/news_digest/main.py b/scripts/news_digest/main.py
index 12b244c..6c63ec6 100644
--- a/scripts/news_digest/main.py
+++ b/scripts/news_digest/main.py
@@ -309,7 +309,7 @@ def _run_test(mode: str, config: dict) -> None:
         sys.exit(1)
 
     model = ollama_cfg.get("model", "kamekichi128/qwen3-4b-instruct-2507")
-    prompt = ollama_cfg.get("prompt", "Summarize the following news article in 2-3 concise sentences:")
+    prompt = ollama_cfg.get("prompt", "Summarize the following news article in 2-3 concise sentences (around 50 words):")
 
     # Build test inputs: hardcoded articles + fetched article (full mode only)
     articles = list(_TEST_ARTICLES)
@@ -388,7 +388,7 @@ def main():
     ollama_cfg = config.get("ollama")
     if ollama_cfg:
         ollama_model = ollama_cfg.get("model", "kamekichi128/qwen3-4b-instruct-2507")
-        ollama_prompt = ollama_cfg.get("prompt", "Summarize the following news article in 2-3 concise sentences:")
+        ollama_prompt = ollama_cfg.get("prompt", "Summarize the following news article in 2-3 concise sentences (around 50 words):")
         logger.debug("Ollama summarization enabled (model: %s)", ollama_model)
     else:
         ollama_model = ollama_prompt = None