add agent workspace
This commit is contained in:
16
scripts/email_processor/config.json
Normal file
16
scripts/email_processor/config.json
Normal file
@@ -0,0 +1,16 @@
|
||||
{
|
||||
"imap": {
|
||||
"host": "imap.migadu.com",
|
||||
"port": 993,
|
||||
"email": "youlu@luyanxin.com",
|
||||
"password": "kDkNau2r7m.hV!uk*D4Yr8mC7Dyjx9T"
|
||||
},
|
||||
"ollama": {
|
||||
"host": "http://localhost:11434",
|
||||
"model": "qwen3:4b"
|
||||
},
|
||||
"rules": {
|
||||
"max_body_length": 1000,
|
||||
"check_unseen_only": true
|
||||
}
|
||||
}
|
||||
52
scripts/email_processor/data/pending_emails.json
Normal file
52
scripts/email_processor/data/pending_emails.json
Normal file
@@ -0,0 +1,52 @@
|
||||
{
|
||||
"msg_f1d43ea3": {
|
||||
"imap_uid": "2",
|
||||
"subject": "Delivered: \"Voikinfo Bottom Gusset Bags...\"",
|
||||
"sender": "\"Amazon.com - order-update(a)amazon.com\"\r\n <order-update_at_amazon_com_posyo@simplelogin.co>",
|
||||
"recipient": "sho.amazon@ylu17.com",
|
||||
"summary": "Your Amazon package (order #114-1496788-7649829) was delivered today to Argo, Los Angeles, CA and left near the front door or porch.",
|
||||
"email_date": "Wed, 18 Feb 2026 04:15:24 +0000",
|
||||
"status": "pending",
|
||||
"found_at": "2026-02-18T16:18:42.347538"
|
||||
},
|
||||
"msg_60c56a87": {
|
||||
"imap_uid": "3",
|
||||
"subject": "=?UTF-8?b?5L2V5LiN5ruh6Laz6Ieq5bex55qE5Y+j6IW55LmL5qyy?=",
|
||||
"sender": "\"Uber Eats - uber(a)uber.com\" <uber_at_uber_com_kjwzyhxn@simplelogin.co>",
|
||||
"recipient": "uber@ylu17.com",
|
||||
"summary": "Uber Eats has sent a notification that the user's order is ready for pickup.",
|
||||
"email_date": "Wed, 18 Feb 2026 11:36:59 +0000",
|
||||
"status": "pending",
|
||||
"found_at": "2026-02-18T08:05:56.594842"
|
||||
},
|
||||
"msg_ebd24205": {
|
||||
"imap_uid": "4",
|
||||
"subject": "Your order has been shipped (or closed if combined/delivered).",
|
||||
"sender": "\"cd(a)woodenswords.com\"\r\n <cd_at_woodenswords_com_xivwijojc@simplelogin.co>",
|
||||
"recipient": "mail@luyx.org",
|
||||
"summary": "This email confirms that your order has been shipped or closed (if combined/delivered).",
|
||||
"email_date": "Wed, 18 Feb 2026 16:07:58 +0000",
|
||||
"status": "pending",
|
||||
"found_at": "2026-02-18T12:01:19.048091"
|
||||
},
|
||||
"msg_fa73b3bd": {
|
||||
"imap_uid": "6",
|
||||
"subject": "=?UTF-8?Q?Yanxin,_I=E2=80=99m_still_waiting_for_your_response?=",
|
||||
"sender": "\"Arslan (via LinkedIn) - messages-noreply(a)linkedin.com\"\r\n <messages-noreply_at_linkedin_com_ajpnalmwp@simplelogin.co>",
|
||||
"recipient": "Yanxin Lu <acc.linkedin@ylu17.com>",
|
||||
"summary": "Arslan Ahmed, a Senior AI | ML | Full Stack Engineer from Ilford, invited you to connect on February 11, 2026 at 10:08 PM and is waiting for your response.",
|
||||
"email_date": "Wed, 18 Feb 2026 18:53:45 +0000 (UTC)",
|
||||
"status": "pending",
|
||||
"found_at": "2026-02-18T12:04:34.602407"
|
||||
},
|
||||
"msg_59f23736": {
|
||||
"imap_uid": "1",
|
||||
"subject": "New Software Engineer jobs that match your profile",
|
||||
"sender": "\"LinkedIn - jobs-noreply(a)linkedin.com\"\r\n <jobs-noreply_at_linkedin_com_zuwggfxh@simplelogin.co>",
|
||||
"recipient": "Yanxin Lu <acc.linkedin@ylu17.com>",
|
||||
"summary": "LinkedIn has notified the user of new software engineering jobs that match their profile and includes a link to update their top card.",
|
||||
"email_date": "Wed, 18 Feb 2026 02:07:12 +0000 (UTC)",
|
||||
"status": "pending",
|
||||
"found_at": "2026-02-18T16:16:00.784822"
|
||||
}
|
||||
}
|
||||
50
scripts/email_processor/logs/2026-02-15.log
Normal file
50
scripts/email_processor/logs/2026-02-15.log
Normal file
@@ -0,0 +1,50 @@
|
||||
[2026-02-15 21:14:02] KEPT: Please confirm your mailbox youlu@luyanxin.com
|
||||
From: "noreply@simplelogin.io" <noreply@simplelogin.io>
|
||||
Analysis: KEEP: Legitimate service confirmation email for mailbox addition (not promotional)
|
||||
|
||||
[2026-02-15 21:15:04] KEPT: =?utf-8?B?RndkOiBHZXQgMTAlIG9mZiB5b3VyIG5leHQgb3JkZXIg4pyF?=
|
||||
From: "Yanxin Lu - crac1017(a)hotmail.com"
|
||||
<crac1017_at_hotmail_com_fndbbu@simplelogin.co>
|
||||
Analysis: KEEP: error - HTTPConnectionPool(host='localhost', port=11434): Read timed out. (read timeout=60)
|
||||
|
||||
[2026-02-15 21:15:37] KEPT:
|
||||
=?utf-8?B?RndkOiDigJxzb2Z0d2FyZSBlbmdpbmVlcuKAnTogTWljcm9
|
||||
From: "Yanxin Lu - crac1017(a)hotmail.com"
|
||||
<crac1017_at_hotmail_com_fndbbu@simplelogin.co>
|
||||
Analysis: KEEP: LinkedIn job alert notification for subscribed job search (not promotional)
|
||||
|
||||
[2026-02-15 21:15:52] KEPT: Fwd: Your receipt from OpenRouter, Inc #2231-9732
|
||||
From: "Yanxin Lu - crac1017(a)hotmail.com"
|
||||
<crac1017_at_hotmail_com_fndbbu@simplelogin.co>
|
||||
Analysis: KEEP: This is a legitimate receipt for a payment made to OpenRouter, Inc (a known AI service provider), not promotional content.
|
||||
|
||||
[2026-02-15 21:16:10] KEPT: Fwd: Your ChatGPT code is 217237
|
||||
From: "Yanxin Lu - crac1017(a)hotmail.com"
|
||||
<crac1017_at_hotmail_com_fndbbu@simplelogin.co>
|
||||
Analysis: KEEP: Legitimate security verification code from OpenAI (standard login confirmation)
|
||||
|
||||
[2026-02-15 22:49:44] KEPT (69.0s): =?UTF-8?B?5rWL6K+V6YKu5Lu2?=
|
||||
From: Yanxin Lu <lyx@luyanxin.com>
|
||||
Analysis: KEEP: Test email for delivery verification
|
||||
|
||||
From: Yanxin Lu <lyx@luyanxin.com>
|
||||
Analysis: KEEP: Test email for delivery verification
|
||||
|
||||
[2026-02-15 22:57:03] MOVED_TO_TRASH (68.5s): =?utf-8?B?RndkOiBHZXQgMTAlIG9mZiB5b3VyIG5leHQgb3JkZXIg4pyF?=
|
||||
From: "Yanxin Lu - crac1017(a)hotmail.com"
|
||||
<crac1017_at_hotmail_com_fndbbu@simplelogin.co>
|
||||
Analysis: AD: Forwarded Uber promotional offer
|
||||
|
||||
From: "Yanxin Lu - crac1017(a)hotmail.com"
|
||||
<crac1017_at_hotmail_com_fndbbu@simplelogin.co>
|
||||
Analysis: AD: Forwarded Uber promotional offer
|
||||
|
||||
[2026-02-15 23:00:09] KEPT (120.1s): Fwd: Your ChatGPT code is 217237
|
||||
From: "Yanxin Lu - crac1017(a)hotmail.com"
|
||||
<crac1017_at_hotmail_com_fndbbu@simplelogin.co>
|
||||
Analysis: KEEP: error - HTTPConnectionPool(host='localhost', port=11434): Read timed out. (read timeout=120)
|
||||
|
||||
From: "Yanxin Lu - crac1017(a)hotmail.com"
|
||||
<crac1017_at_hotmail_com_fndbbu@simplelogin.co>
|
||||
Analysis: KEEP: error - HTTPConnectionPool(host='localhost', port=11434): Read timed out. (read timeout=120)
|
||||
|
||||
29
scripts/email_processor/logs/2026-02-18.log
Normal file
29
scripts/email_processor/logs/2026-02-18.log
Normal file
@@ -0,0 +1,29 @@
|
||||
[2026-02-18 08:04:26] ADDED_TO_PENDING (msg_f1d43ea3) (108.6s): Delivered: "Voikinfo Bottom Gusset Bags..."
|
||||
From: "Amazon.com - order-update(a)amazon.com"
|
||||
<order-update_at_amazon_com_posyo@simplelogin.co>
|
||||
Analysis: KEEP: Standard delivery confirmation from Amazon
|
||||
|
||||
[2026-02-18 08:05:56] ADDED_TO_PENDING (msg_60c56a87) (88.0s): =?UTF-8?b?5L2V5LiN5ruh6Laz6Ieq5bex55qE5Y+j6IW55LmL5qyy?=
|
||||
From: "Uber Eats - uber(a)uber.com" <uber_at_uber_com_kjwzyhxn@simplelogin.co>
|
||||
Analysis: KEEP: The decoded subject line "Your Uber Eats order is ready!" indicates a transactional order update, not an advertisement.
|
||||
|
||||
[2026-02-18 12:01:19] ADDED_TO_PENDING (msg_ebd24205) (66.7s): Your order has been shipped (or closed if combined/delivered
|
||||
From: "cd(a)woodenswords.com"
|
||||
<cd_at_woodenswords_com_xivwijojc@simplelogin.co>
|
||||
Analysis: KEEP: System-generated shipping update notification from an e-commerce store, not promotional content.
|
||||
|
||||
[2026-02-18 12:03:36] MOVED_TO_TRASH (133.4s): =?UTF-8?Q?=E2=80=9Csoftware_engineer=E2=80=9D:_Snap_Inc._-_S
|
||||
From: "LinkedIn Job Alerts - jobalerts-noreply(a)linkedin.com"
|
||||
<jobalerts-noreply_at_linkedin_com_cnrlhok@simplelogin.co>
|
||||
Analysis: AD: This email is a promotional job alert notification from LinkedIn's service for users who have set up job preferences.
|
||||
|
||||
[2026-02-18 12:04:34] ADDED_TO_PENDING (msg_fa73b3bd) (57.3s): =?UTF-8?Q?Yanxin,_I=E2=80=99m_still_waiting_for_your_respons
|
||||
From: "Arslan (via LinkedIn) - messages-noreply(a)linkedin.com"
|
||||
<messages-noreply_at_linkedin_com_ajpnalmwp@simplelogin.co>
|
||||
Analysis: KEEP: This is a standard LinkedIn connection request notification with no promotional content, discounts, or advertisements—only a reminder of an existing invitation.
|
||||
|
||||
[2026-02-18 16:18:42] ADDED_TO_PENDING (msg_f1d43ea3) (102.1s): Delivered: "Voikinfo Bottom Gusset Bags..."
|
||||
From: "Amazon.com - order-update(a)amazon.com"
|
||||
<order-update_at_amazon_com_posyo@simplelogin.co>
|
||||
Analysis: KEEP: Standard delivery confirmation from Amazon, not a promotional message.
|
||||
|
||||
295
scripts/email_processor/main.py
Normal file
295
scripts/email_processor/main.py
Normal file
@@ -0,0 +1,295 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Email Processor - Auto filter ads using local Qwen3
|
||||
Moves ad emails to Trash folder (not permanently deleted)
|
||||
"""
|
||||
|
||||
import json
|
||||
import imaplib
|
||||
import email
|
||||
import os
|
||||
import sys
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
|
||||
# Config
|
||||
SCRIPT_DIR = Path(__file__).parent
|
||||
CONFIG_FILE = SCRIPT_DIR / "config.json"
|
||||
LOGS_DIR = SCRIPT_DIR / "logs"
|
||||
DATA_DIR = SCRIPT_DIR / "data"
|
||||
PENDING_FILE = DATA_DIR / "pending_emails.json"
|
||||
|
||||
def load_config():
    """Read and parse the JSON configuration file next to this script."""
    with CONFIG_FILE.open() as cfg:
        return json.load(cfg)
|
||||
|
||||
def connect_imap(config):
    """Open an SSL IMAP session and authenticate with config['imap'] credentials."""
    creds = config['imap']
    conn = imaplib.IMAP4_SSL(creds['host'], creds['port'])
    conn.login(creds['email'], creds['password'])
    return conn
|
||||
|
||||
def get_unseen_emails(mail):
    """Return the sequence numbers (bytes) of all unread messages in INBOX."""
    mail.select('INBOX')
    _, found = mail.search(None, 'UNSEEN')
    return found[0].split()
|
||||
|
||||
def fetch_email(mail, email_id):
    """Fetch one message and return a summary dict.

    Args:
        mail: Authenticated imaplib connection with INBOX selected.
        email_id: Message sequence number (bytes) as returned by search().

    Returns:
        dict with keys 'id', 'subject', 'sender', 'recipient', 'date' and
        'body' (first text/plain part, truncated to 300 characters).
    """
    _, msg_data = mail.fetch(email_id, '(RFC822)')
    raw_email = msg_data[0][1]
    msg = email.message_from_bytes(raw_email)

    # Missing headers fall back to placeholders rather than None.
    subject = msg['Subject'] or '(No Subject)'
    sender = msg['From'] or '(Unknown)'
    recipient = msg['To'] or '(Unknown)'
    date = msg['Date'] or datetime.now().isoformat()

    # Prefer the first decodable text/plain part; otherwise the top-level payload.
    body = ""
    if msg.is_multipart():
        for part in msg.walk():
            if part.get_content_type() == "text/plain":
                try:
                    body = part.get_payload(decode=True).decode('utf-8', errors='ignore')
                    break
                except Exception:
                    # Malformed part (e.g. payload is None) - try the next one.
                    # Was a bare `except:`, which also swallowed KeyboardInterrupt.
                    pass
    else:
        try:
            body = msg.get_payload(decode=True).decode('utf-8', errors='ignore')
        except Exception:
            # Non-decodable payload: leave body empty rather than crash.
            pass

    return {
        'id': email_id,
        'subject': subject,
        'sender': sender,
        'recipient': recipient,
        'date': date,
        'body': body[:300]  # Limit body length
    }
|
||||
|
||||
def analyze_with_qwen3(email_data, config):
    """Analyze email with local Qwen3 using official library.

    Performs ad classification and one-sentence summarization in a single
    model call, then parses the structured "IsAD/Summary/Reason" reply.

    Args:
        email_data: dict from fetch_email() ('subject', 'sender', 'body').
        config: Parsed config.json; config['ollama']['model'] selects the model.

    Returns:
        (result, summary, is_ad, duration): result is an "AD: ..." or
        "KEEP: ..." string, duration is wall-clock seconds. On any
        exception the email is conservatively treated as KEEP.
    """
    import ollama
    import time

    prompt = f"""Analyze this email and provide two pieces of information:

1. Is this an advertisement/promotional email?
2. Summarize the email in one sentence

Email details:
Subject: {email_data['subject']}
Sender: {email_data['sender']}
Body: {email_data['body'][:300]}

Respond in this exact format:
IsAD: [YES or NO]
Summary: [one sentence summary]
Reason: [brief explanation]
"""

    start_time = time.time()
    model = config['ollama'].get('model', 'qwen3:4b')

    try:
        # Low temperature for deterministic classification output.
        response = ollama.generate(model=model, prompt=prompt, options={'temperature': 0.1})
        output = response['response']

        # Parse output line by line; later matching lines overwrite earlier ones.
        is_ad = False
        summary = "No summary"
        reason = "Unknown"

        for line in output.strip().split('\n'):
            if line.startswith('IsAD:'):
                is_ad = 'YES' in line.upper()
            elif line.startswith('Summary:'):
                summary = line.replace('Summary:', '').strip()[:200]
            elif line.startswith('Reason:'):
                reason = line.replace('Reason:', '').strip()

        if is_ad:
            result = f"AD: {reason}"
        else:
            result = f"KEEP: {reason}"

    except Exception as e:
        # Fail open: on model/connection errors keep the email so nothing
        # is trashed without a successful analysis (caller retries later).
        result = f"KEEP: error - {str(e)[:100]}"
        summary = "Analysis failed"
        is_ad = False

    duration = time.time() - start_time
    return result, summary, is_ad, duration
|
||||
|
||||
def move_to_trash(mail, email_id):
    """Copy a message into the Trash folder and flag the original \\Deleted.

    The caller is responsible for expunging afterwards.

    Returns:
        True when the server accepted the copy, False otherwise.
    """
    status = mail.copy(email_id, 'Trash')
    if status[0] != 'OK':
        return False
    # Copy succeeded: mark the INBOX original for deletion.
    mail.store(email_id, '+FLAGS', '\\Deleted')
    return True
|
||||
|
||||
def log_result(log_file, email_data, analysis, action, duration=None):
    """Append one processing record to the daily log file.

    Args:
        log_file: Path of the log file (opened in append mode).
        email_data: dict with at least 'subject' and 'sender'.
        analysis: Classifier output string ("AD: ..."/"KEEP: ...").
        action: Action label, e.g. "MOVED_TO_TRASH".
        duration: Optional Qwen3 analysis duration in seconds.
    """
    timestamp = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    # `is not None` so a legitimate 0.0s duration is still logged;
    # the previous truthiness test silently dropped it.
    duration_str = f" ({duration:.1f}s)" if duration is not None else ""
    with open(log_file, 'a') as f:
        f.write(f"[{timestamp}] {action}{duration_str}: {email_data['subject'][:60]}\n")
        f.write(f" From: {email_data['sender']}\n")
        f.write(f" Analysis: {analysis}\n\n")
|
||||
|
||||
def load_pending():
    """Return the pending-email dict, or {} when the file does not exist yet."""
    if PENDING_FILE.exists():
        with open(PENDING_FILE, 'r', encoding='utf-8') as fh:
            return json.load(fh)
    return {}
|
||||
|
||||
def save_pending(pending):
    """Write the pending-email dict to disk, creating the data dir if needed."""
    DATA_DIR.mkdir(exist_ok=True)
    serialized = json.dumps(pending, indent=2, ensure_ascii=False)
    with open(PENDING_FILE, 'w', encoding='utf-8') as fh:
        fh.write(serialized)
|
||||
|
||||
def add_to_pending(email_data, summary, imap_uid, recipient):
    """Record a non-ad email in the pending queue and return its queue ID.

    Args:
        email_data: dict from fetch_email() (uses 'subject', 'sender', 'date').
        summary: One-sentence summary produced by the analyzer.
        imap_uid: IMAP identifier of the message (stored as a string).
        recipient: Recipient address to record.

    Returns:
        The generated "msg_<hash8>" queue key.
    """
    import hashlib

    pending = load_pending()

    # Derive a stable short ID from UID + subject. Built as a separate
    # string because nesting same-quote lookups inside an f-string
    # (f'..{d['k']}..') is a SyntaxError on Python < 3.12.
    key_material = f"{imap_uid}_{email_data['subject']}"
    msg_id = f"msg_{hashlib.md5(key_material.encode()).hexdigest()[:8]}"

    # Extract date from email; fall back to "now" if the header was missing.
    email_date = email_data.get('date', datetime.now().isoformat())

    pending[msg_id] = {
        "imap_uid": str(imap_uid),
        "subject": email_data['subject'],
        "sender": email_data['sender'],
        "recipient": recipient,
        "summary": summary,
        "email_date": email_date,
        "status": "pending",
        "found_at": datetime.now().isoformat()
    }

    save_pending(pending)
    return msg_id
|
||||
|
||||
def main():
    """Main processing function.

    Fetches unread INBOX mail, classifies each message with Qwen3, moves
    ads to Trash, queues everything else for manual handling, and prints
    a summary. Exits with status 1 on any unhandled error.
    """
    print("📧 Email Processor Starting...")

    # Load config
    config = load_config()

    # Setup logging: one log file per calendar day.
    LOGS_DIR.mkdir(exist_ok=True)
    log_file = LOGS_DIR / f"{datetime.now().strftime('%Y-%m-%d')}.log"

    try:
        # Connect to IMAP
        print("Connecting to IMAP...")
        mail = connect_imap(config)
        print("✅ Connected")

        # Get unseen emails
        email_ids = get_unseen_emails(mail)
        print(f"Found {len(email_ids)} unread emails")

        if not email_ids:
            print("No new emails to process")
            mail.logout()
            return

        # Process each email
        processed = 0
        moved_to_trash = 0
        added_to_pending = 0

        for email_id in email_ids:
            print(f"\nProcessing email {email_id.decode()}...")

            # Fetch email
            email_data = fetch_email(mail, email_id)
            print(f" Subject: {email_data['subject'][:50]}")

            # Analyze with Qwen3 (one call for both ad detection and summary)
            analysis, summary, is_ad, duration = analyze_with_qwen3(email_data, config)
            print(f" Analysis: {analysis[:100]}")
            print(f" Summary: {summary[:60]}...")
            print(f" Qwen3 time: {duration:.1f}s")

            # Check if analysis was successful (not an error)
            if 'error -' in analysis.lower():
                # Analysis failed - keep email unread for retry
                print(f" -> Analysis failed, keeping unread for retry")
                log_result(log_file, email_data, analysis, "FAILED_RETRY", duration)
                # Don't increment processed count - will retry next time
                continue

            # Analysis successful - determine action
            if is_ad:
                print(" -> Moving to Trash")
                if move_to_trash(mail, email_id):
                    log_result(log_file, email_data, analysis, "MOVED_TO_TRASH", duration)
                    moved_to_trash += 1
                else:
                    log_result(log_file, email_data, analysis, "MOVE_FAILED", duration)
            else:
                # Non-ad email - add to pending queue
                print(" -> Adding to pending queue")

                # Add to pending.
                # NOTE(review): email_id is a message *sequence number* from this
                # session, stored as "imap_uid"; sequence numbers are not stable
                # across sessions — verify before acting on it in process_queue.
                msg_internal_id = add_to_pending(
                    email_data,
                    summary,
                    email_id.decode(),
                    email_data.get('recipient', 'youlu@luyanxin.com')
                )

                # Mark as read (so it won't be processed again)
                mail.store(email_id, '+FLAGS', '\\Seen')

                log_result(log_file, email_data, analysis, f"ADDED_TO_PENDING ({msg_internal_id})", duration)
                added_to_pending += 1

            processed += 1

        # Expunge deleted emails
        mail.expunge()
        mail.logout()

        # Summary
        print(f"\n{'='*50}")
        print(f"Total emails checked: {len(email_ids)}")
        print(f"Successfully processed: {processed} emails")
        print(f" - Moved to trash (ads): {moved_to_trash}")
        print(f" - Added to pending queue: {added_to_pending}")
        print(f"Failed (will retry next time): {len(email_ids) - processed}")
        print(f"\n📁 Pending queue: {PENDING_FILE}")
        print(f"📝 Log: {log_file}")
        print(f"\n💡 Run 'python process_queue.py' to view and process pending emails")

    except Exception as e:
        print(f"❌ Error: {e}")
        sys.exit(1)
|
||||
|
||||
# Script entry point.
if __name__ == "__main__":
    main()
|
||||
28
scripts/email_processor/move_ad_to_trash.py
Normal file
28
scripts/email_processor/move_ad_to_trash.py
Normal file
@@ -0,0 +1,28 @@
|
||||
#!/usr/bin/env python3
"""Move specific email to trash.

One-off maintenance script: finds every INBOX message whose subject
contains "10% off" and moves it to the Trash folder, then expunges.
"""
import imaplib
import email

# Connect
# NOTE(review): credentials are hard-coded in plain text here (and in
# config.json) — consider loading them from config/env instead.
mail = imaplib.IMAP4_SSL('imap.migadu.com', 993)
mail.login('youlu@luyanxin.com', 'kDkNau2r7m.hV!uk*D4Yr8mC7Dyjx9T')
mail.select('INBOX')

# Search for the email with "10% off" in subject
_, search_data = mail.search(None, 'SUBJECT', '"10% off"')
email_ids = search_data[0].split()

print(f"Found {len(email_ids)} emails with '10% off' in subject")

for email_id in email_ids:
    # Copy to Trash
    result = mail.copy(email_id, 'Trash')
    if result[0] == 'OK':
        # Copy accepted: flag the INBOX original for deletion.
        mail.store(email_id, '+FLAGS', '\\Deleted')
        print(f"✅ Moved email {email_id.decode()} to Trash")
    else:
        print(f"❌ Failed to move email {email_id.decode()}")

# Remove the flagged originals and close the session.
mail.expunge()
mail.logout()
print("Done!")
|
||||
214
scripts/email_processor/process_queue.py
Normal file
214
scripts/email_processor/process_queue.py
Normal file
@@ -0,0 +1,214 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Email Queue Processor - Handle user commands for pending emails
|
||||
Reads pending_emails.json and executes user commands (archive/keep/reply)
|
||||
"""
|
||||
|
||||
import json
|
||||
import imaplib
|
||||
import os
|
||||
import sys
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
|
||||
SCRIPT_DIR = Path(__file__).parent
|
||||
DATA_FILE = SCRIPT_DIR / "data" / "pending_emails.json"
|
||||
|
||||
def load_pending():
    """Return the pending-email dict; empty dict when the file is absent."""
    if DATA_FILE.exists():
        with open(DATA_FILE, 'r', encoding='utf-8') as fh:
            return json.load(fh)
    return {}
|
||||
|
||||
def save_pending(pending):
    """Persist the pending-email dict as pretty-printed UTF-8 JSON."""
    DATA_FILE.parent.mkdir(exist_ok=True)
    payload = json.dumps(pending, indent=2, ensure_ascii=False)
    with open(DATA_FILE, 'w', encoding='utf-8') as fh:
        fh.write(payload)
|
||||
|
||||
def connect_imap(config):
    """Open an SSL IMAP connection and authenticate from config['imap']."""
    creds = config['imap']
    conn = imaplib.IMAP4_SSL(creds['host'], creds['port'])
    conn.login(creds['email'], creds['password'])
    return conn
|
||||
|
||||
def show_pending_list():
    """Print every still-pending queued email plus the available commands."""
    queue = load_pending()

    if not queue:
        print("📭 没有待处理的邮件")
        return

    print(f"\n📧 待处理邮件列表 ({len(queue)} 封)")
    print("=" * 60)

    # Oldest email first.
    ordered = sorted(queue.items(), key=lambda item: item[1].get('email_date', ''))

    for entry_id, info in ordered:
        if info.get('status') != 'pending':
            continue
        print(f"\n🆔 {entry_id}")
        print(f" 主题: {info.get('subject', 'N/A')[:50]}")
        print(f" 发件人: {info.get('sender', 'N/A')}")
        print(f" 收件人: {info.get('recipient', 'N/A')}")
        print(f" 时间: {info.get('email_date', 'N/A')}")
        print(f" 摘要: {info.get('summary', 'N/A')[:80]}")

    print("\n" + "=" * 60)
    print("\n可用指令:")
    print(" • 归档 [ID] - 移动到 Archive 文件夹")
    print(" • 保留 [ID] - 标记已读,留在收件箱")
    print(" • 删除 [ID] - 移动到 Trash")
    print(" • 全部处理 - 列出所有并批量操作")
|
||||
|
||||
def archive_email(config, msg_id):
    """Archive a specific email by ID.

    Copies the message to the Archive folder, deletes the INBOX original
    and marks the queue entry as done/archived.

    Args:
        config: Parsed config.json dict (IMAP credentials).
        msg_id: Queue key ("msg_...") from pending_emails.json.

    Returns:
        True on success, False otherwise.
    """
    pending = load_pending()

    if msg_id not in pending:
        print(f"❌ 未找到邮件 ID: {msg_id}")
        return False

    email_data = pending[msg_id]
    # NOTE(review): "imap_uid" is the message *sequence number* captured when
    # the mail was first scanned; sequence numbers are not stable across IMAP
    # sessions, so this may address the wrong message — verify.
    uid = email_data.get('imap_uid')

    if not uid:
        print(f"❌ 邮件 {msg_id} 没有 UID")
        return False

    try:
        mail = connect_imap(config)
        mail.select('INBOX')

        # Copy to Archive
        result = mail.copy(uid, 'Archive')
        if result[0] == 'OK':
            # Mark original as deleted, then expunge immediately.
            mail.store(uid, '+FLAGS', '\\Deleted')
            mail.expunge()

            # Update status in the queue only after the server succeeded.
            pending[msg_id]['status'] = 'done'
            pending[msg_id]['action'] = 'archived'
            pending[msg_id]['processed_at'] = datetime.now().isoformat()
            save_pending(pending)

            print(f"✅ 已归档: {email_data.get('subject', 'N/A')[:40]}")
            return True
        else:
            print(f"❌ 归档失败: {result}")
            return False

    except Exception as e:
        print(f"❌ 错误: {e}")
        return False
    finally:
        # Best-effort logout; the bare except also hides the NameError raised
        # when connect_imap failed before `mail` was bound.
        try:
            mail.logout()
        except:
            pass
|
||||
|
||||
def keep_email(config, msg_id):
    """Keep email in inbox, mark as read.

    Flags the message \\Seen on the server and marks the queue entry as
    done/kept.

    Args:
        config: Parsed config.json dict (IMAP credentials).
        msg_id: Queue key ("msg_...") from pending_emails.json.

    Returns:
        True on success, False otherwise.
    """
    pending = load_pending()

    if msg_id not in pending:
        print(f"❌ 未找到邮件 ID: {msg_id}")
        return False

    email_data = pending[msg_id]
    # NOTE(review): stored "imap_uid" is a sequence number from the scan
    # session, not a stable UID — verify before trusting it here.
    uid = email_data.get('imap_uid')

    if not uid:
        print(f"❌ 邮件 {msg_id} 没有 UID")
        return False

    try:
        mail = connect_imap(config)
        mail.select('INBOX')

        # Mark as read (Seen)
        mail.store(uid, '+FLAGS', '\\Seen')

        # Update status in the queue.
        pending[msg_id]['status'] = 'done'
        pending[msg_id]['action'] = 'kept'
        pending[msg_id]['processed_at'] = datetime.now().isoformat()
        save_pending(pending)

        print(f"✅ 已保留: {email_data.get('subject', 'N/A')[:40]}")
        return True

    except Exception as e:
        print(f"❌ 错误: {e}")
        return False
    finally:
        # Best-effort logout; bare except also hides a NameError when
        # connect_imap itself failed before `mail` was bound.
        try:
            mail.logout()
        except:
            pass
|
||||
|
||||
def delete_email(config, msg_id):
    """Move email to Trash.

    Copies the message into Trash, deletes the INBOX original and marks
    the queue entry as done/deleted.

    Args:
        config: Parsed config.json dict (IMAP credentials).
        msg_id: Queue key ("msg_...") from pending_emails.json.

    Returns:
        True on success, False otherwise.
    """
    pending = load_pending()

    if msg_id not in pending:
        print(f"❌ 未找到邮件 ID: {msg_id}")
        return False

    email_data = pending[msg_id]
    # NOTE(review): "imap_uid" is a sequence number from the scan session,
    # not a stable UID — may address the wrong message in a new session.
    uid = email_data.get('imap_uid')

    if not uid:
        print(f"❌ 邮件 {msg_id} 没有 UID")
        return False

    try:
        mail = connect_imap(config)
        mail.select('INBOX')

        # Copy to Trash
        result = mail.copy(uid, 'Trash')
        if result[0] == 'OK':
            # Flag the original deleted and expunge immediately.
            mail.store(uid, '+FLAGS', '\\Deleted')
            mail.expunge()

            # Update status only after the server accepted the copy.
            pending[msg_id]['status'] = 'done'
            pending[msg_id]['action'] = 'deleted'
            pending[msg_id]['processed_at'] = datetime.now().isoformat()
            save_pending(pending)

            print(f"✅ 已删除: {email_data.get('subject', 'N/A')[:40]}")
            return True
        else:
            print(f"❌ 删除失败: {result}")
            return False

    except Exception as e:
        print(f"❌ 错误: {e}")
        return False
    finally:
        # Best-effort logout; bare except also hides a NameError when
        # connect_imap itself failed before `mail` was bound.
        try:
            mail.logout()
        except:
            pass
|
||||
|
||||
def main():
    """Entry point: validate config.json, then list pending emails.

    The parsed config is not used by the listing itself; loading it early
    surfaces a missing or malformed config.json before any command runs.
    """
    # Uses the module-level `import json`; the previous function-local
    # re-import was redundant, and the parsed value was never used.
    config_file = Path(__file__).parent / "config.json"
    with open(config_file) as f:
        json.load(f)  # parse for validation only

    show_pending_list()
|
||||
|
||||
# Script entry point.
if __name__ == "__main__":
    main()
|
||||
38
scripts/email_processor/test_single.py
Normal file
38
scripts/email_processor/test_single.py
Normal file
@@ -0,0 +1,38 @@
|
||||
#!/usr/bin/env python3
"""Test single email analysis.

Sends one known-promotional sample email to the local Ollama HTTP API
and prints the model's AD/KEEP verdict.
"""
import requests
import json

# Sample promotional email used as the test fixture.
email_data = {
    "subject": "Fwd: Get 10% off your next order 🎉",
    "sender": "crac1017@hotmail.com",
    "body": "Get 10% off your next order! Limited time offer. Shop now and save!"
}

prompt = f"""Analyze this email and determine if it's an advertisement/promotional email.

Subject: {email_data['subject']}
Sender: {email_data['sender']}
Body preview: {email_data['body'][:200]}

Is this an advertisement or promotional email? Answer with ONLY:
- "AD: [brief reason]" if it's an ad/promo
- "KEEP: [brief reason]" if it's important/legitimate

Be conservative - only mark as AD if clearly promotional."""

print("Sending to Qwen3...")
try:
    # Non-streaming generate call against the local Ollama server.
    response = requests.post(
        "http://localhost:11434/api/generate",
        json={
            "model": "qwen3:4b",
            "prompt": prompt,
            "stream": False
        },
        timeout=120
    )
    result = response.json()
    print(f"Result: {result.get('response', 'error')}")
except Exception as e:
    print(f"Error: {e}")
|
||||
1
scripts/email_processor/venv/bin/python
Symbolic link
1
scripts/email_processor/venv/bin/python
Symbolic link
@@ -0,0 +1 @@
|
||||
python3
|
||||
1
scripts/email_processor/venv/bin/python3
Symbolic link
1
scripts/email_processor/venv/bin/python3
Symbolic link
@@ -0,0 +1 @@
|
||||
/usr/bin/python3
|
||||
1
scripts/email_processor/venv/bin/python3.12
Symbolic link
1
scripts/email_processor/venv/bin/python3.12
Symbolic link
@@ -0,0 +1 @@
|
||||
python3
|
||||
1
scripts/email_processor/venv/lib64
Symbolic link
1
scripts/email_processor/venv/lib64
Symbolic link
@@ -0,0 +1 @@
|
||||
lib
|
||||
5
scripts/email_processor/venv/pyvenv.cfg
Normal file
5
scripts/email_processor/venv/pyvenv.cfg
Normal file
@@ -0,0 +1,5 @@
|
||||
home = /usr/bin
|
||||
include-system-site-packages = false
|
||||
version = 3.12.3
|
||||
executable = /usr/bin/python3.12
|
||||
command = /usr/bin/python3 -m venv /home/lyx/.openclaw/workspace/scripts/email_processor/venv
|
||||
130
scripts/ollama_qwen3.py
Normal file
130
scripts/ollama_qwen3.py
Normal file
@@ -0,0 +1,130 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Simple Ollama Qwen3 Client
|
||||
A standalone script to query Ollama's Qwen3 model
|
||||
"""
|
||||
|
||||
import ollama
|
||||
import sys
|
||||
import argparse
|
||||
|
||||
|
||||
def query_qwen3(prompt: str, model: str = "qwen3:4b", temperature: float = 0.7, stream: bool = False):
    """Send a prompt to Qwen3 and return its reply.

    Args:
        prompt: The text prompt to send.
        model: Model name (default: qwen3:4b).
        temperature: Sampling temperature (0.0-1.0, default: 0.7).
        stream: Whether to stream the response (default: False).

    Returns:
        The model's response string, or an error message on failure.
    """
    options = {'temperature': temperature}
    try:
        if not stream:
            # One-shot request: hand back the whole reply at once.
            reply = ollama.generate(model=model, prompt=prompt, options=options)
            return reply['response']

        # Streaming: echo each chunk as it arrives while accumulating them.
        print("🤖 Qwen3 (streaming):\n", end="", flush=True)
        pieces = []
        for part in ollama.generate(
            model=model,
            prompt=prompt,
            stream=True,
            options=options
        ):
            text = part.get('response', '')
            print(text, end="", flush=True)
            pieces.append(text)
        print()  # Final newline
        return "".join(pieces)

    except Exception as e:
        return f"❌ Error: {e}"
|
||||
|
||||
|
||||
def interactive_mode(model: str = "qwen3:4b", temperature: float = 0.7):
    """Run in interactive chat mode.

    Reads prompts from stdin in a loop and prints the model's replies
    until the user types exit/quit/q or presses Ctrl+C. Each turn is an
    independent generate call — no chat history is kept.

    Args:
        model: Ollama model name.
        temperature: Sampling temperature passed to the model.
    """
    print(f"🤖 Qwen3 Chat Mode ({model})")
    print("Type 'exit', 'quit', or press Ctrl+C to exit\n")

    while True:
        try:
            prompt = input("You: ").strip()
            if prompt.lower() in ['exit', 'quit', 'q']:
                print("Goodbye!")
                break
            if not prompt:
                continue

            # Blocking, non-streaming call for this turn.
            response = ollama.generate(
                model=model,
                prompt=prompt,
                options={'temperature': temperature}
            )
            print(f"\nQwen3: {response['response']}\n")

        except KeyboardInterrupt:
            print("\nGoodbye!")
            break
|
||||
|
||||
|
||||
def main():
    """CLI entry point: parse arguments and dispatch to the chosen mode.

    Prompt sources, in precedence order: --stdin, --prompt-file, then the
    positional argument. --interactive bypasses all of them.
    """
    parser = argparse.ArgumentParser(
        description="Query Ollama's Qwen3 model",
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog="""
Examples:
 python ollama_qwen3.py "What is the capital of France?"
 python ollama_qwen3.py -p "Explain quantum computing" --temp 0.3
 python ollama_qwen3.py --interactive
 echo "Hello world" | python ollama_qwen3.py --stdin
"""
    )

    parser.add_argument('prompt', nargs='?', help='The prompt text (optional if using --stdin)')
    parser.add_argument('-p', '--prompt-file', help='Read prompt from file')
    parser.add_argument('--model', default='qwen3:4b', help='Model name (default: qwen3:4b)')
    parser.add_argument('--temp', type=float, default=0.7, help='Temperature 0.0-1.0 (default: 0.7)')
    parser.add_argument('--stdin', action='store_true', help='Read prompt from stdin')
    parser.add_argument('--interactive', '-i', action='store_true', help='Interactive chat mode')
    parser.add_argument('--stream', action='store_true', help='Stream response')

    args = parser.parse_args()

    # Interactive mode ignores every other prompt source.
    if args.interactive:
        interactive_mode(args.model, args.temp)
        return

    # Resolve the prompt from stdin, file, or the positional argument.
    prompt = ""
    if args.stdin:
        prompt = sys.stdin.read().strip()
    elif args.prompt_file:
        with open(args.prompt_file, 'r') as f:
            prompt = f.read().strip()
    elif args.prompt:
        prompt = args.prompt

    if not prompt:
        print("❌ No prompt provided. Use --help for usage information.")
        sys.exit(1)

    # Query model (streaming prints as it goes; non-streaming prints once).
    if args.stream:
        query_qwen3(prompt, args.model, args.temp, stream=True)
    else:
        response = query_qwen3(prompt, args.model, args.temp)
        print(response)
||||
|
||||
|
||||
# Script entry point.
if __name__ == "__main__":
    main()
|
||||
221
scripts/reminder_check.py
Normal file
221
scripts/reminder_check.py
Normal file
@@ -0,0 +1,221 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Daily Reminder Checker
|
||||
Reads reminders from markdown table, filters due items, sends notification
|
||||
"""
|
||||
|
||||
import re
|
||||
import os
|
||||
from datetime import datetime, timedelta
|
||||
from pathlib import Path
|
||||
|
||||
# Paths
# All reminder state lives under the agent workspace: active.md holds the
# live table, archive/ receives completed items.
BASE_DIR = Path.home() / ".openclaw/workspace/reminders"
ACTIVE_FILE = BASE_DIR / "active.md"
ARCHIVE_DIR = BASE_DIR / "archive"

# Priority mapping (lower number = higher priority).
# Accepts both the Chinese labels used in the table and English synonyms.
PRIORITY_MAP = {
    '高': 0, 'urgent': 0, 'high': 0,
    '中': 1, 'normal': 1, 'medium': 1,
    '低': 2, 'low': 2
}
|
||||
|
||||
def parse_table(content):
    """Parse a markdown reminder table into a list of dicts.

    Rows are recognized by a leading '|'; the header row (contains '事项')
    and the '---' separator row are skipped. Each data row must have at
    least four non-empty-leading cells.
    """
    reminders = []

    for row in content.strip().split('\n'):
        # Only pipe-delimited data rows survive; header and separator do not.
        if not row.startswith('|') or '---' in row or '事项' in row:
            continue

        cells = [cell.strip() for cell in row.split('|')[1:-1]]
        if len(cells) < 4 or not cells[0] or cells[0] == '事项':
            continue

        keys = ('事项', '截止日期', '优先级', '状态', '备注')
        defaults = ('', '', '', 'pending', '')
        entry = {}
        for idx, key in enumerate(keys):
            entry[key] = cells[idx] if len(cells) > idx else defaults[idx]
        reminders.append(entry)

    return reminders
|
||||
|
||||
def get_default_date():
    """Return tomorrow's date formatted as 'YYYY-MM-DD'."""
    return (datetime.now() + timedelta(days=1)).strftime('%Y-%m-%d')
|
||||
|
||||
def normalize_reminder(reminder):
    """Fill in defaults in place and return the same reminder dict.

    Missing priority becomes '中', a missing due date becomes tomorrow,
    and the status is lower-cased ('' -> 'pending').
    """
    reminder['优先级'] = reminder['优先级'] or '中'
    # Lazy: get_default_date() is only evaluated when the date is empty.
    reminder['截止日期'] = reminder['截止日期'] or get_default_date()
    reminder['状态'] = (reminder['状态'] or 'pending').lower()
    return reminder
|
||||
|
||||
def get_days_until(due_date_str):
    """Return whole days from today until due_date_str ('%Y-%m-%d').

    Negative means overdue; None means the string did not parse.
    """
    try:
        due = datetime.strptime(due_date_str, '%Y-%m-%d').date()
    except ValueError:
        return None
    return (due - datetime.now().date()).days
|
||||
|
||||
def get_urgency_label(days):
    """Map days-until-due to a colored urgency label (None = unknown date)."""
    if days is None:
        return "❓ 日期未知"
    if days < 0:
        return f"🔴 逾期 {-days} 天"
    if days == 0:
        return "🔴 今天"
    if days == 1:
        return "🟡 明天"
    # 2-3 days out is still yellow; anything later is green.
    color = "🟡" if days <= 3 else "🟢"
    return f"{color} {days} 天后"
|
||||
|
||||
def sort_reminders(reminders):
    """Return reminders ordered by priority (high first), then due date (earliest first)."""
    def rank_and_due(item):
        # Unknown priority labels fall back to medium (rank 1).
        rank = PRIORITY_MAP.get(item['优先级'].lower(), 1)
        try:
            due = datetime.strptime(item['截止日期'], '%Y-%m-%d')
        except ValueError:
            # Unparseable dates sort last within their priority bucket.
            due = datetime.max
        return rank, due

    return sorted(reminders, key=rank_and_due)
|
||||
|
||||
def format_notification(pending_reminders):
    """Render pending reminders as a todo-list notification string.

    Reminders are grouped by priority (高/中/低); items with any other
    priority label are dropped, matching the original grouping behavior.
    Returns None when there is nothing pending.
    """
    if not pending_reminders:
        return None

    today_str = datetime.now().strftime('%Y-%m-%d')
    lines = [f"📋 今日待办清单 ({today_str})", "=" * 50]

    # Bucket reminders by priority label.
    groups = {'高': [], '中': [], '低': []}
    for r in pending_reminders:
        prio = r['优先级']
        if prio in groups:
            groups[prio].append(r)

    # One data-driven pass replaces the previous three copy-pasted
    # per-priority loops (identical except for the header string).
    headers = {
        '高': "\n🔴 高优先级:",
        '中': "\n🟡 中优先级:",
        '低': "\n🟢 低优先级:",
    }
    for prio in ('高', '中', '低'):
        if not groups[prio]:
            continue
        lines.append(headers[prio])
        for r in groups[prio]:
            days = get_days_until(r['截止日期'])
            urgency = get_urgency_label(days)
            note = f" | {r['备注']}" if r['备注'] else ""
            lines.append(f"  • {r['事项']} ({urgency}){note}")

    lines.append("\n" + "=" * 50)
    lines.append("📝 完成事项后请修改状态为 done")
    lines.append("📁 管理文件: ~/.openclaw/workspace/reminders/active.md")

    return '\n'.join(lines)
|
||||
|
||||
def archive_done_reminders(reminders):
    """Append reminders whose 状态 is 'done' to the current quarter's archive file.

    No-op (and no filesystem access) when nothing is done.
    """
    done = [r for r in reminders if r['状态'] == 'done']
    if not done:
        return

    # Archive file is named by year and quarter, e.g. 2026-Q1.md.
    now = datetime.now()
    quarter = (now.month - 1) // 3 + 1
    archive_file = ARCHIVE_DIR / f"{now.year}-Q{quarter}.md"

    # Fix: create the archive directory on first use — open(..., 'a') raises
    # FileNotFoundError when the parent directory does not exist yet.
    ARCHIVE_DIR.mkdir(parents=True, exist_ok=True)

    # Append rows in the same markdown-table format as active.md.
    with open(archive_file, 'a', encoding='utf-8') as f:
        for r in done:
            f.write(f"| {r['事项']} | {r['截止日期']} | {r['优先级']} | done | {r['备注']} |\n")
|
||||
|
||||
def update_active_file(reminders):
    """Rewrite the active reminders file, dropping items marked done."""
    pending = [r for r in reminders if r['状态'] != 'done']

    header = [
        "# 提醒事项表\n\n",
        "## 待办事项(Pending)\n\n",
        "| 事项 | 截止日期 | 优先级 | 状态 | 备注 |\n",
        "|------|----------|--------|------|------|\n",
    ]
    rows = [
        f"| {r['事项']} | {r['截止日期']} | {r['优先级']} | {r['状态']} | {r['备注']} |\n"
        for r in pending
    ]
    footer = [
        "\n## 使用说明\n\n",
        "1. **添加事项**:在表格中新增一行\n",
        "2. **截止日期**:格式 YYYY-MM-DD,空着默认为明天\n",
        "3. **优先级**:高/中/低,空着默认为中\n",
        "4. **状态**:pending(待办)/ done(已完成)\n",
        "5. **每天早上8:00自动检查**,到期事项会通知你\n\n",
        "## 已完成归档\n\n",
        "已完成的事项会自动移动到 archive/ 目录\n",
    ]

    # Single buffered write of the full document.
    with open(ACTIVE_FILE, 'w', encoding='utf-8') as f:
        f.writelines(header + rows + footer)
|
||||
|
||||
def main():
    """Entry point: print a todo-list notification for all pending reminders."""
    if not ACTIVE_FILE.exists():
        print("No reminders file found")
        return

    with open(ACTIVE_FILE, 'r', encoding='utf-8') as f:
        content = f.read()

    # Parse, apply defaults, and keep only items still pending.
    reminders = [normalize_reminder(r) for r in parse_table(content)]
    pending = [r for r in reminders if r['状态'] == 'pending']

    if not pending:
        # Nothing due — stay silent.
        return

    message = format_notification(sort_reminders(pending))
    if message:
        print(message)

    # Archive done items (optional - uncomment if you want auto-archive)
    # archive_done_reminders(reminders)
    # update_active_file(reminders)


if __name__ == "__main__":
    main()
|
||||
193
scripts/ucla_pilates_monitor.py
Normal file
193
scripts/ucla_pilates_monitor.py
Normal file
@@ -0,0 +1,193 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
UCLA Reformer Pilates Course Monitor - Date-aware Version
|
||||
Only reports courses that are NOT "Full" AND not yet started/expired
|
||||
"""
|
||||
|
||||
import asyncio
|
||||
import re
|
||||
from datetime import datetime
|
||||
from playwright.async_api import async_playwright
|
||||
|
||||
# Course URLs to monitor.
# Keys are human-readable labels used in the report; values are the UCLA
# Recreation program-detail pages identified by courseId.
COURSES = {
    "Reformer Pilates (Enrolled)": "https://secure.recreation.ucla.edu/Program/GetProgramDetails?courseId=d7adf66a-d3a6-46d6-96c7-54e4c015dcf1",
    "Reformer Pilates (Standby)": "https://secure.recreation.ucla.edu/Program/GetProgramDetails?courseId=7abbf877-f1cf-4ddc-a0ef-690ff935b39a"
}

# Sections to exclude (time doesn't work for us).
# Matched as plain substrings against the scraped course text.
EXCLUDE_SECTIONS = [
    "Sec 16B",  # Wednesday 12:00pm - not available
    "Sec 19B",  # Friday 12:00pm - not available
]
|
||||
|
||||
def should_exclude(text, exclude_sections=None):
    """Return True if *text* mentions any excluded section label.

    Args:
        text: Scraped course-entry text to inspect.
        exclude_sections: Optional iterable of section labels to match as
            substrings; defaults to the module-level EXCLUDE_SECTIONS list
            (backward-compatible with the original single-argument call).
    """
    if exclude_sections is None:
        exclude_sections = EXCLUDE_SECTIONS
    return any(label in text for label in exclude_sections)
|
||||
|
||||
def parse_date_range(text):
    """Extract (start, end) datetimes from text like '(1/5-2/6)'.

    The year is assumed to be the current year. Returns (None, None) when
    no range is present or the numbers do not form valid dates.

    Fix: a range that crosses New Year, e.g. '(12/20-1/10)', previously
    produced end < start, which made is_course_active() treat the course
    as already expired; such ranges now roll the end date into next year.
    """
    match = re.search(r'\((\d{1,2})/(\d{1,2})-(\d{1,2})/(\d{1,2})\)', text)
    if not match:
        return None, None

    start_month, start_day, end_month, end_day = (int(g) for g in match.groups())
    year = datetime.now().year
    try:
        start_date = datetime(year, start_month, start_day)
        end_date = datetime(year, end_month, end_day)
        if end_date < start_date:
            # Range wraps past New Year: end belongs to the following year.
            end_date = datetime(year + 1, end_month, end_day)
    except ValueError:
        return None, None
    return start_date, end_date
|
||||
|
||||
def is_course_active(start_date, end_date):
    """Return True while the course has not ended yet.

    Fix: compares calendar dates instead of datetimes. datetime(y, m, d)
    is midnight, so the old `end_date >= datetime.now()` comparison
    treated a course ending *today* as already past, contradicting the
    intended grace for the final day. An unparsed (falsy) end date is
    assumed active. start_date is accepted for interface compatibility
    but not consulted.
    """
    if not end_date:
        return True  # Can't parse date, assume active
    return end_date.date() >= datetime.now().date()
|
||||
|
||||
def is_valid_course_entry(text):
    """Heuristically decide whether *text* looks like a real course row.

    Rejects known boilerplate fragments, then requires both a section or
    session identifier and either a price or a day/time string.
    """
    lowered = text.lower()

    # Boilerplate/description fragments that are never course rows.
    noise = (
        "there are no offerings available",
        "to view the class times",
        "please visit the",
        "this standby pass is valid",
        "instructor:",
        "reformer pilates - standby pass",  # Header text
        "×",  # Close button
    )
    if any(fragment in lowered for fragment in noise):
        return False

    # A real entry names a section/session AND shows a price or a time.
    has_section = bool(re.search(r'(Sec \d+[A-Z]|Session [A-Z])', text))
    has_details = bool(re.search(r'(\$\d+|[MTWTF]{1,2},? \d{1,2}:\d{2})', text))
    return has_section and has_details
|
||||
|
||||
async def check_course(page, name, url):
    """Scrape one course page and return its non-full, active sections.

    Args:
        page: A Playwright page object (reused across courses).
        name: Human-readable course label, used only in error reporting.
        url: Program-detail URL to load.

    Returns:
        A list of dicts with keys 'semester', 'info', 'dates',
        'start_date', 'end_date'; or a single-element list
        [{'error': ...}] if the page could not be checked at all.
    """
    available = []

    try:
        await page.goto(url, wait_until="networkidle", timeout=30000)
        await page.wait_for_selector("text=Offerings", timeout=10000)

        # Get all semester tabs
        # NOTE(review): assumes every [role='tab'] on the page is a semester
        # tab — confirm against the live page markup.
        semesters = await page.query_selector_all("[role='tab']")

        for semester in semesters:
            sem_name = await semester.inner_text()
            sem_name = sem_name.strip()

            # Activate the tab, then give the page time to render its list.
            await semester.click()
            await page.wait_for_timeout(1000)

            # Find all course sections — a broad selector union because the
            # exact markup class is not guaranteed.
            sections = await page.query_selector_all(".offering-item, [class*='offering'], .card, .list-group-item, tr")

            for section in sections:
                try:
                    text = await section.inner_text()
                    # Very short fragments are layout noise, not course rows.
                    if not text or len(text) < 30:
                        continue

                    text_lower = text.lower()

                    # Check if it's NOT full — any occurrence of "full"
                    # in the row text disqualifies it.
                    is_full = "full" in text_lower
                    if is_full:
                        continue

                    # Check if it's a valid course entry
                    if not is_valid_course_entry(text):
                        continue

                    # Check if excluded (time doesn't work)
                    if should_exclude(text):
                        continue

                    # Check date range
                    start_date, end_date = parse_date_range(text)
                    if not is_course_active(start_date, end_date):
                        continue  # Course has ended

                    # Extract clean info
                    # Remove extra whitespace and truncate
                    lines = [line.strip() for line in text.strip().split('\n') if line.strip()]
                    info = ' | '.join(lines[:3])  # First 3 lines max
                    info = info[:200]  # Limit length

                    # Format dates nicely
                    if start_date and end_date:
                        date_str = f"{start_date.strftime('%m/%d')}-{end_date.strftime('%m/%d')}"
                    else:
                        date_str = ""

                    available.append({
                        'semester': sem_name,
                        'info': info,
                        'dates': date_str,
                        'start_date': start_date,
                        'end_date': end_date
                    })

                except Exception:
                    # Best-effort scrape: skip any row that fails to parse.
                    continue

    except Exception as e:
        # Page-level failure (navigation/selector timeout etc.) is reported
        # as a sentinel entry rather than raised.
        return [{'error': f"Error checking {name}: {e}"}]

    return available
|
||||
|
||||
async def main():
    """Check every configured course and print only available, active ones.

    Launches a headless Chromium via Playwright, scrapes each URL in
    COURSES, and prints a report only when at least one section is open;
    otherwise stays silent (cron-friendly).
    """
    all_available = []
    today_str = datetime.now().strftime("%Y-%m-%d %H:%M")

    async with async_playwright() as p:
        browser = await p.chromium.launch(headless=True)
        page = await browser.new_page()
        await page.set_viewport_size({"width": 1280, "height": 800})

        for name, url in COURSES.items():
            available = await check_course(page, name, url)
            # Drop the whole course when check_course returned an error
            # sentinel; only genuinely available sections are reported.
            if available and not any('error' in str(item) for item in available):
                all_available.append((name, available))

        await browser.close()

    # Only print if there are available courses
    if all_available:
        print(f"🚨 UCLA Pilates - Available Courses ({today_str})")
        print("=" * 60)

        for name, courses in all_available:
            print(f"\n📋 {name}:")
            for course in courses:
                # Format: [Winter 2026] 📅 02/11-03/11
                date_str = f"📅 {course['dates']}" if course['dates'] else ""
                print(f"  ✅ [{course['semester']}] {date_str}")
                print(f"     {course['info']}")

        print("\n" + "=" * 60)
        print("👉 Enroll at: https://secure.recreation.ucla.edu")
    else:
        # No available courses - silent
        pass


if __name__ == "__main__":
    asyncio.run(main())
|
||||
Reference in New Issue
Block a user