feat: Implement Phase 2 Feed Formats - ATOM, JSON Feed, RSS fix (Phases 2.0-2.3)
This commit implements the first three phases of v1.1.2 Phase 2 Feed Formats, adding ATOM 1.0 and JSON Feed 1.1 support alongside the existing RSS feed. CRITICAL BUG FIX: - Fixed RSS streaming feed ordering (was showing oldest-first instead of newest-first) - Streaming RSS removed incorrect reversed() call at line 198 - Feedgen RSS kept correct reversed() to compensate for library behavior NEW FEATURES: - ATOM 1.0 feed generation (RFC 4287 compliant) - Proper XML namespacing and RFC 3339 dates - Streaming and non-streaming methods - 11 comprehensive tests - JSON Feed 1.1 generation (JSON Feed spec compliant) - RFC 3339 dates and UTF-8 JSON output - Custom _starpunk extension with permalink_path and word_count - 13 comprehensive tests REFACTORING: - Restructured feed code into starpunk/feeds/ module - feeds/rss.py - RSS 2.0 (moved from feed.py) - feeds/atom.py - ATOM 1.0 (new) - feeds/json_feed.py - JSON Feed 1.1 (new) - Backward compatible feed.py shim for existing imports - Business metrics integrated into all feed generators TESTING: - Created shared test helper tests/helpers/feed_ordering.py - Helper validates newest-first ordering across all formats - 48 total feed tests, all passing - RSS: 24 tests - ATOM: 11 tests - JSON Feed: 13 tests FILES CHANGED: - Modified: starpunk/feed.py (now compatibility shim) - New: starpunk/feeds/ module with rss.py, atom.py, json_feed.py - New: tests/helpers/feed_ordering.py (shared test helper) - New: tests/test_feeds_atom.py, tests/test_feeds_json.py - Modified: CHANGELOG.md (Phase 2 entries) - New: docs/reports/2025-11-26-v1.1.2-phase2-feed-formats-partial.md NEXT STEPS: Phase 2.4 (Content Negotiation) pending - will add /feed endpoint with Accept header negotiation and explicit format endpoints. 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude <noreply@anthropic.com>
This commit is contained in:
1
tests/helpers/__init__.py
Normal file
1
tests/helpers/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
# Test helpers for StarPunk
|
||||
145
tests/helpers/feed_ordering.py
Normal file
145
tests/helpers/feed_ordering.py
Normal file
@@ -0,0 +1,145 @@
|
||||
"""
|
||||
Shared test helper for verifying feed ordering across all formats
|
||||
|
||||
This module provides utilities to verify that feed items are in the correct
|
||||
order (newest first) regardless of feed format (RSS, ATOM, JSON Feed).
|
||||
"""
|
||||
|
||||
import xml.etree.ElementTree as ET
|
||||
from datetime import datetime
|
||||
import json
|
||||
from email.utils import parsedate_to_datetime
|
||||
|
||||
|
||||
def assert_feed_newest_first(feed_content, format_type='rss', expected_count=None):
    """
    Verify feed items are in newest-first order.

    Args:
        feed_content: Feed content as string (XML for RSS/ATOM, JSON string
            for JSON Feed)
        format_type: Feed format ('rss', 'atom', or 'json')
        expected_count: Optional expected number of items (for validation).
            An explicit 0 accepts an empty feed.

    Returns:
        True if all checks pass (so the call can itself sit in an assert).

    Raises:
        AssertionError: If items are not in newest-first order or count mismatch
        ValueError: If format_type is not 'rss', 'atom', or 'json'

    Examples:
        >>> feed_xml = generate_rss_feed(notes)        # doctest: +SKIP
        >>> assert_feed_newest_first(feed_xml, 'rss', expected_count=10)  # doctest: +SKIP

        >>> feed_json = generate_json_feed(notes)      # doctest: +SKIP
        >>> assert_feed_newest_first(feed_json, 'json')  # doctest: +SKIP
    """
    # Dispatch table keeps the supported formats in one place
    extractors = {
        'rss': _extract_rss_dates,
        'atom': _extract_atom_dates,
        'json': _extract_json_feed_dates,
    }
    if format_type not in extractors:
        raise ValueError(f"Unsupported format type: {format_type}")

    dates = extractors[format_type](feed_content)

    # Verify expected count if provided
    if expected_count is not None:
        assert len(dates) == expected_count, \
            f"Expected {expected_count} items but found {len(dates)}"
    else:
        # FIX: previously this non-empty assertion ran unconditionally, so
        # calling with expected_count=0 could never succeed. Require a
        # non-empty feed only when the caller gave no explicit count.
        assert len(dates) > 0, "Feed contains no items"

    # Verify dates are in descending order (newest first); equal adjacent
    # timestamps are allowed.
    for i in range(len(dates) - 1):
        current = dates[i]
        next_item = dates[i + 1]

        assert current >= next_item, \
            f"Item {i} (date: {current}) should be newer than or equal to item {i+1} (date: {next_item}). " \
            f"Feed items are not in newest-first order!"

    return True
|
||||
|
||||
|
||||
def _extract_rss_dates(feed_xml):
|
||||
"""
|
||||
Extract publication dates from RSS feed
|
||||
|
||||
Args:
|
||||
feed_xml: RSS feed XML string
|
||||
|
||||
Returns:
|
||||
List of datetime objects in feed order
|
||||
"""
|
||||
root = ET.fromstring(feed_xml)
|
||||
|
||||
# Find all item elements
|
||||
items = root.findall('.//item')
|
||||
|
||||
dates = []
|
||||
for item in items:
|
||||
pub_date_elem = item.find('pubDate')
|
||||
if pub_date_elem is not None and pub_date_elem.text:
|
||||
# Parse RFC-822 date format
|
||||
dt = parsedate_to_datetime(pub_date_elem.text)
|
||||
dates.append(dt)
|
||||
|
||||
return dates
|
||||
|
||||
|
||||
def _extract_atom_dates(feed_xml):
|
||||
"""
|
||||
Extract published/updated dates from ATOM feed
|
||||
|
||||
Args:
|
||||
feed_xml: ATOM feed XML string
|
||||
|
||||
Returns:
|
||||
List of datetime objects in feed order
|
||||
"""
|
||||
# Parse ATOM namespace
|
||||
root = ET.fromstring(feed_xml)
|
||||
ns = {'atom': 'http://www.w3.org/2005/Atom'}
|
||||
|
||||
# Find all entry elements
|
||||
entries = root.findall('.//atom:entry', ns)
|
||||
|
||||
dates = []
|
||||
for entry in entries:
|
||||
# Try published first, fall back to updated
|
||||
published = entry.find('atom:published', ns)
|
||||
updated = entry.find('atom:updated', ns)
|
||||
|
||||
date_elem = published if published is not None else updated
|
||||
|
||||
if date_elem is not None and date_elem.text:
|
||||
# Parse RFC 3339 (ISO 8601) date format
|
||||
dt = datetime.fromisoformat(date_elem.text.replace('Z', '+00:00'))
|
||||
dates.append(dt)
|
||||
|
||||
return dates
|
||||
|
||||
|
||||
def _extract_json_feed_dates(feed_json):
|
||||
"""
|
||||
Extract publication dates from JSON Feed
|
||||
|
||||
Args:
|
||||
feed_json: JSON Feed string
|
||||
|
||||
Returns:
|
||||
List of datetime objects in feed order
|
||||
"""
|
||||
feed_data = json.loads(feed_json)
|
||||
|
||||
items = feed_data.get('items', [])
|
||||
|
||||
dates = []
|
||||
for item in items:
|
||||
# JSON Feed uses date_published (RFC 3339)
|
||||
date_str = item.get('date_published')
|
||||
|
||||
if date_str:
|
||||
# Parse RFC 3339 (ISO 8601) date format
|
||||
dt = datetime.fromisoformat(date_str.replace('Z', '+00:00'))
|
||||
dates.append(dt)
|
||||
|
||||
return dates
|
||||
@@ -23,6 +23,7 @@ from starpunk.feed import (
|
||||
)
|
||||
from starpunk.notes import create_note
|
||||
from starpunk.models import Note
|
||||
from tests.helpers.feed_ordering import assert_feed_newest_first
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
@@ -134,7 +135,7 @@ class TestGenerateFeed:
|
||||
assert len(items) == 3
|
||||
|
||||
def test_generate_feed_newest_first(self, app):
|
||||
"""Test feed displays notes in newest-first order"""
|
||||
"""Test feed displays notes in newest-first order (regression test for v1.1.2)"""
|
||||
with app.app_context():
|
||||
# Create notes with distinct timestamps (oldest to newest in creation order)
|
||||
import time
|
||||
@@ -161,6 +162,10 @@ class TestGenerateFeed:
|
||||
notes=notes,
|
||||
)
|
||||
|
||||
# Use shared helper to verify ordering
|
||||
assert_feed_newest_first(feed_xml, format_type='rss', expected_count=3)
|
||||
|
||||
# Also verify manually with XML parsing
|
||||
root = ET.fromstring(feed_xml)
|
||||
channel = root.find("channel")
|
||||
items = channel.findall("item")
|
||||
|
||||
306
tests/test_feeds_atom.py
Normal file
306
tests/test_feeds_atom.py
Normal file
@@ -0,0 +1,306 @@
|
||||
"""
|
||||
Tests for ATOM feed generation module
|
||||
|
||||
Tests cover:
|
||||
- ATOM feed generation with various note counts
|
||||
- RFC 3339 date formatting
|
||||
- Feed structure and required elements
|
||||
- Entry ordering (newest first)
|
||||
- XML escaping
|
||||
"""
|
||||
|
||||
import pytest
|
||||
from datetime import datetime, timezone
|
||||
from xml.etree import ElementTree as ET
|
||||
import time
|
||||
|
||||
from starpunk import create_app
|
||||
from starpunk.feeds.atom import generate_atom, generate_atom_streaming
|
||||
from starpunk.notes import create_note, list_notes
|
||||
from tests.helpers.feed_ordering import assert_feed_newest_first
|
||||
|
||||
|
||||
@pytest.fixture
def app(tmp_path):
    """Build a StarPunk application configured against a throwaway data dir."""
    data_dir = tmp_path / "data"
    data_dir.mkdir(parents=True, exist_ok=True)

    application = create_app(
        config={
            "TESTING": True,
            "DATABASE_PATH": data_dir / "starpunk.db",
            "DATA_PATH": data_dir,
            "NOTES_PATH": data_dir / "notes",
            "SESSION_SECRET": "test-secret-key",
            "ADMIN_ME": "https://test.example.com",
            "SITE_URL": "https://example.com",
            "SITE_NAME": "Test Blog",
            "SITE_DESCRIPTION": "A test blog",
            "DEV_MODE": False,
        }
    )
    yield application
|
||||
|
||||
|
||||
@pytest.fixture
def sample_notes(app):
    """Create five published notes and return the published-note listing."""
    with app.app_context():
        for i in range(5):
            create_note(
                content=f"# Test Note {i}\n\nThis is test content for note {i}.",
                published=True,
            )
            # Small pause keeps each note's timestamp distinct
            time.sleep(0.01)
        return list_notes(published_only=True, limit=10)
|
||||
|
||||
|
||||
class TestGenerateAtom:
    """Test generate_atom() function"""

    def test_generate_atom_basic(self, app, sample_notes):
        """Test basic ATOM feed generation with notes"""
        with app.app_context():
            feed_xml = generate_atom(
                site_url="https://example.com",
                site_name="Test Blog",
                site_description="A test blog",
                notes=sample_notes,
            )

            # Should return an XML string with a declaration
            assert isinstance(feed_xml, str)
            assert feed_xml.startswith("<?xml")

            # Parse XML to verify structure
            root = ET.fromstring(feed_xml)

            # Root element must carry the ATOM namespace (RFC 4287)
            assert root.tag == "{http://www.w3.org/2005/Atom}feed"

            # Required feed-level elements: title, id, updated
            ns = {'atom': 'http://www.w3.org/2005/Atom'}
            title = root.find('atom:title', ns)
            assert title is not None
            assert title.text == "Test Blog"

            id_elem = root.find('atom:id', ns)
            assert id_elem is not None

            updated = root.find('atom:updated', ns)
            assert updated is not None

            # All 5 sample notes should appear as entries
            entries = root.findall('atom:entry', ns)
            assert len(entries) == 5

    def test_generate_atom_empty(self, app):
        """Test ATOM feed generation with no notes"""
        with app.app_context():
            feed_xml = generate_atom(
                site_url="https://example.com",
                site_name="Test Blog",
                site_description="A test blog",
                notes=[],
            )

            # Should still generate valid XML with zero entries
            assert isinstance(feed_xml, str)
            root = ET.fromstring(feed_xml)

            ns = {'atom': 'http://www.w3.org/2005/Atom'}
            entries = root.findall('atom:entry', ns)
            assert len(entries) == 0

    def test_generate_atom_respects_limit(self, app, sample_notes):
        """Test ATOM feed respects entry limit"""
        with app.app_context():
            feed_xml = generate_atom(
                site_url="https://example.com",
                site_name="Test Blog",
                site_description="A test blog",
                notes=sample_notes,
                limit=3,
            )

            root = ET.fromstring(feed_xml)
            ns = {'atom': 'http://www.w3.org/2005/Atom'}
            entries = root.findall('atom:entry', ns)

            # Should only have 3 entries (respecting limit)
            assert len(entries) == 3

    def test_generate_atom_newest_first(self, app):
        """Test ATOM feed displays notes in newest-first order"""
        with app.app_context():
            # Create notes with distinct timestamps
            for i in range(3):
                create_note(
                    content=f"# Note {i}\n\nContent {i}.",
                    published=True,
                )
                time.sleep(0.01)

            # Get notes from database (should be DESC = newest first)
            notes = list_notes(published_only=True, limit=10)

            # Generate feed
            feed_xml = generate_atom(
                site_url="https://example.com",
                site_name="Test Blog",
                site_description="A test blog",
                notes=notes,
            )

            # Use shared helper to verify ordering
            assert_feed_newest_first(feed_xml, format_type='atom', expected_count=3)

            # Also verify manually with XML parsing:
            # first entry should be newest (Note 2), last oldest (Note 0)
            root = ET.fromstring(feed_xml)
            ns = {'atom': 'http://www.w3.org/2005/Atom'}
            entries = root.findall('atom:entry', ns)

            first_title = entries[0].find('atom:title', ns).text
            last_title = entries[-1].find('atom:title', ns).text

            assert "Note 2" in first_title
            assert "Note 0" in last_title

    def test_generate_atom_requires_site_url(self):
        """Test ATOM feed generation requires site_url"""
        with pytest.raises(ValueError, match="site_url is required"):
            generate_atom(
                site_url="",
                site_name="Test Blog",
                site_description="A test blog",
                notes=[],
            )

    def test_generate_atom_requires_site_name(self):
        """Test ATOM feed generation requires site_name"""
        with pytest.raises(ValueError, match="site_name is required"):
            generate_atom(
                site_url="https://example.com",
                site_name="",
                site_description="A test blog",
                notes=[],
            )

    def test_generate_atom_entry_structure(self, app, sample_notes):
        """Test individual ATOM entry has all required elements"""
        with app.app_context():
            feed_xml = generate_atom(
                site_url="https://example.com",
                site_name="Test Blog",
                site_description="A test blog",
                notes=sample_notes[:1],
            )

            root = ET.fromstring(feed_xml)
            ns = {'atom': 'http://www.w3.org/2005/Atom'}
            entry = root.find('atom:entry', ns)

            # Check required entry elements per RFC 4287
            assert entry.find('atom:id', ns) is not None
            assert entry.find('atom:title', ns) is not None
            assert entry.find('atom:updated', ns) is not None
            assert entry.find('atom:published', ns) is not None
            assert entry.find('atom:content', ns) is not None
            assert entry.find('atom:link', ns) is not None

    def test_generate_atom_html_content(self, app):
        """Test ATOM feed includes HTML content properly escaped"""
        with app.app_context():
            note = create_note(
                content="# Test\n\nThis is **bold** and *italic*.",
                published=True,
            )

            feed_xml = generate_atom(
                site_url="https://example.com",
                site_name="Test Blog",
                site_description="A test blog",
                notes=[note],
            )

            root = ET.fromstring(feed_xml)
            ns = {'atom': 'http://www.w3.org/2005/Atom'}
            entry = root.find('atom:entry', ns)
            content = entry.find('atom:content', ns)

            # Should have type="html"
            assert content.get('type') == 'html'

            # FIX: the previous assertion checked for a bare "<" in the
            # content, which any markup satisfies, so the test could never
            # fail. Assert the rendered <strong> element specifically —
            # ElementTree normally returns the unescaped text, but accept a
            # double-escaped form in case the generator escapes twice.
            content_text = content.text
            assert "<strong>" in content_text or "&lt;strong&gt;" in content_text

    def test_generate_atom_xml_escaping(self, app):
        """Test ATOM feed escapes special XML characters"""
        with app.app_context():
            note = create_note(
                content="# Test & Special <Characters>\n\nContent with 'quotes' and \"doubles\".",
                published=True,
            )

            feed_xml = generate_atom(
                site_url="https://example.com",
                site_name="Test Blog & More",
                site_description="A test <blog>",
                notes=[note],
            )

            # Should produce valid XML (no parse errors)
            root = ET.fromstring(feed_xml)
            assert root is not None

            # Check title is properly escaped in XML: parsing must round-trip
            # the raw "&" back out of the escaped form
            ns = {'atom': 'http://www.w3.org/2005/Atom'}
            title = root.find('atom:title', ns)
            assert title.text == "Test Blog & More"
|
||||
|
||||
|
||||
class TestGenerateAtomStreaming:
    """Tests for generate_atom_streaming()"""

    def test_generate_atom_streaming_basic(self, app, sample_notes):
        """Streamed chunks reassemble into a complete ATOM document"""
        with app.app_context():
            stream = generate_atom_streaming(
                site_url="https://example.com",
                site_name="Test Blog",
                site_description="A test blog",
                notes=sample_notes,
            )

            pieces = list(stream)
            assert len(pieces) > 0

            # Concatenated output must parse and contain all 5 entries
            document = ET.fromstring(''.join(pieces))
            atom_ns = {'atom': 'http://www.w3.org/2005/Atom'}
            assert len(document.findall('atom:entry', atom_ns)) == 5

    def test_generate_atom_streaming_yields_chunks(self, app, sample_notes):
        """Streaming produces several chunks, not one big string"""
        with app.app_context():
            stream = generate_atom_streaming(
                site_url="https://example.com",
                site_name="Test Blog",
                site_description="A test blog",
                notes=sample_notes,
                limit=3,
            )

            # At minimum: XML declaration + feed open + entries + closing tag
            assert len(list(stream)) >= 4
|
||||
314
tests/test_feeds_json.py
Normal file
314
tests/test_feeds_json.py
Normal file
@@ -0,0 +1,314 @@
|
||||
"""
|
||||
Tests for JSON Feed generation module
|
||||
|
||||
Tests cover:
|
||||
- JSON Feed generation with various note counts
|
||||
- RFC 3339 date formatting
|
||||
- Feed structure and required fields
|
||||
- Entry ordering (newest first)
|
||||
- JSON validity
|
||||
"""
|
||||
|
||||
import pytest
|
||||
from datetime import datetime, timezone
|
||||
import json
|
||||
import time
|
||||
|
||||
from starpunk import create_app
|
||||
from starpunk.feeds.json_feed import generate_json_feed, generate_json_feed_streaming
|
||||
from starpunk.notes import create_note, list_notes
|
||||
from tests.helpers.feed_ordering import assert_feed_newest_first
|
||||
|
||||
|
||||
@pytest.fixture
def app(tmp_path):
    """Build a StarPunk application configured against a throwaway data dir."""
    data_dir = tmp_path / "data"
    data_dir.mkdir(parents=True, exist_ok=True)

    application = create_app(
        config={
            "TESTING": True,
            "DATABASE_PATH": data_dir / "starpunk.db",
            "DATA_PATH": data_dir,
            "NOTES_PATH": data_dir / "notes",
            "SESSION_SECRET": "test-secret-key",
            "ADMIN_ME": "https://test.example.com",
            "SITE_URL": "https://example.com",
            "SITE_NAME": "Test Blog",
            "SITE_DESCRIPTION": "A test blog",
            "DEV_MODE": False,
        }
    )
    yield application
|
||||
|
||||
|
||||
@pytest.fixture
def sample_notes(app):
    """Create five published notes and return the published-note listing."""
    with app.app_context():
        for i in range(5):
            create_note(
                content=f"# Test Note {i}\n\nThis is test content for note {i}.",
                published=True,
            )
            # Small pause keeps each note's timestamp distinct
            time.sleep(0.01)
        return list_notes(published_only=True, limit=10)
|
||||
|
||||
|
||||
class TestGenerateJsonFeed:
    """Tests for generate_json_feed()"""

    def test_generate_json_feed_basic(self, app, sample_notes):
        """Feed with notes carries the required JSON Feed 1.1 top-level fields"""
        with app.app_context():
            payload = generate_json_feed(
                site_url="https://example.com",
                site_name="Test Blog",
                site_description="A test blog",
                notes=sample_notes,
            )

            # Generator returns a serialized JSON string, not a dict
            assert isinstance(payload, str)

            parsed = json.loads(payload)

            # Required fields per the JSON Feed 1.1 specification
            assert parsed["version"] == "https://jsonfeed.org/version/1.1"
            assert parsed["title"] == "Test Blog"
            assert "items" in parsed
            assert isinstance(parsed["items"], list)

            # All five sample notes should be present
            assert len(parsed["items"]) == 5

    def test_generate_json_feed_empty(self, app):
        """Feed with no notes is still valid JSON with an empty items list"""
        with app.app_context():
            payload = generate_json_feed(
                site_url="https://example.com",
                site_name="Test Blog",
                site_description="A test blog",
                notes=[],
            )

            parsed = json.loads(payload)
            assert parsed["items"] == []

    def test_generate_json_feed_respects_limit(self, app, sample_notes):
        """The limit argument caps the number of emitted items"""
        with app.app_context():
            payload = generate_json_feed(
                site_url="https://example.com",
                site_name="Test Blog",
                site_description="A test blog",
                notes=sample_notes,
                limit=3,
            )

            parsed = json.loads(payload)
            assert len(parsed["items"]) == 3

    def test_generate_json_feed_newest_first(self, app):
        """Items appear newest-first"""
        with app.app_context():
            # Three notes with strictly increasing timestamps
            for i in range(3):
                create_note(
                    content=f"# Note {i}\n\nContent {i}.",
                    published=True,
                )
                time.sleep(0.01)

            # Database listing is expected newest-first (DESC)
            recent = list_notes(published_only=True, limit=10)

            payload = generate_json_feed(
                site_url="https://example.com",
                site_name="Test Blog",
                site_description="A test blog",
                notes=recent,
            )

            # Shared helper validates ordering across all feed formats
            assert_feed_newest_first(payload, format_type='json', expected_count=3)

            # Double-check by inspecting the titles directly:
            # first item newest (Note 2), last item oldest (Note 0)
            items = json.loads(payload)["items"]
            assert "Note 2" in items[0]["title"]
            assert "Note 0" in items[-1]["title"]

    def test_generate_json_feed_requires_site_url(self):
        """An empty site_url is rejected"""
        with pytest.raises(ValueError, match="site_url is required"):
            generate_json_feed(
                site_url="",
                site_name="Test Blog",
                site_description="A test blog",
                notes=[],
            )

    def test_generate_json_feed_requires_site_name(self):
        """An empty site_name is rejected"""
        with pytest.raises(ValueError, match="site_name is required"):
            generate_json_feed(
                site_url="https://example.com",
                site_name="",
                site_description="A test blog",
                notes=[],
            )

    def test_generate_json_feed_item_structure(self, app, sample_notes):
        """Each item carries the fields the spec requires"""
        with app.app_context():
            payload = generate_json_feed(
                site_url="https://example.com",
                site_name="Test Blog",
                site_description="A test blog",
                notes=sample_notes[:1],
            )

            first = json.loads(payload)["items"][0]

            assert "id" in first
            assert "url" in first
            assert "title" in first
            assert "date_published" in first

            # The spec requires at least one of the content fields
            assert "content_html" in first or "content_text" in first

    def test_generate_json_feed_html_content(self, app):
        """Markdown is rendered into content_html"""
        with app.app_context():
            note = create_note(
                content="# Test\n\nThis is **bold** and *italic*.",
                published=True,
            )

            payload = generate_json_feed(
                site_url="https://example.com",
                site_name="Test Blog",
                site_description="A test blog",
                notes=[note],
            )

            first = json.loads(payload)["items"][0]

            assert "content_html" in first
            rendered = first["content_html"]

            # Markdown emphasis should have become HTML tags
            assert "<strong>" in rendered or "<em>" in rendered

    def test_generate_json_feed_starpunk_extension(self, app, sample_notes):
        """Items include the custom _starpunk extension object"""
        with app.app_context():
            payload = generate_json_feed(
                site_url="https://example.com",
                site_name="Test Blog",
                site_description="A test blog",
                notes=sample_notes[:1],
            )

            first = json.loads(payload)["items"][0]

            assert "_starpunk" in first
            assert "permalink_path" in first["_starpunk"]
            assert "word_count" in first["_starpunk"]

    def test_generate_json_feed_date_format(self, app, sample_notes):
        """date_published is a timezone-aware RFC 3339 timestamp"""
        with app.app_context():
            payload = generate_json_feed(
                site_url="https://example.com",
                site_name="Test Blog",
                site_description="A test blog",
                notes=sample_notes[:1],
            )

            stamp = json.loads(payload)["items"][0]["date_published"]

            # Either the 'Z' UTC suffix or an explicit numeric offset
            assert stamp.endswith("Z") or "+" in stamp or "-" in stamp[-6:]

            # Must round-trip through fromisoformat as timezone-aware
            parsed = datetime.fromisoformat(stamp.replace("Z", "+00:00"))
            assert parsed.tzinfo is not None
|
||||
|
||||
|
||||
class TestGenerateJsonFeedStreaming:
    """Tests for generate_json_feed_streaming()"""

    def test_generate_json_feed_streaming_basic(self, app, sample_notes):
        """Streamed chunks reassemble into a complete feed"""
        with app.app_context():
            stream = generate_json_feed_streaming(
                site_url="https://example.com",
                site_name="Test Blog",
                site_description="A test blog",
                notes=sample_notes,
            )

            pieces = list(stream)
            assert len(pieces) > 0

            # Concatenated output must parse and contain all 5 items
            parsed = json.loads(''.join(pieces))
            assert len(parsed["items"]) == 5

    def test_generate_json_feed_streaming_yields_chunks(self, app, sample_notes):
        """Streaming produces several chunks, not one big string"""
        with app.app_context():
            stream = generate_json_feed_streaming(
                site_url="https://example.com",
                site_name="Test Blog",
                site_description="A test blog",
                notes=sample_notes,
                limit=3,
            )

            # At minimum: opening object + items + closing brace
            assert len(list(stream)) >= 3

    def test_generate_json_feed_streaming_valid_json(self, app, sample_notes):
        """Concatenated stream parses as a valid JSON Feed 1.1 document"""
        with app.app_context():
            stream = generate_json_feed_streaming(
                site_url="https://example.com",
                site_name="Test Blog",
                site_description="A test blog",
                notes=sample_notes,
            )

            parsed = json.loads(''.join(stream))
            assert parsed["version"] == "https://jsonfeed.org/version/1.1"
|
||||
Reference in New Issue
Block a user