diff --git a/guides/fundamentals/custom-frame-processor.mdx b/guides/fundamentals/custom-frame-processor.mdx
index 6511e583..8677e8ed 100644
--- a/guides/fundamentals/custom-frame-processor.mdx
+++ b/guides/fundamentals/custom-frame-processor.mdx
@@ -85,7 +85,7 @@ With this positioning, the `MetricsFrameLogger` FrameProcessor will receive ever
## Key Requirements
-FrameProcessors must inherit from the base `FrameProcessor` class. This ensures that your custom FrameProcessor will correctly handle frames like `StartFrame`, `EndFrame`, `StartInterruptionFrame` without having to write custom logic for those frames. This inheritance also provides it with the ability to `process_frame()` and `push_frame()`:
+FrameProcessors must inherit from the base `FrameProcessor` class. This ensures that your custom FrameProcessor will correctly handle frames like `StartFrame`, `EndFrame`, and `InterruptionFrame` without having to write custom logic for those frames. This inheritance also provides it with the ability to `process_frame()` and `push_frame()`:
- **`process_frame()`** is what allows the FrameProcessor to receive frames and add custom conditional logic based on the frames that are received.
- **`push_frame()`** allows the FrameProcessor to push frames to the pipeline. Normally, frames are pushed DOWNSTREAM, but based on which processors need the output, you can also push UPSTREAM or in both directions.
diff --git a/guides/learn/pipeline.mdx b/guides/learn/pipeline.mdx
index 186d2384..35f7994c 100644
--- a/guides/learn/pipeline.mdx
+++ b/guides/learn/pipeline.mdx
@@ -84,7 +84,7 @@ class ControlFrame(Frame):
# SystemFrames (processed immediately)
InputAudioRawFrame # User audio input
UserStartedSpeakingFrame # Speech detection events
-StartInterruptionFrame # Interruption control
+InterruptionFrame # Interruption control
ErrorFrame # Error notifications
# DataFrames (queued and ordered)
diff --git a/server/pipeline/heartbeats.mdx b/server/pipeline/heartbeats.mdx
index a1fcd720..534e6fcc 100644
--- a/server/pipeline/heartbeats.mdx
+++ b/server/pipeline/heartbeats.mdx
@@ -26,7 +26,7 @@ When heartbeats are enabled:
1. The pipeline sends a `HeartbeatFrame` every second
2. The frame traverses through all processors in the pipeline, from source to sink
3. The pipeline monitors how long it takes for heartbeat frames to complete their journey
-4. If a heartbeat frame isn't received within 5 seconds, a warning is logged
+4. If a heartbeat frame isn't received within 10 seconds, a warning is logged
## Monitoring Output
@@ -52,11 +52,11 @@ Heartbeat monitoring is useful for:
## Configuration
-The heartbeat system uses two key timing constants:
+The heartbeat system uses two timing values:
-- `HEARTBEAT_SECONDS = 1.0` - Interval between heartbeat frames
-- `HEARTBEAT_MONITOR_SECONDS = 10.0` - Time before warning if no heartbeat received
+- **Interval** (default 1.0s) — how often heartbeat frames are sent. Configurable via `heartbeats_period_secs` in `PipelineParams`.
+- **Monitor window** (10x the interval) — how long to wait before logging a warning if no heartbeat is received.
- These values are currently fixed but may be configurable in future versions.
+ Because the monitor window is derived from the interval, raising `heartbeats_period_secs` proportionally raises the threshold at which a missing-heartbeat warning is logged.
diff --git a/server/pipeline/pipeline-params.mdx b/server/pipeline/pipeline-params.mdx
index 18f5f3b5..07d11e78 100644
--- a/server/pipeline/pipeline-params.mdx
+++ b/server/pipeline/pipeline-params.mdx
@@ -28,7 +28,7 @@ task = PipelineTask(pipeline, params=params)
## Available Parameters
-
+
DEPRECATED: This parameter is deprecated. Configure interruption behavior
via [User Turn
diff --git a/server/pipeline/pipeline-task.mdx b/server/pipeline/pipeline-task.mdx
index 7a3a8322..f7fbe631 100644
--- a/server/pipeline/pipeline-task.mdx
+++ b/server/pipeline/pipeline-task.mdx
@@ -66,7 +66,7 @@ await runner.run(task)
Frame types that should prevent the pipeline from being considered idle. See
[Pipeline Idle Detection](/server/pipeline/pipeline-idle-detection) for
@@ -84,7 +84,7 @@ await runner.run(task)
guide](/server/utilities/opentelemetry) for details.
-
+
Whether to enable turn tracking. See [The OpenTelemetry
guide](/server/utilities/opentelemetry) for details.
@@ -108,10 +108,11 @@ await runner.run(task)
### Task Lifecycle Management
-Starts and manages the pipeline execution until completion or cancellation.
+Starts and manages the pipeline execution until completion or cancellation. Typically called via `PipelineRunner` rather than directly:
```python
-await task.run()
+runner = PipelineRunner()
+await runner.run(task)
```
@@ -163,7 +164,7 @@ Downstream frames are pushed from the beginning of the pipeline. Upstream frames
await task.queue_frame(TTSSpeakFrame("Hello!"))
# Push a frame upstream from the end of the pipeline
-from pipecat.frames.frames import FrameDirection
+from pipecat.processors.frame_processor import FrameDirection
await task.queue_frame(UserStoppedSpeakingFrame(), direction=FrameDirection.UPSTREAM)
```
@@ -188,7 +189,7 @@ frames = [TTSSpeakFrame("Hello!"), TTSSpeakFrame("How are you?")]
await task.queue_frames(frames)
# Push frames upstream from the end of the pipeline
-from pipecat.frames.frames import FrameDirection
+from pipecat.processors.frame_processor import FrameDirection
frames = [TranscriptionFrame("user input"), UserStoppedSpeakingFrame()]
await task.queue_frames(frames, direction=FrameDirection.UPSTREAM)
```
diff --git a/server/utilities/dtmf-aggregator.mdx b/server/utilities/dtmf-aggregator.mdx
index 9f8ba386..fd06fb28 100644
--- a/server/utilities/dtmf-aggregator.mdx
+++ b/server/utilities/dtmf-aggregator.mdx
@@ -41,10 +41,6 @@ aggregator = DTMFAggregator(
Contains a single keypad button press with a KeypadEntry value
-
- Flushes any pending aggregation when user interruption begins
-
-
Flushes pending aggregation and stops the aggregation task
@@ -73,7 +69,7 @@ The aggregator flushes (emits a TranscriptionFrame) when:
1. **Termination digit**: The configured termination digit is pressed (default: `#`)
2. **Timeout**: No new digits received within the timeout period (default: 2 seconds)
-3. **Interruption**: A `StartInterruptionFrame` is received
+3. **Interruption**: An `InterruptionFrame` is received
4. **Pipeline end**: An `EndFrame` is received
## Usage Examples
@@ -138,12 +134,12 @@ Respond appropriately to both voice and keypad input."""
## Sequence Examples
-| User Input | Aggregation Trigger | Output TranscriptionFrame |
-| ------------------ | ---------------------- | ------------------------- |
-| `1`, `2`, `3`, `#` | Termination digit | `"DTMF: 123#"` |
-| `*`, `0` | 2-second timeout | `"DTMF: *0"` |
-| `5`, interruption | StartInterruptionFrame | `"DTMF: 5"` |
-| `9`, `9`, EndFrame | Pipeline shutdown | `"DTMF: 99"` |
+| User Input | Aggregation Trigger | Output TranscriptionFrame |
+| ------------------ | ------------------- | ------------------------- |
+| `1`, `2`, `3`, `#` | Termination digit | `"DTMF: 123#"` |
+| `*`, `0` | 2-second timeout | `"DTMF: *0"` |
+| `5`, interruption | InterruptionFrame | `"DTMF: 5"` |
+| `9`, `9`, EndFrame | Pipeline shutdown | `"DTMF: 99"` |
## Frame Flow
diff --git a/server/utilities/filters/stt-mute.mdx b/server/utilities/filters/stt-mute.mdx
index 4b9af9e1..b8057bf2 100644
--- a/server/utilities/filters/stt-mute.mdx
+++ b/server/utilities/filters/stt-mute.mdx
@@ -102,7 +102,7 @@ The processor is configured using `STTMuteConfig`, which determines when and how
Indicates an interim transcription result (suppressed when muted)
-
+
User interruption start event (suppressed when muted)
diff --git a/server/utilities/observers/debug-observer.mdx b/server/utilities/observers/debug-observer.mdx
index 5513cb48..3a36171d 100644
--- a/server/utilities/observers/debug-observer.mdx
+++ b/server/utilities/observers/debug-observer.mdx
@@ -57,7 +57,7 @@ task = PipelineTask(
Filter frames based on their type and source/destination:
```python
-from pipecat.frames.frames import StartInterruptionFrame, UserStartedSpeakingFrame, LLMTextFrame
+from pipecat.frames.frames import InterruptionFrame, UserStartedSpeakingFrame, LLMTextFrame
from pipecat.observers.loggers.debug_log_observer import DebugLogObserver, FrameEndpoint
from pipecat.transports.base_output_transport import BaseOutputTransport
from pipecat.services.stt_service import STTService
@@ -67,8 +67,8 @@ task = PipelineTask(
params=PipelineParams(
observers=[
DebugLogObserver(frame_types={
- # Only log StartInterruptionFrame when source is BaseOutputTransport
- StartInterruptionFrame: (BaseOutputTransport, FrameEndpoint.SOURCE),
+ # Only log InterruptionFrame when source is BaseOutputTransport
+ InterruptionFrame: (BaseOutputTransport, FrameEndpoint.SOURCE),
# Only log UserStartedSpeakingFrame when destination is STTService
UserStartedSpeakingFrame: (STTService, FrameEndpoint.DESTINATION),
diff --git a/server/utilities/observers/observer-pattern.mdx b/server/utilities/observers/observer-pattern.mdx
index a5fd0860..586227e0 100644
--- a/server/utilities/observers/observer-pattern.mdx
+++ b/server/utilities/observers/observer-pattern.mdx
@@ -68,7 +68,7 @@ Here's an example observer that logs interruptions and bot speaking events:
```python
from pipecat.observers.base_observer import BaseObserver, FramePushed, FrameProcessed
from pipecat.frames.frames import (
- StartInterruptionFrame,
+ InterruptionFrame,
BotStartedSpeakingFrame,
BotStoppedSpeakingFrame,
)
@@ -79,7 +79,7 @@ class DebugObserver(BaseObserver):
"""Observer to log interruptions and bot speaking events to the console.
Logs all frame instances of:
- - StartInterruptionFrame
+ - InterruptionFrame
- BotStartedSpeakingFrame
- BotStoppedSpeakingFrame
@@ -91,7 +91,7 @@ class DebugObserver(BaseObserver):
time_sec = data.timestamp / 1_000_000_000
arrow = "→" if data.direction == FrameDirection.DOWNSTREAM else "←"
- if isinstance(data.frame, StartInterruptionFrame):
+ if isinstance(data.frame, InterruptionFrame):
logger.info(f"⚡ INTERRUPTION START: {data.source} {arrow} {data.destination} at {time_sec:.2f}s")
elif isinstance(data.frame, BotStartedSpeakingFrame):
logger.info(f"🤖 BOT START SPEAKING: {data.source} {arrow} {data.destination} at {time_sec:.2f}s")