Commit 9c4f685

fix 'Decorating non-class Functions' example (#65)
1 parent 4bd97e8 commit 9c4f685

docs/docs/module_guides/workflow/index.md

Lines changed: 14 additions & 7 deletions
@@ -645,19 +645,23 @@ You can also decorate and attach steps to a workflow without subclassing it.
 Below is the `JokeFlow` from earlier, but defined without subclassing.
 
 ```python
-from workflows import Workflow, step
-from workflows.events import Event, StartEvent, StopEvent
+from llama_index.core.workflow import (
+    Event,
+    StartEvent,
+    StopEvent,
+    Workflow,
+    step,
+)
 from llama_index.llms.openai import OpenAI
 
 
 class JokeEvent(Event):
     joke: str
 
+class JokeFlow(Workflow):
+    pass
 
-joke_flow = Workflow(timeout=60, verbose=True)
-
-
-@step(workflow=joke_flow)
+@step(workflow=JokeFlow)
 async def generate_joke(ev: StartEvent) -> JokeEvent:
     topic = ev.topic
 
@@ -668,7 +672,7 @@ async def generate_joke(ev: StartEvent) -> JokeEvent:
     return JokeEvent(joke=str(response))
 
 
-@step(workflow=joke_flow)
+@step(workflow=JokeFlow)
 async def critique_joke(ev: JokeEvent) -> StopEvent:
     joke = ev.joke
 
@@ -677,6 +681,9 @@ async def critique_joke(ev: JokeEvent) -> StopEvent:
     )
     response = await llm.acomplete(prompt)
     return StopEvent(result=str(response))
+
+
+joke_flow = JokeFlow(timeout=60, verbose=True)
 ```
 
 ## Maintaining Context Across Runs
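
For context, a workflow assembled with the corrected example can be driven like any other LlamaIndex workflow instance. Below is a minimal usage sketch, assuming `JokeFlow`, its decorated steps, and `joke_flow = JokeFlow(timeout=60, verbose=True)` from the diff above are already defined in the same module; the `"pirates"` topic is an arbitrary placeholder, not part of the commit.

```python
import asyncio

# Sketch only: assumes JokeFlow, generate_joke, critique_joke, and
# joke_flow = JokeFlow(timeout=60, verbose=True) from the corrected
# example above are already defined in this module.


async def main() -> None:
    # Keyword arguments to run() become fields on the StartEvent,
    # which generate_joke reads as ev.topic.
    result = await joke_flow.run(topic="pirates")  # "pirates" is an arbitrary example topic
    print(str(result))


if __name__ == "__main__":
    asyncio.run(main())
```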
