@@ -645,19 +645,23 @@ You can also decorate and attach steps to a workflow without subclassing it.
 Below is the `JokeFlow` from earlier, but defined without subclassing.
 
 ```python
-from workflows import Workflow, step
-from workflows.events import Event, StartEvent, StopEvent
+from llama_index.core.workflow import (
+    Event,
+    StartEvent,
+    StopEvent,
+    Workflow,
+    step,
+)
 from llama_index.llms.openai import OpenAI
 
 
 class JokeEvent(Event):
     joke: str
 
+class JokeFlow(Workflow):
+    pass
 
-joke_flow = Workflow(timeout=60, verbose=True)
-
-
-@step(workflow=joke_flow)
+@step(workflow=JokeFlow)
 async def generate_joke(ev: StartEvent) -> JokeEvent:
     topic = ev.topic
 
@@ -668,7 +672,7 @@ async def generate_joke(ev: StartEvent) -> JokeEvent:
     return JokeEvent(joke=str(response))
 
 
-@step(workflow=joke_flow)
+@step(workflow=JokeFlow)
 async def critique_joke(ev: JokeEvent) -> StopEvent:
     joke = ev.joke
 
@@ -677,6 +681,9 @@ async def critique_joke(ev: JokeEvent) -> StopEvent:
     )
     response = await llm.acomplete(prompt)
     return StopEvent(result=str(response))
+
+
+joke_flow = JokeFlow(timeout=60, verbose=True)
 ```
 
 ## Maintaining Context Across Runs
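For reference, here is a minimal sketch of running the flow assembled above. It assumes the standard `Workflow.run()` entry point, where keyword arguments populate the `StartEvent`, plus an `OPENAI_API_KEY` available in the environment; the `"pirates"` topic is only an illustration.

```python
import asyncio


async def main() -> None:
    # joke_flow is the instance created at the end of the snippet above;
    # the @step-decorated functions are attached to JokeFlow, so they fire
    # as their input events are emitted.
    result = await joke_flow.run(topic="pirates")  # topic feeds the StartEvent
    print(str(result))


if __name__ == "__main__":
    asyncio.run(main())
```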