
alvin
Hi, the global context appears to be broken. Any help or workaround?

Python
from llama_index.core.workflow import (
    Context,
    Workflow,
    StartEvent,
    StopEvent,
    step,
)
import asyncio  # Import asyncio to run the async functions


class WaitExampleFlow(Workflow):
    @step
    async def setup(self, ctx: Context, ev: StartEvent) -> StopEvent:
        if hasattr(ev, "data"):
            ctx.data["some_database"] = ev.data

        return StopEvent(result=None)

    @step
    async def query(self, ctx: Context, ev: StartEvent) -> StopEvent | None:
        if hasattr(ev, "query"):
            # do we have any data?
            if "some_database" in ctx.data:
                data = ctx.data["some_database"]
                return StopEvent(result=f"Got the data {data}")
            else:
                # there's no data yet
                return None
        else:
            # this isn't a query
            return None


async def main():
    w = WaitExampleFlow(verbose=True)
    result = await w.run(query="Can I kick it?")
    if result is None:
        print("No you can't")
    print("---")
    result = await w.run(data="Yes you can")
    print("---")
    result = await w.run(query="Can I kick it?")
    print(result)


if __name__ == "__main__":
    asyncio.run(main())
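
A possible workaround, offered as a minimal sketch: in recent llama-index releases the `ctx.data` dict is no longer available, and context state is not shared across separate `w.run()` calls by default. The sketch below assumes a version where `Context` exposes async `get`/`set` helpers (newer releases move these to `ctx.store`) and where `Workflow.run` accepts a `ctx` argument, so the same `Context` instance can be reused to keep state between runs. Exact names may differ by version, so treat this as an assumption to verify against your installed docs.

Python
from llama_index.core.workflow import (
    Context,
    Workflow,
    StartEvent,
    StopEvent,
    step,
)
import asyncio


class WaitExampleFlow(Workflow):
    @step
    async def setup(self, ctx: Context, ev: StartEvent) -> StopEvent:
        if hasattr(ev, "data"):
            # store the value via the context API instead of ctx.data
            await ctx.set("some_database", ev.data)
        return StopEvent(result=None)

    @step
    async def query(self, ctx: Context, ev: StartEvent) -> StopEvent | None:
        if hasattr(ev, "query"):
            # passing a default avoids an error when the key is missing
            data = await ctx.get("some_database", default=None)
            if data is not None:
                return StopEvent(result=f"Got the data {data}")
        # no data yet, or this isn't a query
        return None


async def main():
    w = WaitExampleFlow(verbose=True)
    # reuse one Context so state survives across separate runs
    ctx = Context(w)
    result = await w.run(ctx=ctx, query="Can I kick it?")
    if result is None:
        print("No you can't")
    print("---")
    await w.run(ctx=ctx, data="Yes you can")
    print("---")
    result = await w.run(ctx=ctx, query="Can I kick it?")
    print(result)


if __name__ == "__main__":
    asyncio.run(main())

The key change is passing `ctx=ctx` to every `run()` call; without it each run starts with a fresh context, which is why the stored data appears to vanish.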
7 comments