Use the Functional API

Status: ACTIVE (pulled from docs.langchain.com) | Source: https://docs.langchain.com/oss/python/langgraph/use-functional-api | Timestamp: 2026-05-11

Practical how-to guide for the Functional API. See Functional API Overview for concepts.

Creating a Simple Workflow

from langgraph.func import entrypoint, task
from langgraph.checkpoint.memory import InMemorySaver

@task
def is_even(number: int) -> bool:
    """Return True when *number* is divisible by two."""
    remainder = number % 2
    return remainder == 0

@task
def format_message(is_even: bool) -> str:
    """Render the parity flag as a human-readable sentence."""
    if is_even:
        return "The number is even."
    return "The number is odd."

# In-memory checkpointer: persists workflow state per thread_id for the examples below.
checkpointer = InMemorySaver()

@entrypoint(checkpointer=checkpointer)
def workflow(inputs: dict) -> str:
    """Check the parity of inputs["number"] and report it as a sentence."""
    parity_future = is_even(inputs["number"])
    return format_message(parity_future.result()).result()

# thread_id keys the checkpointed state; reusing it resumes the same thread.
config = {"configurable": {"thread_id": "1"}}
result = workflow.invoke({"number": 7}, config=config)

Compose an Essay with an LLM

# NOTE(review): `init_chat_model` is used without an import in this snippet —
# presumably `from langchain.chat_models import init_chat_model`; confirm against the docs.
model = init_chat_model('gpt-3.5-turbo')

@task
def compose_essay(topic: str) -> str:
    """Ask the chat model for an essay on *topic* and return the text content."""
    messages = [
        {"role": "system", "content": "You write essays."},
        {"role": "user", "content": f"Write an essay about {topic}."},
    ]
    response = model.invoke(messages)
    return response.content

@entrypoint(checkpointer=checkpointer)
def workflow(topic: str) -> str:
    """Compose a checkpointed essay for *topic*."""
    essay_future = compose_essay(topic)
    return essay_future.result()

Parallel Execution

@entrypoint(checkpointer=checkpointer)
def workflow(topics: list[str]) -> str:
    """Generate one paragraph per topic concurrently, then join them.

    NOTE(review): relies on a `generate_paragraph` task defined elsewhere
    in the surrounding docs — not shown in this snippet.
    """
    pending = [generate_paragraph(t) for t in topics]
    sections = [future.result() for future in pending]
    return "\n\n".join(sections)

Chatbot with Memory

@entrypoint(checkpointer=checkpointer)
def workflow(inputs: list[BaseMessage], *, previous: list[BaseMessage]):
    """One chat turn: merge prior history, call the model, persist the thread.

    Returns the model reply as the value while saving the full message
    history (history + reply) for the next turn on this thread.
    """
    history = add_messages(previous, inputs) if previous else inputs
    reply = call_model(history).result()
    return entrypoint.final(value=reply, save=add_messages(history, reply))

# stream_mode="values" yields the current value at each step; pretty_print renders it.
config = {"configurable": {"thread_id": "1"}}
input_message = {"role": "user", "content": "hi! I'm bob"}
for chunk in workflow.stream([input_message], config, stream_mode="values"):
    chunk.pretty_print()

# Same thread_id, so the checkpointer supplies the prior turn as `previous`.
input_message = {"role": "user", "content": "what's my name?"}
for chunk in workflow.stream([input_message], config, stream_mode="values"):
    chunk.pretty_print()

entrypoint.final Pattern

@entrypoint(checkpointer=checkpointer)
def accumulate(n: int, *, previous: int | None) -> entrypoint.final[int, int]:
    """Return the total saved *before* this call; save that total plus *n*."""
    running = 0 if previous is None else previous
    return entrypoint.final(value=running, save=running + n)

# Each call prints the total as of before the call, then folds n into the saved state.
print(accumulate.invoke(1, config=config))  # 0 (previous before this call)
print(accumulate.invoke(2, config=config))  # 1
print(accumulate.invoke(3, config=config))  # 3