Use the Graph API

Status: ACTIVE (pulled from docs.langchain.com) Source: https://docs.langchain.com/oss/python/langgraph/use-graph-api Timestamp: 2026-05-11

Practical how-to guide for the Graph API. See Graph API Overview for concepts.

Define and Update State

from langchain.messages import AIMessage, AnyMessage
from typing_extensions import TypedDict

class State(TypedDict):
    """Graph state: a message list plus one extra integer channel."""
    # No reducer declared, so each update overwrites this list wholesale.
    messages: list[AnyMessage]
    # Second channel, included to show that state can hold multiple keys.
    extra_field: int

def node(state: State):
    """Append a greeting AIMessage to the history and set extra_field.

    Returns a partial state update; because `messages` has no reducer in
    this example, the returned list replaces the previous one.
    """
    updated_history = [*state["messages"], AIMessage("Hello!")]
    return {"messages": updated_history, "extra_field": 10}

Reducers

Each state key can have its own reducer:

from typing import Annotated
from langgraph.graph.message import add_messages

class State(TypedDict):
    """State whose `messages` key merges updates via the add_messages reducer."""
    # add_messages appends/merges returned messages instead of overwriting.
    messages: Annotated[list[AnyMessage], add_messages]
    extra_field: int  # Default: overwrite

MessagesState

Built-in state with add_messages reducer:

from langgraph.graph import MessagesState

class State(MessagesState):
    """Extend the built-in MessagesState (which provides `messages`) with documents."""
    # Extra channel alongside the inherited `messages` key.
    documents: list[str]

Bypass Reducers with Overwrite

from langgraph.types import Overwrite

def replace_messages(state: State):
    """Discard the accumulated history, bypassing the add_messages reducer."""
    replacement = ["replacement message"]
    # Overwrite sets the channel directly instead of routing through the reducer.
    return {"messages": Overwrite(replacement)}

Input and Output Schemas

class InputState(TypedDict):
    """Schema of the data callers pass into the graph."""
    question: str

class OutputState(TypedDict):
    """Schema of the data the graph returns to callers."""
    answer: str

class OverallState(InputState, OutputState):
    """Internal working state: the union of input and output keys."""
    pass

graph = StateGraph(OverallState, input_schema=InputState, output_schema=OutputState)

Private State Between Nodes

class Node1Output(TypedDict):
    """Output schema for node_1 — a channel shared privately between nodes."""
    private_data: str

def node_1(state: OverallState) -> Node1Output:
    """Emit `private_data`, consumed only by nodes declaring it in their input."""
    payload: Node1Output = {"private_data": "set by node_1"}
    return payload

class Node2Input(TypedDict):
    """Input schema for node_2: only the private channel, not the full state."""
    private_data: str

def node_2(state: Node2Input) -> OverallState:
    """Consume the private channel and write back to the public state."""
    # NOTE(review): key "a" is not declared on any OverallState visible in this
    # excerpt — presumably the full example defines OverallState with an `a`
    # field; verify against the original docs page.
    update: OverallState = {"a": "set by node_2"}
    return update

Pydantic Models for State

from pydantic import BaseModel

class OverallState(BaseModel):
    """Pydantic-based state schema: inputs are validated at runtime."""
    a: str

Runtime Configuration

from dataclasses import dataclass

from langgraph.runtime import Runtime

# TypedDict fields cannot carry runtime default values, so the context
# schema is a dataclass: the "openai" default is actually honored when no
# context (or a partial context) is supplied at invocation time.
@dataclass
class ContextSchema:
    llm_provider: str = "openai"

def node(state: State, runtime: Runtime[ContextSchema]):
    # runtime.context holds the static, per-run configuration.
    llm = get_llm(runtime.context.llm_provider)
    ...

# NOTE(review): StateGraph is a builder; a real program adds nodes/edges and
# calls .compile() before .invoke() — elided here for brevity.
graph = StateGraph(State, context_schema=ContextSchema)
graph.invoke(inputs, context={"llm_provider": "anthropic"})

Retry Policies

from langgraph.types import RetryPolicy

builder.add_node("node_name", node_function, retry_policy=RetryPolicy(max_attempts=3))

Node Timeouts (async-only)

builder.add_node("model", call_model, timeout=1.0)

Error Handling

from langgraph.errors import NodeError
from langgraph.types import Command

def payment_error_handler(state: State, error: NodeError) -> Command:
    """Compensating handler: record the failure, then jump to finalize."""
    status_message = f"compensated: {error.error}"
    return Command(goto="finalize", update={"status": status_message})

Access Execution Info

def my_node(state: State, runtime: Runtime) -> State:
    """Read execution metadata (thread id, retry attempt) from the runtime."""
    exec_info = runtime.execution_info
    thread_id = exec_info.thread_id
    attempt = exec_info.node_attempt
    ...

Node Caching

from langgraph.types import CachePolicy
from langgraph.cache.memory import InMemoryCache

# Cache this node's results for 120 seconds, keyed on its input.
builder.add_node("expensive", expensive_node, cache_policy=CachePolicy(ttl=120))
# A cache backend must be supplied at compile time for cache_policy to take effect.
graph = builder.compile(cache=InMemoryCache())

Sequences (Linear Flow)

builder = StateGraph(State)
# Nodes added without an explicit name default to the function's name.
builder.add_node(step_1)
builder.add_node(step_2)
builder.add_node(step_3)
# Chain the steps so they execute strictly one after another.
builder.add_edge(START, "step_1")
builder.add_edge("step_1", "step_2")
builder.add_edge("step_2", "step_3")

Branches (Parallel Execution)

# Fan out: all three LLM nodes start from START, so they run in parallel.
builder.add_edge(START, "call_llm_1")
builder.add_edge(START, "call_llm_2")
builder.add_edge(START, "call_llm_3")
# Fan in: aggregator runs once all three branches have completed.
builder.add_edge("call_llm_1", "aggregator")
builder.add_edge("call_llm_2", "aggregator")
builder.add_edge("call_llm_3", "aggregator")

Send API (Map-Reduce)

from langgraph.types import Send
from typing import Annotated
import operator

class OverallState(TypedDict):
    """Map-reduce state: jokes accumulate across parallel Send branches."""
    subjects: list[str]
    # operator.add concatenates the lists returned by parallel branches.
    jokes: Annotated[list[str], operator.add]

def continue_to_jokes(state: OverallState):
    """Map step: fan out one generate_joke invocation per subject."""
    sends = []
    for subject in state["subjects"]:
        # Each Send carries its own private input state to the target node.
        sends.append(Send("generate_joke", {"subject": subject}))
    return sends

Command (State Updates + Routing)

from langgraph.types import Command
from typing import Literal

def my_node(state: State) -> Command[Literal["my_other_node"]]:
    """Atomically update `foo` and hand control to my_other_node."""
    routed = Command(goto="my_other_node", update={"foo": "bar"})
    return routed

Navigate to Parent Graph

def my_node(state: State) -> Command[Literal["other_subgraph"]]:
    """Update state and route to a node that lives in the parent graph."""
    # graph=Command.PARENT resolves `goto` in the parent graph's namespace.
    return Command(
        goto="other_subgraph",
        graph=Command.PARENT,
        update={"foo": "bar"},
    )

Visualize Your Graph

from IPython.display import Image, display

display(Image(graph.get_graph().draw_mermaid_png()))