Bedrock AgentCore
Amazon Bedrock AgentCore is an agentic platform to build, deploy, and operate highly capable agents securely at scale.
AgentCore is comprised of fully-managed services that can be used together or independently and work with any framework (including CrewAI, LangGraph, LlamaIndex, Google ADK, OpenAI Agents SDK, and Strands Agents), as well as any foundation model - eliminating the choice between open-source flexibility and enterprise-grade security and reliability.
The following example shows how to implement a Strands agent deployed on AgentCore.
Install
pip install --force-reinstall -U -r requirements.txt --quiet

Setup
Now we'll configure OpenTelemetry with our StrandsToOpenInferenceProcessor. This processor is responsible for converting Strands telemetry data to the OpenInference format that Arize AI can understand and visualize.
The processor handles:
Converting Strands span kinds to OpenInference span kinds (LLM, TOOL, AGENT, CHAIN)
Mapping Strands attributes to OpenInference attributes
Creating a hierarchical graph structure for visualization
Preserving important metadata like token usage and model information
Agent Implementation
Now that you have tracing set up, all Strands Agent requests will be streamed to Arize AX for observability and evaluation. An example is shown below.
%%writefile strands_claude.py
import os
import logging
from bedrock_agentcore.runtime import BedrockAgentCoreApp
from strands import Agent, tool
from strands.models import BedrockModel
from strands.telemetry import StrandsTelemetry
from ddgs import DDGS
from opentelemetry import trace
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import BatchSpanProcessor
from opentelemetry.sdk.resources import Resource
from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter
from strands_to_openinference_mapping import StrandsToOpenInferenceProcessor
# Span processor that rewrites Strands telemetry into OpenInference
# semantic conventions (span kinds and attributes) so Arize can
# visualize the traces.
strands_processor = StrandsToOpenInferenceProcessor()
# The resource's "model_id" is what Arize uses as the project name.
resource = Resource.create({
    "model_id": "agentcore-strands-agent", ### <-- Update with your Arize Project Name
})
provider = TracerProvider(resource=resource)
# NOTE(review): the mapping processor is registered before the batch
# exporter — presumably spans must be converted before export; confirm
# the SDK's processor-ordering semantics if this is ever reordered.
provider.add_span_processor(strands_processor)
# Endpoint and auth headers are taken from the OTEL_EXPORTER_OTLP_*
# environment variables set at launch time (see the deploy step below).
otel_exporter = OTLPSpanExporter()
provider.add_span_processor(BatchSpanProcessor(otel_exporter))
# Install this provider as the process-wide tracer provider.
trace.set_tracer_provider(provider)
# Root logging stays at ERROR; this module's own logger level is
# controlled by AGENT_RUNTIME_LOG_LEVEL (defaults to INFO).
logging.basicConfig(level=logging.ERROR, format="[%(levelname)s] %(message)s")
logger = logging.getLogger(__name__)
logger.setLevel(os.getenv("AGENT_RUNTIME_LOG_LEVEL", "INFO").upper())
@tool
def web_search(query: str) -> str:
    """Search the web for information using DuckDuckGo.

    Args:
        query: The search query.

    Returns:
        A numbered, newline-separated summary of up to five results, a
        "No results found." message, or an error string on failure.
    """
    try:
        hits = DDGS().text(query, max_results=5)
        # Build one pre-formatted entry per hit; missing fields fall back
        # to placeholder text.
        formatted = [
            f"{idx}. {hit.get('title', 'No title')}\n"
            f" {hit.get('body', 'No summary')}\n"
            f" Source: {hit.get('href', 'No URL')}\n"
            for idx, hit in enumerate(hits, 1)
        ]
        if not formatted:
            return "No results found."
        return "\n".join(formatted)
    except Exception as e:
        # Best-effort tool: surface the failure as text so the agent can
        # react instead of crashing the request.
        return f"Error searching the web: {str(e)}"
# Function to initialize Bedrock model
def get_bedrock_model():
    """Build the BedrockModel from environment configuration.

    The region comes from AWS_DEFAULT_REGION (default "us-west-2") and the
    model id from BEDROCK_MODEL_ID (default Claude 3.7 Sonnet). Temperature
    is pinned to 0.0 for deterministic answers, with a 1024-token cap.

    Returns:
        A configured BedrockModel instance.
    """
    return BedrockModel(
        model_id=os.getenv(
            "BEDROCK_MODEL_ID", "us.anthropic.claude-3-7-sonnet-20250219-v1:0"
        ),
        region_name=os.getenv("AWS_DEFAULT_REGION", "us-west-2"),
        temperature=0.0,
        max_tokens=1024,
    )
# Initialize the Bedrock model
# Created once at module load and reused by every agent instance.
bedrock_model = get_bedrock_model()
# Define the agent's system prompt
system_prompt = """You are an experienced travel agent specializing in personalized travel recommendations
with access to real-time web information. Your role is to find dream destinations matching user preferences
using web search for current information. You should provide comprehensive recommendations with current
information, brief descriptions, and practical travel details."""
# AgentCore application object; @app.entrypoint below registers the handler.
app = BedrockAgentCoreApp()
def initialize_agent():
    """Initialize the agent with proper telemetry configuration.

    Returns:
        An Agent wired to the shared Bedrock model, the travel-agent system
        prompt, and the web_search tool.
    """
    return Agent(
        model=bedrock_model,
        system_prompt=system_prompt,
        tools=[web_search],
    )
@app.entrypoint
def strands_agent_bedrock(payload, context=None):
    """Invoke the agent with a payload.

    Args:
        payload: Request dict; the user's message is read from "prompt".
        context: Optional runtime context. When present, its session_id is
            used to correlate log lines; may be None (e.g. local invocation).

    Returns:
        The text of the first content block of the agent's response message.
    """
    user_input = payload.get("prompt")
    # Bug fix: context defaults to None, so dereferencing
    # context.session_id unconditionally raised AttributeError whenever no
    # context was supplied. getattr keeps logging working in both cases.
    session_id = getattr(context, "session_id", None)
    logger.info("[%s] User input: %s", session_id, user_input)
    # Initialize agent with proper configuration
    agent = initialize_agent()
    response = agent(user_input)
    return response.message['content'][0]['text']
if __name__ == "__main__":
    app.run()

Configure AgentCore Runtime deployment
from bedrock_agentcore_starter_toolkit import Runtime
from boto3.session import Session
# Resolve the target region from the ambient boto3 session/credentials.
boto_session = Session()
region = boto_session.region_name
agentcore_runtime = Runtime()
agent_name = "strands_agentcore_arize_observability"
# Configure the AgentCore Runtime deployment for the agent script above.
response = agentcore_runtime.configure(
    entrypoint="strands_claude.py",
    auto_create_execution_role=True,  # let the toolkit create the IAM role
    auto_create_ecr=True,  # and the ECR repository for the container image
    requirements_file="requirements.txt",
    region=region,
    agent_name=agent_name,
    memory_mode='NO_MEMORY',
    # NOTE(review): presumably disables the runtime's built-in OTEL
    # instrumentation so only the Arize exporter configured in
    # strands_claude.py emits spans — confirm against toolkit docs.
    disable_otel=True,
)
response

Deploy to AgentCore Runtime
# Set the Space and API keys as headers for authentication
import os

# Bug fix: the original nested double quotes inside a double-quoted
# f-string (os.environ["..."] inside f"..."), which is a SyntaxError on
# Python < 3.12 (only PEP 701, Python 3.12+, allows it). Single quotes
# inside the f-string are valid on every supported version.
headers = f"space_id={os.environ['ARIZE_SPACE_ID']},api_key={os.environ['ARIZE_API_KEY']}"
launch_result = agentcore_runtime.launch(
    env_vars={
        "BEDROCK_MODEL_ID": "us.anthropic.claude-3-7-sonnet-20250219-v1:0",  # Example model ID
        "OTEL_EXPORTER_OTLP_ENDPOINT": os.environ["ARIZE_ENDPOINT"],  # Use Arize OTEL endpoint
        "OTEL_EXPORTER_OTLP_HEADERS": headers,  # Add Arize OTEL auth header
        "DISABLE_ADOT_OBSERVABILITY": "true",  # Disable CloudWatch Observability
    }
)
launch_result

Invoking AgentCore Runtime
invoke_response = agentcore_runtime.invoke({"prompt": "I'm planning a weekend trip to los angeles. What are the must-visit places and local food I should try?"})

Resources
Arize + Bedrock Agent Core Tutorial - Complete example using Strands Agent on AgentCore with Arize AX tracing
Last updated
Was this helpful?

