"""Party-planning agent app: tool imports, optional Langfuse tracing, and model setup."""
import base64
import os

import yaml

from smolagents import CodeAgent, GradioUI, HfApiModel

# Directory containing this file; used to resolve prompts.yaml relative to the module.
# (The original computed this twice — once is enough.)
CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))

# Project-local tools, aliased to descriptive names.
from tools.web_search import DuckDuckGoSearchTool as WebSearch
from tools.visit_webpage import VisitWebpageTool as VisitWebpage
from tools.suggest_menu import SimpleTool as SuggestMenu
from tools.catering_service_tool import SimpleTool as CateringServiceTool
from tools.superhero_party_theme_generator import SuperheroPartyThemeTool as SuperheroPartyThemeGenerator
from tools.final_answer import FinalAnswerTool as FinalAnswer

# OpenTelemetry instrumentation for exporting agent traces to Langfuse.
from opentelemetry.sdk.trace import TracerProvider
from openinference.instrumentation.smolagents import SmolagentsInstrumentor
from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter
from opentelemetry.sdk.trace.export import SimpleSpanProcessor

# Enable tracing only when both Langfuse credentials are present in the environment.
if "LANGFUSE_PUBLIC_KEY" in os.environ and "LANGFUSE_SECRET_KEY" in os.environ:
    LANGFUSE_PUBLIC_KEY = os.environ.get("LANGFUSE_PUBLIC_KEY")
    LANGFUSE_SECRET_KEY = os.environ.get("LANGFUSE_SECRET_KEY")
    # OTLP endpoint uses HTTP Basic auth: base64("public_key:secret_key").
    LANGFUSE_AUTH = base64.b64encode(
        f"{LANGFUSE_PUBLIC_KEY}:{LANGFUSE_SECRET_KEY}".encode()
    ).decode()

    # Point the OTLP exporter at Langfuse (EU region) with the Basic-auth header.
    os.environ["OTEL_EXPORTER_OTLP_ENDPOINT"] = "https://cloud.langfuse.com/api/public/otel"  # EU region
    os.environ["OTEL_EXPORTER_OTLP_HEADERS"] = f"Authorization=Basic {LANGFUSE_AUTH}"

    # Wire the span pipeline and instrument smolagents so agent steps are traced.
    trace_provider = TracerProvider()
    trace_provider.add_span_processor(SimpleSpanProcessor(OTLPSpanExporter()))
    SmolagentsInstrumentor().instrument(tracer_provider=trace_provider)
    print("Langfuse tracing enabled")

# Hugging Face Inference API model backing the agent.
model = HfApiModel(
    model_id='Qwen/Qwen2.5-Coder-32B-Instruct',
)

# Tool instances handed to the agent (construction continues on the next block).
web_search = WebSearch()
visit_webpage = VisitWebpage()
suggest_menu = SuggestMenu()
catering_service_tool = CateringServiceTool()
# Remaining tool instances.
superhero_party_theme_generator = SuperheroPartyThemeGenerator()
final_answer = FinalAnswer()

# Load the prompt templates shipped alongside this module.
with open(os.path.join(CURRENT_DIR, "prompts.yaml"), 'r') as stream:
    prompt_templates = yaml.safe_load(stream)

agent = CodeAgent(
    model=model,
    # FIX: `final_answer` was instantiated but never passed to the agent,
    # leaving it dead code; include it so this FinalAnswerTool instance is used.
    tools=[
        web_search,
        visit_webpage,
        suggest_menu,
        catering_service_tool,
        superhero_party_theme_generator,
        final_answer,
    ],
    managed_agents=[],
    max_steps=10,
    verbosity_level=2,
    grammar=None,
    planning_interval=None,
    name=None,
    description=None,
    executor_type='local',
    executor_kwargs={},
    max_print_outputs_length=None,
    prompt_templates=prompt_templates,
)

if __name__ == "__main__":
    # Serve the agent through a Gradio chat UI.
    GradioUI(agent).launch()