From 14bb1e6629ea766538f42898844c7d705899b190 Mon Sep 17 00:00:00 2001 From: Ania Misiorek Date: Tue, 11 Nov 2025 15:05:34 -0500 Subject: [PATCH 01/17] adding gen_ai --- .../lib/opentelemetry/semantic_conventions.rb | 9 +++ traceloop-sdk/lib/traceloop/sdk.rb | 58 +++++++++++++++---- traceloop-sdk/traceloop-sdk.gemspec | 4 +- 3 files changed, 59 insertions(+), 12 deletions(-) diff --git a/semantic_conventions_ai/lib/opentelemetry/semantic_conventions.rb b/semantic_conventions_ai/lib/opentelemetry/semantic_conventions.rb index 891eaf9..c482232 100644 --- a/semantic_conventions_ai/lib/opentelemetry/semantic_conventions.rb +++ b/semantic_conventions_ai/lib/opentelemetry/semantic_conventions.rb @@ -30,6 +30,15 @@ module SpanAttributes # Deprecated TRACELOOP_CORRELATION_ID = "traceloop.correlation.id" + + # Gen AI + GEN_AI_REQUEST_MODEL = "gen_ai.request.model" + GEN_AI_RESPONSE_MODEL = "gen_ai.response.model" + GEN_AI_USAGE_COMPLETION_TOKENS = "gen_ai.usage.completion_tokens" + GEN_AI_USAGE_PROMPT_TOKENS = "gen_ai.usage.prompt_tokens" + GEN_AI_COMPLETIONS = "gen_ai.completion" + GEN_AI_PROMPTS = "gen_ai.prompt" + GEN_AI_SYSTEM = "gen_ai.system" end module LLMRequestTypeValues diff --git a/traceloop-sdk/lib/traceloop/sdk.rb b/traceloop-sdk/lib/traceloop/sdk.rb index 640be53..a9e5f9f 100644 --- a/traceloop-sdk/lib/traceloop/sdk.rb +++ b/traceloop-sdk/lib/traceloop/sdk.rb @@ -11,11 +11,13 @@ def initialize OpenTelemetry::SDK::Trace::Export::SimpleSpanProcessor.new( OpenTelemetry::Exporter::OTLP::Exporter.new( endpoint: "#{ENV.fetch("TRACELOOP_BASE_URL", "https://api.traceloop.com")}/v1/traces", - headers: { "Authorization" => "Bearer #{ENV.fetch("TRACELOOP_API_KEY")}" } + headers: { + Authorization: "#{ENV.fetch("TRACELOOP_AUTH_SCHEME", "Bearer")} #{ENV.fetch("TRACELOOP_API_KEY")}" + } ) ) ) - puts "Traceloop exporting traces to #{ENV.fetch("TRACELOOP_BASE", "https://api.traceloop.com")}" + puts "Traceloop exporting traces to #{ENV.fetch("TRACELOOP_BASE_URL", 
"https://api.traceloop.com")}" end @tracer = OpenTelemetry.tracer_provider.tracer("Traceloop") @@ -41,15 +43,15 @@ def log_messages(messages) def log_prompt(system_prompt="", user_prompt) unless system_prompt.empty? @span.add_attributes({ - "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::LLM_PROMPTS}.0.role" => "system", - "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::LLM_PROMPTS}.0.content" => system_prompt, - "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::LLM_PROMPTS}.1.role" => "user", - "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::LLM_PROMPTS}.1.content" => user_prompt + "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_PROMPTS}.0.role" => "system", + "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_PROMPTS}.0.content" => system_prompt, + "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_PROMPTS}.1.role" => "user", + "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_PROMPTS}.1.content" => user_prompt }) else @span.add_attributes({ - "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::LLM_PROMPTS}.0.role" => "user", - "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::LLM_PROMPTS}.0.content" => user_prompt + "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_PROMPTS}.0.role" => "user", + "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_PROMPTS}.0.content" => user_prompt }) end end @@ -57,9 +59,12 @@ def log_prompt(system_prompt="", user_prompt) def log_response(response) if response.respond_to?(:body) log_bedrock_response(response) + # Check for RubyLLM::Message objects + elsif response.instance_of?(::RubyLLM::Message) + log_ruby_llm_response(response) # This is Gemini specific, see - # https://github.com/gbaptista/gemini-ai?tab=readme-ov-file#generate_content - elsif response.has_key?("candidates") + elsif response.respond_to?(:has_key?) 
&& response.has_key?("candidates") log_gemini_response(response) else log_openai_response(response) @@ -77,6 +82,38 @@ def log_gemini_response(response) }) end + def log_ruby_llm_response(response) + model = response.respond_to?(:model_id) ? response.model_id : @model + @span.add_attributes({ + OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_RESPONSE_MODEL => model, + }) + + if response.respond_to?(:input_tokens) && response.input_tokens && + response.respond_to?(:output_tokens) && response.output_tokens + @span.add_attributes({ + OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_USAGE_COMPLETION_TOKENS => response.output_tokens, + OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_USAGE_PROMPT_TOKENS => response.input_tokens, + }) + end + + if response.respond_to?(:content) && response.content + content_text = "" + role = response.respond_to?(:role) ? response.role.to_s : "assistant" + + # Handle RubyLLM::Content object + if response.content.respond_to?(:text) + content_text = response.content.text + elsif response.content.respond_to?(:to_s) + content_text = response.content.to_s + end + + @span.add_attributes({ + "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_COMPLETIONS}.0.role" => role, + "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_COMPLETIONS}.0.content" => content_text + }) + end + end + def log_bedrock_response(response) body = JSON.parse(response.body.read()) @@ -126,7 +163,8 @@ def log_openai_response(response) def llm_call(provider, model) @tracer.in_span("#{provider}.chat") do |span| span.add_attributes({ - OpenTelemetry::SemanticConventionsAi::SpanAttributes::LLM_REQUEST_MODEL => model, + OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_REQUEST_MODEL => model, + OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_SYSTEM => provider }) yield Tracer.new(span, provider, model) end diff --git a/traceloop-sdk/traceloop-sdk.gemspec b/traceloop-sdk/traceloop-sdk.gemspec 
index b985c60..9acbe62 100644 --- a/traceloop-sdk/traceloop-sdk.gemspec +++ b/traceloop-sdk/traceloop-sdk.gemspec @@ -17,8 +17,8 @@ Gem::Specification.new do |spec| spec.add_dependency 'opentelemetry-semantic_conventions_ai', '~> 0.0.3' - spec.add_dependency 'opentelemetry-sdk', '~> 1.3.1' - spec.add_dependency 'opentelemetry-exporter-otlp', '~> 0.26.1' + spec.add_dependency 'opentelemetry-exporter-otlp', '~> 0.31.1' + spec.add_dependency 'opentelemetry-sdk', '~> 1.10.0' if spec.respond_to?(:metadata) spec.metadata['source_code_uri'] = 'https://github.com/traceloop/openllmetry-ruby/tree/main/traceloop-sdk' From 987d252d4a80c86750526e575604fee010711716 Mon Sep 17 00:00:00 2001 From: Ania Misiorek Date: Wed, 12 Nov 2025 16:56:23 -0500 Subject: [PATCH 02/17] adding halting --- traceloop-sdk/lib/traceloop/sdk.rb | 46 +++++++++++------------------- 1 file changed, 17 insertions(+), 29 deletions(-) diff --git a/traceloop-sdk/lib/traceloop/sdk.rb b/traceloop-sdk/lib/traceloop/sdk.rb index a9e5f9f..932bb4d 100644 --- a/traceloop-sdk/lib/traceloop/sdk.rb +++ b/traceloop-sdk/lib/traceloop/sdk.rb @@ -8,7 +8,7 @@ class Traceloop def initialize OpenTelemetry::SDK.configure do |c| c.add_span_processor( - OpenTelemetry::SDK::Trace::Export::SimpleSpanProcessor.new( + OpenTelemetry::SDK::Trace::Export::BatchSpanProcessor.new( OpenTelemetry::Exporter::OTLP::Exporter.new( endpoint: "#{ENV.fetch("TRACELOOP_BASE_URL", "https://api.traceloop.com")}/v1/traces", headers: { @@ -61,7 +61,9 @@ def log_response(response) log_bedrock_response(response) # Check for RubyLLM::Message objects elsif response.instance_of?(::RubyLLM::Message) - log_ruby_llm_response(response) + log_ruby_llm_message(response) + elsif response.instance_of?(::RubyLLM::Tool::Halt) + log_ruby_llm_halt(response) # This is Gemini specific, see - # https://github.com/gbaptista/gemini-ai?tab=readme-ov-file#generate_content elsif response.respond_to?(:has_key?) 
&& response.has_key?("candidates") @@ -82,36 +84,22 @@ def log_gemini_response(response) }) end - def log_ruby_llm_response(response) - model = response.respond_to?(:model_id) ? response.model_id : @model + def log_ruby_llm_message(response) @span.add_attributes({ - OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_RESPONSE_MODEL => model, + OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_RESPONSE_MODEL => response.model_id, + OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_USAGE_COMPLETION_TOKENS => response.output_tokens, + OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_USAGE_PROMPT_TOKENS => response.input_tokens, + "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_COMPLETIONS}.0.role" => response.role.to_s, + "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_COMPLETIONS}.0.content" => response.content }) + end - if response.respond_to?(:input_tokens) && response.input_tokens && - response.respond_to?(:output_tokens) && response.output_tokens - @span.add_attributes({ - OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_USAGE_COMPLETION_TOKENS => response.output_tokens, - OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_USAGE_PROMPT_TOKENS => response.input_tokens, - }) - end - - if response.respond_to?(:content) && response.content - content_text = "" - role = response.respond_to?(:role) ? 
response.role.to_s : "assistant" - - # Handle RubyLLM::Content object - if response.content.respond_to?(:text) - content_text = response.content.text - elsif response.content.respond_to?(:to_s) - content_text = response.content.to_s - end - - @span.add_attributes({ - "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_COMPLETIONS}.0.role" => role, - "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_COMPLETIONS}.0.content" => content_text - }) - end + def log_ruby_llm_halt(response) + @span.add_attributes({ + OpenTelemetry::SemanticConventionsAi::SpanAttributes::LLM_RESPONSE_MODEL => @model, + "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_COMPLETIONS}.0.role" => "tool", + "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_COMPLETIONS}.0.content" => response.content + }) end def log_bedrock_response(response) From 6b2bc5f13db534a94892c611aae61dd6563496e2 Mon Sep 17 00:00:00 2001 From: Ania Misiorek Date: Fri, 14 Nov 2025 14:01:56 -0500 Subject: [PATCH 03/17] coderabbit comments --- traceloop-sdk/lib/traceloop/sdk.rb | 33 +++++++++++++++++++----------- 1 file changed, 21 insertions(+), 12 deletions(-) diff --git a/traceloop-sdk/lib/traceloop/sdk.rb b/traceloop-sdk/lib/traceloop/sdk.rb index 932bb4d..ab3d941 100644 --- a/traceloop-sdk/lib/traceloop/sdk.rb +++ b/traceloop-sdk/lib/traceloop/sdk.rb @@ -6,13 +6,16 @@ module Traceloop module SDK class Traceloop def initialize + api_key = ENV["TRACELOOP_API_KEY"] + raise "TRACELOOP_API_KEY environment variable is required" if api_key.nil? || api_key.empty? 
+ OpenTelemetry::SDK.configure do |c| c.add_span_processor( OpenTelemetry::SDK::Trace::Export::BatchSpanProcessor.new( OpenTelemetry::Exporter::OTLP::Exporter.new( endpoint: "#{ENV.fetch("TRACELOOP_BASE_URL", "https://api.traceloop.com")}/v1/traces", headers: { - Authorization: "#{ENV.fetch("TRACELOOP_AUTH_SCHEME", "Bearer")} #{ENV.fetch("TRACELOOP_API_KEY")}" + "Authorization" => "#{ENV.fetch("TRACELOOP_AUTH_SCHEME", "Bearer")} #{ENV.fetch("TRACELOOP_API_KEY")}" } ) ) @@ -60,9 +63,9 @@ def log_response(response) if response.respond_to?(:body) log_bedrock_response(response) # Check for RubyLLM::Message objects - elsif response.instance_of?(::RubyLLM::Message) + elsif response.is_a?(::RubyLLM::Message) log_ruby_llm_message(response) - elsif response.instance_of?(::RubyLLM::Tool::Halt) + elsif response.is_a?(::RubyLLM::Tool::Halt) log_ruby_llm_halt(response) # This is Gemini specific, see - # https://github.com/gbaptista/gemini-ai?tab=readme-ov-file#generate_content @@ -80,15 +83,16 @@ def log_gemini_response(response) @span.add_attributes({ "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::LLM_COMPLETIONS}.0.role" => "assistant", - "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::LLM_COMPLETIONS}.0.content" => response.dig("candidates", 0, "content", "parts", 0, "text") + "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::LLM_COMPLETIONS}.0.content" => response.dig( +"candidates", 0, "content", "parts", 0, "text") }) end def log_ruby_llm_message(response) @span.add_attributes({ OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_RESPONSE_MODEL => response.model_id, - OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_USAGE_COMPLETION_TOKENS => response.output_tokens, - OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_USAGE_PROMPT_TOKENS => response.input_tokens, + OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_USAGE_COMPLETION_TOKENS => response.output_tokens || 0, + 
OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_USAGE_PROMPT_TOKENS => response.input_tokens || 0, "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_COMPLETIONS}.0.role" => response.role.to_s, "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_COMPLETIONS}.0.content" => response.content }) @@ -96,7 +100,7 @@ def log_ruby_llm_message(response) def log_ruby_llm_halt(response) @span.add_attributes({ - OpenTelemetry::SemanticConventionsAi::SpanAttributes::LLM_RESPONSE_MODEL => @model, + OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_RESPONSE_MODEL => @model, "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_COMPLETIONS}.0.role" => "tool", "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_COMPLETIONS}.0.content" => response.content }) @@ -134,15 +138,20 @@ def log_openai_response(response) }) if response.has_key?("usage") @span.add_attributes({ - OpenTelemetry::SemanticConventionsAi::SpanAttributes::LLM_USAGE_TOTAL_TOKENS => response.dig("usage", "total_tokens"), - OpenTelemetry::SemanticConventionsAi::SpanAttributes::LLM_USAGE_COMPLETION_TOKENS => response.dig("usage", "completion_tokens"), - OpenTelemetry::SemanticConventionsAi::SpanAttributes::LLM_USAGE_PROMPT_TOKENS => response.dig("usage", "prompt_tokens"), + OpenTelemetry::SemanticConventionsAi::SpanAttributes::LLM_USAGE_TOTAL_TOKENS => response.dig("usage", + "total_tokens"), + OpenTelemetry::SemanticConventionsAi::SpanAttributes::LLM_USAGE_COMPLETION_TOKENS => response.dig( +"usage", "completion_tokens"), + OpenTelemetry::SemanticConventionsAi::SpanAttributes::LLM_USAGE_PROMPT_TOKENS => response.dig("usage", + "prompt_tokens"), }) end if response.has_key?("choices") @span.add_attributes({ - "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::LLM_COMPLETIONS}.0.role" => response.dig("choices", 0, "message", "role"), - "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::LLM_COMPLETIONS}.0.content" => response.dig("choices", 
0, "message", "content") + "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::LLM_COMPLETIONS}.0.role" => response.dig( +"choices", 0, "message", "role"), + "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::LLM_COMPLETIONS}.0.content" => response.dig( +"choices", 0, "message", "content") }) end end From 3e0b4c5022331343441c8a7015bf945178744412 Mon Sep 17 00:00:00 2001 From: Ania Misiorek Date: Mon, 17 Nov 2025 16:47:45 -0500 Subject: [PATCH 04/17] adding conversation id --- .../lib/opentelemetry/semantic_conventions.rb | 5 +++-- traceloop-sdk/lib/traceloop/sdk.rb | 19 +++++++++++++------ 2 files changed, 16 insertions(+), 8 deletions(-) diff --git a/semantic_conventions_ai/lib/opentelemetry/semantic_conventions.rb b/semantic_conventions_ai/lib/opentelemetry/semantic_conventions.rb index c482232..4b2b044 100644 --- a/semantic_conventions_ai/lib/opentelemetry/semantic_conventions.rb +++ b/semantic_conventions_ai/lib/opentelemetry/semantic_conventions.rb @@ -34,11 +34,12 @@ module SpanAttributes # Gen AI GEN_AI_REQUEST_MODEL = "gen_ai.request.model" GEN_AI_RESPONSE_MODEL = "gen_ai.response.model" - GEN_AI_USAGE_COMPLETION_TOKENS = "gen_ai.usage.completion_tokens" - GEN_AI_USAGE_PROMPT_TOKENS = "gen_ai.usage.prompt_tokens" + GEN_AI_USAGE_OUTPUT_TOKENS = "gen_ai.usage.output_tokens" + GEN_AI_USAGE_INPUT_TOKENS = "gen_ai.usage.input_tokens" GEN_AI_COMPLETIONS = "gen_ai.completion" GEN_AI_PROMPTS = "gen_ai.prompt" GEN_AI_SYSTEM = "gen_ai.system" + GEN_AI_PROVIDER = "gen_ai.provider.name" end module LLMRequestTypeValues diff --git a/traceloop-sdk/lib/traceloop/sdk.rb b/traceloop-sdk/lib/traceloop/sdk.rb index ab3d941..0088975 100644 --- a/traceloop-sdk/lib/traceloop/sdk.rb +++ b/traceloop-sdk/lib/traceloop/sdk.rb @@ -91,8 +91,8 @@ def log_gemini_response(response) def log_ruby_llm_message(response) @span.add_attributes({ OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_RESPONSE_MODEL => response.model_id, - 
OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_USAGE_COMPLETION_TOKENS => response.output_tokens || 0, - OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_USAGE_PROMPT_TOKENS => response.input_tokens || 0, + OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_USAGE_OUTPUT_TOKENS => response.output_tokens || 0, + OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_USAGE_INPUT_TOKENS => response.input_tokens || 0, "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_COMPLETIONS}.0.role" => response.role.to_s, "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_COMPLETIONS}.0.content" => response.content }) @@ -157,12 +157,19 @@ def log_openai_response(response) end end - def llm_call(provider, model) + def llm_call(provider, model, conversation_id: nil) @tracer.in_span("#{provider}.chat") do |span| - span.add_attributes({ + attributes = { OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_REQUEST_MODEL => model, - OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_SYSTEM => provider - }) + OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_SYSTEM => provider, + OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_PROVIDER => provider, + } + + if conversation_id + attributes[OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_CONVERSATION_ID] = conversation_id + end + + span.add_attributes(attributes) yield Tracer.new(span, provider, model) end end From da3f2b4e23dd41cda00ea92242725996cd20c7e7 Mon Sep 17 00:00:00 2001 From: Ania Misiorek Date: Mon, 17 Nov 2025 16:54:03 -0500 Subject: [PATCH 05/17] adding conversation id -- fix --- .../lib/opentelemetry/semantic_conventions.rb | 1 + 1 file changed, 1 insertion(+) diff --git a/semantic_conventions_ai/lib/opentelemetry/semantic_conventions.rb b/semantic_conventions_ai/lib/opentelemetry/semantic_conventions.rb index 4b2b044..b7b55fc 100644 --- a/semantic_conventions_ai/lib/opentelemetry/semantic_conventions.rb 
+++ b/semantic_conventions_ai/lib/opentelemetry/semantic_conventions.rb @@ -40,6 +40,7 @@ module SpanAttributes GEN_AI_PROMPTS = "gen_ai.prompt" GEN_AI_SYSTEM = "gen_ai.system" GEN_AI_PROVIDER = "gen_ai.provider.name" + GEN_AI_CONVERSATION_ID = "gen_ai.conversation.id" end module LLMRequestTypeValues From 6509a5df7a62128ea61ee8e396f4653ea734eb01 Mon Sep 17 00:00:00 2001 From: Ania Misiorek Date: Mon, 17 Nov 2025 17:16:57 -0500 Subject: [PATCH 06/17] guarding rubyLLM usage --- traceloop-sdk/lib/traceloop/sdk.rb | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/traceloop-sdk/lib/traceloop/sdk.rb b/traceloop-sdk/lib/traceloop/sdk.rb index 0088975..e5dd2d3 100644 --- a/traceloop-sdk/lib/traceloop/sdk.rb +++ b/traceloop-sdk/lib/traceloop/sdk.rb @@ -63,9 +63,9 @@ def log_response(response) if response.respond_to?(:body) log_bedrock_response(response) # Check for RubyLLM::Message objects - elsif response.is_a?(::RubyLLM::Message) + elsif defined?(::RubyLLM::Message) && response.is_a?(::RubyLLM::Message) log_ruby_llm_message(response) - elsif response.is_a?(::RubyLLM::Tool::Halt) + elsif defined?(::RubyLLM::Tool::Halt) && response.is_a?(::RubyLLM::Tool::Halt) log_ruby_llm_halt(response) # This is Gemini specific, see - # https://github.com/gbaptista/gemini-ai?tab=readme-ov-file#generate_content From c17e1ebea2bb1cccb1ca2707f37b0ff31bd4589d Mon Sep 17 00:00:00 2001 From: Ania Misiorek Date: Mon, 1 Dec 2025 11:38:55 -0500 Subject: [PATCH 07/17] prompt filter usage --- traceloop-sdk/lib/traceloop/sdk.rb | 29 +++++++++++++++++++++++++++++ 1 file changed, 29 insertions(+) diff --git a/traceloop-sdk/lib/traceloop/sdk.rb b/traceloop-sdk/lib/traceloop/sdk.rb index e5dd2d3..f2be43d 100644 --- a/traceloop-sdk/lib/traceloop/sdk.rb +++ b/traceloop-sdk/lib/traceloop/sdk.rb @@ -213,6 +213,35 @@ def tool(name) yield end end + + class GuardrailTracer + def initialize(span, provider) + @span = span + @provider = provider + end + + def log_guardrail_response(response) + 
@span.add_attributes({ + "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_PROMPTS}.prompt_filter_results" => response, + }) + end + end + + def guardrail(name, provider, conversation_id: nil) + @tracer.in_span("#{name}.guardrails") do |span| + attributes = { + OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_SYSTEM => provider, + OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_PROVIDER => provider, + } + + if conversation_id + attributes[OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_CONVERSATION_ID] = conversation_id + end + + span.add_attributes(attributes) + yield GuardrailTracer.new(span, provider) + end + end end end end From f7a7591ae0f70764963602bf6ea32c55514ba809 Mon Sep 17 00:00:00 2001 From: Ania Misiorek Date: Mon, 1 Dec 2025 16:00:34 -0500 Subject: [PATCH 08/17] adding a bit more --- .../lib/opentelemetry/semantic_conventions.rb | 1 + traceloop-sdk/lib/traceloop/sdk.rb | 97 ++++++++++++++++++- 2 files changed, 95 insertions(+), 3 deletions(-) diff --git a/semantic_conventions_ai/lib/opentelemetry/semantic_conventions.rb b/semantic_conventions_ai/lib/opentelemetry/semantic_conventions.rb index b7b55fc..e0fc86c 100644 --- a/semantic_conventions_ai/lib/opentelemetry/semantic_conventions.rb +++ b/semantic_conventions_ai/lib/opentelemetry/semantic_conventions.rb @@ -41,6 +41,7 @@ module SpanAttributes GEN_AI_SYSTEM = "gen_ai.system" GEN_AI_PROVIDER = "gen_ai.provider.name" GEN_AI_CONVERSATION_ID = "gen_ai.conversation.id" + GEN_AI_BEDROCK_GUARDRAILS = "gen_ai.bedrock.guardrail" end module LLMRequestTypeValues diff --git a/traceloop-sdk/lib/traceloop/sdk.rb b/traceloop-sdk/lib/traceloop/sdk.rb index f2be43d..00c1248 100644 --- a/traceloop-sdk/lib/traceloop/sdk.rb +++ b/traceloop-sdk/lib/traceloop/sdk.rb @@ -221,9 +221,100 @@ def initialize(span, provider) end def log_guardrail_response(response) - @span.add_attributes({ - 
"#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_PROMPTS}.prompt_filter_results" => response, - }) + # Normalize keys to strings to make access easier + r = deep_stringify_keys(response || {}) + + activation = guardrail_activation(r) + blocked_words = guardrail_blocked_words(r) + content_filtered = guardrail_content_filtered(r) + + attrs = { + "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_PROMPTS}.prompt_filter_results" => response, + + "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_BEDROCK_GUARDRAILS}.activation" => activation, # boolean + "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_BEDROCK_GUARDRAILS}.words" => blocked_words, # integer + "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_BEDROCK_GUARDRAILS}.content" => content_filtered, # integer (0/1) + + "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_BEDROCK_GUARDRAILS}.action" => r["action"] || "NONE", + "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_BEDROCK_GUARDRAILS}.content_policy_units" => (r.dig("usage", "content_policy_units") || 0), + "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_BEDROCK_GUARDRAILS}.word_policy_units" => (r.dig("usage", "word_policy_units") || 0), + "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_BEDROCK_GUARDRAILS}.topic_policy_units" => (r.dig("usage", "topic_policy_units") || 0), + "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_BEDROCK_GUARDRAILS}.sensitive_policy_units" => (r.dig("usage", "sensitive_information_policy_units") || 0) + } + + @span.add_attributes(attrs) + end + + private + + def deep_stringify_keys(obj) + case obj + when Hash + obj.each_with_object({}) do |(k, v), h| + h[k.to_s] = deep_stringify_keys(v) + end + when Array + obj.map { |v| deep_stringify_keys(v) } + else + obj + end + end + + def guardrail_activation(r) + usage = r["usage"] || {} + + units = + (usage["topic_policy_units"] || 0).to_i + + 
(usage["content_policy_units"] || 0).to_i + + (usage["word_policy_units"] || 0).to_i + + (usage["sensitive_information_policy_units"] || 0).to_i + + units > 0 || (r["assessments"].is_a?(Array) && !r["assessments"].empty?) + end + + def guardrail_blocked_words(r) + assessments = r["assessments"] || [] + + total = 0 + + assessments.each do |a| + word_policy = a["word_policy"] || {} + + # custom_words: [{ "match" => "API", "action" => "BLOCKED", "detected" => true }] + custom_words = word_policy["custom_words"] || [] + custom_words.each do |cw| + if cw["detected"] == true || cw["action"] == "BLOCKED" + total += 1 + end + end + + managed_lists = word_policy["managed_word_lists"] || [] + managed_lists.each do |entry| + if entry["detected"] == true || entry["action"] == "BLOCKED" + total += 1 + end + end + end + + total + end + + def guardrail_content_filtered(r) + action = r["action"] + return 1 if action && action != "NONE" + + assessments = r["assessments"] || [] + assessments.each do |a| + filters = a.dig("content_policy", "filters") || [] + filters.each do |f| + detected = f["detected"] + fa = f["action"] + + return 1 if detected == true || (fa && fa != "NONE") + end + end + + 0 end end From 9392c5e040079f5a3e7dabe8bf3cd121a3750c7b Mon Sep 17 00:00:00 2001 From: Ania Misiorek Date: Mon, 1 Dec 2025 16:08:25 -0500 Subject: [PATCH 09/17] changing prompt_filter_results --- traceloop-sdk/lib/traceloop/sdk.rb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/traceloop-sdk/lib/traceloop/sdk.rb b/traceloop-sdk/lib/traceloop/sdk.rb index 00c1248..7b9ceaf 100644 --- a/traceloop-sdk/lib/traceloop/sdk.rb +++ b/traceloop-sdk/lib/traceloop/sdk.rb @@ -229,7 +229,7 @@ def log_guardrail_response(response) content_filtered = guardrail_content_filtered(r) attrs = { - "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_PROMPTS}.prompt_filter_results" => response, + 
"#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_PROMPTS}.prompt_filter_results" => r["action"] || "NONE", "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_BEDROCK_GUARDRAILS}.activation" => activation, # boolean "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_BEDROCK_GUARDRAILS}.words" => blocked_words, # integer From 668e15ee90a0e46e34a3e9ea6fc4c74d3531deac Mon Sep 17 00:00:00 2001 From: Ania Misiorek Date: Tue, 2 Dec 2025 16:32:39 -0500 Subject: [PATCH 10/17] moving to same tracer --- traceloop-sdk/lib/traceloop/sdk.rb | 146 ++++++++++++++--------------- 1 file changed, 70 insertions(+), 76 deletions(-) diff --git a/traceloop-sdk/lib/traceloop/sdk.rb b/traceloop-sdk/lib/traceloop/sdk.rb index 7b9ceaf..928751b 100644 --- a/traceloop-sdk/lib/traceloop/sdk.rb +++ b/traceloop-sdk/lib/traceloop/sdk.rb @@ -155,90 +155,26 @@ def log_openai_response(response) }) end end - end - - def llm_call(provider, model, conversation_id: nil) - @tracer.in_span("#{provider}.chat") do |span| - attributes = { - OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_REQUEST_MODEL => model, - OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_SYSTEM => provider, - OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_PROVIDER => provider, - } - - if conversation_id - attributes[OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_CONVERSATION_ID] = conversation_id - end - - span.add_attributes(attributes) - yield Tracer.new(span, provider, model) - end - end - - def workflow(name) - @tracer.in_span("#{name}.workflow") do |span| - span.add_attributes({ - OpenTelemetry::SemanticConventionsAi::SpanAttributes::TRACELOOP_SPAN_KIND => "workflow", - OpenTelemetry::SemanticConventionsAi::SpanAttributes::TRACELOOP_ENTITY_NAME => name, - }) - yield - end - end - - def task(name) - @tracer.in_span("#{name}.task") do |span| - span.add_attributes({ - OpenTelemetry::SemanticConventionsAi::SpanAttributes::TRACELOOP_SPAN_KIND 
=> "task", - OpenTelemetry::SemanticConventionsAi::SpanAttributes::TRACELOOP_ENTITY_NAME => name, - }) - yield - end - end - - def agent(name) - @tracer.in_span("#{name}.agent") do |span| - span.add_attributes({ - OpenTelemetry::SemanticConventionsAi::SpanAttributes::TRACELOOP_SPAN_KIND => "agent", - OpenTelemetry::SemanticConventionsAi::SpanAttributes::TRACELOOP_ENTITY_NAME => name, - }) - yield - end - end - - def tool(name) - @tracer.in_span("#{name}.tool") do |span| - span.add_attributes({ - OpenTelemetry::SemanticConventionsAi::SpanAttributes::TRACELOOP_SPAN_KIND => "tool", - OpenTelemetry::SemanticConventionsAi::SpanAttributes::TRACELOOP_ENTITY_NAME => name, - }) - yield - end - end - - class GuardrailTracer - def initialize(span, provider) - @span = span - @provider = provider - end def log_guardrail_response(response) - # Normalize keys to strings to make access easier r = deep_stringify_keys(response || {}) - activation = guardrail_activation(r) - blocked_words = guardrail_blocked_words(r) + activation = guardrail_activation(r) + blocked_words = guardrail_blocked_words(r) content_filtered = guardrail_content_filtered(r) attrs = { "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_PROMPTS}.prompt_filter_results" => r["action"] || "NONE", - "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_BEDROCK_GUARDRAILS}.activation" => activation, # boolean - "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_BEDROCK_GUARDRAILS}.words" => blocked_words, # integer - "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_BEDROCK_GUARDRAILS}.content" => content_filtered, # integer (0/1) + "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_BEDROCK_GUARDRAILS}.activation" => activation, + "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_BEDROCK_GUARDRAILS}.words" => blocked_words, + "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_BEDROCK_GUARDRAILS}.content" => content_filtered, - 
"#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_BEDROCK_GUARDRAILS}.action" => r["action"] || "NONE", - "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_BEDROCK_GUARDRAILS}.content_policy_units" => (r.dig("usage", "content_policy_units") || 0), - "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_BEDROCK_GUARDRAILS}.word_policy_units" => (r.dig("usage", "word_policy_units") || 0), - "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_BEDROCK_GUARDRAILS}.topic_policy_units" => (r.dig("usage", "topic_policy_units") || 0), + "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_BEDROCK_GUARDRAILS}.action" => r["action"] || "NONE", + "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_BEDROCK_GUARDRAILS}.action_reason" => r["action_reason"] || "No action.", + "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_BEDROCK_GUARDRAILS}.content_policy_units" => (r.dig("usage", "content_policy_units") || 0), + "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_BEDROCK_GUARDRAILS}.word_policy_units" => (r.dig("usage", "word_policy_units") || 0), + "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_BEDROCK_GUARDRAILS}.topic_policy_units" => (r.dig("usage", "topic_policy_units") || 0), "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_BEDROCK_GUARDRAILS}.sensitive_policy_units" => (r.dig("usage", "sensitive_information_policy_units") || 0) } @@ -318,6 +254,63 @@ def guardrail_content_filtered(r) end end + def llm_call(provider, model, conversation_id: nil) + @tracer.in_span("#{provider}.chat") do |span| + attributes = { + OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_REQUEST_MODEL => model, + OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_SYSTEM => provider, + OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_PROVIDER => provider, + } + + if conversation_id + 
attributes[OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_CONVERSATION_ID] = conversation_id + end + + span.add_attributes(attributes) + yield Tracer.new(span, provider, model) + end + end + + def workflow(name) + @tracer.in_span("#{name}.workflow") do |span| + span.add_attributes({ + OpenTelemetry::SemanticConventionsAi::SpanAttributes::TRACELOOP_SPAN_KIND => "workflow", + OpenTelemetry::SemanticConventionsAi::SpanAttributes::TRACELOOP_ENTITY_NAME => name, + }) + yield + end + end + + def task(name) + @tracer.in_span("#{name}.task") do |span| + span.add_attributes({ + OpenTelemetry::SemanticConventionsAi::SpanAttributes::TRACELOOP_SPAN_KIND => "task", + OpenTelemetry::SemanticConventionsAi::SpanAttributes::TRACELOOP_ENTITY_NAME => name, + }) + yield + end + end + + def agent(name) + @tracer.in_span("#{name}.agent") do |span| + span.add_attributes({ + OpenTelemetry::SemanticConventionsAi::SpanAttributes::TRACELOOP_SPAN_KIND => "agent", + OpenTelemetry::SemanticConventionsAi::SpanAttributes::TRACELOOP_ENTITY_NAME => name, + }) + yield + end + end + + def tool(name) + @tracer.in_span("#{name}.tool") do |span| + span.add_attributes({ + OpenTelemetry::SemanticConventionsAi::SpanAttributes::TRACELOOP_SPAN_KIND => "tool", + OpenTelemetry::SemanticConventionsAi::SpanAttributes::TRACELOOP_ENTITY_NAME => name, + }) + yield + end + end + def guardrail(name, provider, conversation_id: nil) @tracer.in_span("#{name}.guardrails") do |span| attributes = { @@ -326,11 +319,12 @@ def guardrail(name, provider, conversation_id: nil) } if conversation_id - attributes[OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_CONVERSATION_ID] = conversation_id + attributes[OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_CONVERSATION_ID] = + conversation_id end span.add_attributes(attributes) - yield GuardrailTracer.new(span, provider) + yield end end end From ada94a80398526f69ccfbcb18dfaf235571b81c9 Mon Sep 17 00:00:00 2001 From: Ania Misiorek Date: Tue, 2 
Dec 2025 17:37:34 -0500 Subject: [PATCH 11/17] adding string response --- traceloop-sdk/lib/traceloop/sdk.rb | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/traceloop-sdk/lib/traceloop/sdk.rb b/traceloop-sdk/lib/traceloop/sdk.rb index 928751b..0ea561e 100644 --- a/traceloop-sdk/lib/traceloop/sdk.rb +++ b/traceloop-sdk/lib/traceloop/sdk.rb @@ -71,6 +71,8 @@ def log_response(response) # https://github.com/gbaptista/gemini-ai?tab=readme-ov-file#generate_content elsif response.respond_to?(:has_key?) && response.has_key?("candidates") log_gemini_response(response) + elsif response.is_a?(String) + log_string_message(response) else log_openai_response(response) end @@ -106,6 +108,15 @@ def log_ruby_llm_halt(response) }) end + # enables users to log messages with raw text that did not come from an LLM, this allows DT to complete traces + def log_string_message(response) + @span.add_attributes({ + OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_RESPONSE_MODEL => @model, + "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_COMPLETIONS}.0.role" => "assistant", + "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_COMPLETIONS}.0.content" => response + }) + end + def log_bedrock_response(response) body = JSON.parse(response.body.read()) From c4c16664a2d6983acb40949bd1504c2beb74ae1a Mon Sep 17 00:00:00 2001 From: Ania Misiorek Date: Thu, 4 Dec 2025 15:59:59 -0500 Subject: [PATCH 12/17] changing to add more info --- traceloop-sdk/lib/traceloop/sdk.rb | 26 ++++++++++++++------------ 1 file changed, 14 insertions(+), 12 deletions(-) diff --git a/traceloop-sdk/lib/traceloop/sdk.rb b/traceloop-sdk/lib/traceloop/sdk.rb index 0ea561e..2c843c0 100644 --- a/traceloop-sdk/lib/traceloop/sdk.rb +++ b/traceloop-sdk/lib/traceloop/sdk.rb @@ -170,23 +170,22 @@ def log_openai_response(response) def log_guardrail_response(response) r = deep_stringify_keys(response || {}) - activation = guardrail_activation(r) - blocked_words = 
guardrail_blocked_words(r) - content_filtered = guardrail_content_filtered(r) + activation = guardrail_activation(r) + words_blocked, blocked_words = guardrail_blocked_words(r) + content_filtered, type, confidence = guardrail_content_filtered(r) attrs = { "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_PROMPTS}.prompt_filter_results" => r["action"] || "NONE", "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_BEDROCK_GUARDRAILS}.activation" => activation, - "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_BEDROCK_GUARDRAILS}.words" => blocked_words, + "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_BEDROCK_GUARDRAILS}.words" => words_blocked, "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_BEDROCK_GUARDRAILS}.content" => content_filtered, "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_BEDROCK_GUARDRAILS}.action" => r["action"] || "NONE", "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_BEDROCK_GUARDRAILS}.action_reason" => r["action_reason"] || "No action.", - "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_BEDROCK_GUARDRAILS}.content_policy_units" => (r.dig("usage", "content_policy_units") || 0), - "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_BEDROCK_GUARDRAILS}.word_policy_units" => (r.dig("usage", "word_policy_units") || 0), - "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_BEDROCK_GUARDRAILS}.topic_policy_units" => (r.dig("usage", "topic_policy_units") || 0), - "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_BEDROCK_GUARDRAILS}.sensitive_policy_units" => (r.dig("usage", "sensitive_information_policy_units") || 0) + "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_BEDROCK_GUARDRAILS}.words.blocked_words_detected" => blocked_words.to_s, + "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_BEDROCK_GUARDRAILS}.content.type" => type, + 
"#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_BEDROCK_GUARDRAILS}.content.confidence" => confidence, } @span.add_attributes(attrs) @@ -223,6 +222,7 @@ def guardrail_blocked_words(r) assessments = r["assessments"] || [] total = 0 + blocked_words = [] assessments.each do |a| word_policy = a["word_policy"] || {} @@ -232,6 +232,7 @@ def guardrail_blocked_words(r) custom_words.each do |cw| if cw["detected"] == true || cw["action"] == "BLOCKED" total += 1 + blocked_words.append(cw["match"]) end end @@ -239,11 +240,12 @@ def guardrail_blocked_words(r) managed_lists.each do |entry| if entry["detected"] == true || entry["action"] == "BLOCKED" total += 1 + blocked_words.append(entry["match"]) end end end - total + [total, blocked_words] end def guardrail_content_filtered(r) @@ -255,13 +257,13 @@ def guardrail_content_filtered(r) filters = a.dig("content_policy", "filters") || [] filters.each do |f| detected = f["detected"] - fa = f["action"] + action = f["action"] - return 1 if detected == true || (fa && fa != "NONE") + return [1, f["type"], f["confidence"]] if detected == true || (detected && action != "NONE") end end - 0 + [0, "", ""] end end From cbcfb22fb20a8a64d7d81956427b9c3c6486a420 Mon Sep 17 00:00:00 2001 From: Ania Misiorek Date: Thu, 4 Dec 2025 16:53:08 -0500 Subject: [PATCH 13/17] removing random return --- traceloop-sdk/lib/traceloop/sdk.rb | 13 +++++-------- 1 file changed, 5 insertions(+), 8 deletions(-) diff --git a/traceloop-sdk/lib/traceloop/sdk.rb b/traceloop-sdk/lib/traceloop/sdk.rb index 2c843c0..8c7eed6 100644 --- a/traceloop-sdk/lib/traceloop/sdk.rb +++ b/traceloop-sdk/lib/traceloop/sdk.rb @@ -175,7 +175,7 @@ def log_guardrail_response(response) content_filtered, type, confidence = guardrail_content_filtered(r) attrs = { - "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_PROMPTS}.prompt_filter_results" => r["action"] || "NONE", + 
"#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_PROMPTS}.prompt_filter_results" => [type, confidence].to_s, "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_BEDROCK_GUARDRAILS}.activation" => activation, "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_BEDROCK_GUARDRAILS}.words" => words_blocked, @@ -183,9 +183,7 @@ def log_guardrail_response(response) "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_BEDROCK_GUARDRAILS}.action" => r["action"] || "NONE", "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_BEDROCK_GUARDRAILS}.action_reason" => r["action_reason"] || "No action.", - "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_BEDROCK_GUARDRAILS}.words.blocked_words_detected" => blocked_words.to_s, - "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_BEDROCK_GUARDRAILS}.content.type" => type, - "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_BEDROCK_GUARDRAILS}.content.confidence" => confidence, + "#{OpenTelemetry::SemanticConventionsAi::SpanAttributes::GEN_AI_BEDROCK_GUARDRAILS}.words.blocked_words" => blocked_words.to_s, } @span.add_attributes(attrs) @@ -249,17 +247,16 @@ def guardrail_blocked_words(r) end def guardrail_content_filtered(r) - action = r["action"] - return 1 if action && action != "NONE" - assessments = r["assessments"] || [] assessments.each do |a| filters = a.dig("content_policy", "filters") || [] filters.each do |f| detected = f["detected"] action = f["action"] + type = f["type"] + confidence = f["confidence"] - return [1, f["type"], f["confidence"]] if detected == true || (detected && action != "NONE") + return [1, type, confidence] if detected == true || (detected && action != "NONE") end end From 3d4fc213d8e1b15e92dd8151e5da864f1e610b78 Mon Sep 17 00:00:00 2001 From: Jake Brush Date: Thu, 19 Feb 2026 17:44:11 -0500 Subject: [PATCH 14/17] VMAD-3322 add name param --- README.md | 52 ++++++++++++++++++++++++++++++ 
sample-app/bedrock.rb | 6 ++++ sample-app/openai.rb | 10 ++++++ traceloop-sdk/lib/traceloop/sdk.rb | 51 ++++++++++++++++++++--------- 4 files changed, 104 insertions(+), 15 deletions(-) diff --git a/README.md b/README.md index b4be3bb..c143788 100644 --- a/README.md +++ b/README.md @@ -68,6 +68,58 @@ That's it. You're now tracing your code with OpenLLMetry! Now, you need to decide where to export the traces to. +## ⚙️ Configuration + +### Service Name + +You can customize your service name by providing a `name` parameter: + +```ruby +require "traceloop/sdk" + +# Without name suffix (uses OTEL_SERVICE_NAME as-is) +traceloop = Traceloop::SDK::Traceloop.new + +# With name suffix (prepends to OTEL_SERVICE_NAME) +traceloop = Traceloop::SDK::Traceloop.new(name: "worker") +# If OTEL_SERVICE_NAME="my-app", this creates "worker-my-app" +``` + +### Multiple Service Instances + +You can create multiple Traceloop instances with different service names in the same application: + +```ruby +traceloop_api = Traceloop::SDK::Traceloop.new(name: "api") +traceloop_worker = Traceloop::SDK::Traceloop.new(name: "worker") +traceloop_scheduler = Traceloop::SDK::Traceloop.new(name: "scheduler") + +# Each instance traces with its own service name: +# - "api-my-app" +# - "worker-my-app" +# - "scheduler-my-app" +``` + +### Environment Variables + +Set the base service name using the standard OpenTelemetry environment variable: + +```bash +export OTEL_SERVICE_NAME="my-app" +``` + +If not set, defaults to `"unknown_service:ruby"`. + +### Cleanup + +When shutting down your application, ensure spans are properly flushed: + +```ruby +traceloop = Traceloop::SDK::Traceloop.new +# ... use traceloop ... 
+traceloop.shutdown # Flush remaining spans before exit +``` + ## ⏫ Supported (and tested) destinations - [x] [Traceloop](https://www.traceloop.com/docs/openllmetry/integrations/traceloop) diff --git a/sample-app/bedrock.rb b/sample-app/bedrock.rb index 08d95eb..dfcd216 100644 --- a/sample-app/bedrock.rb +++ b/sample-app/bedrock.rb @@ -1,8 +1,14 @@ require 'aws-sdk-bedrockruntime' require "traceloop/sdk" +# Example 1: No name suffix (backward compatible) +# Uses OTEL_SERVICE_NAME as-is, or defaults to "unknown_service:ruby" traceloop = Traceloop::SDK::Traceloop.new +# Example 2: With name suffix +# If OTEL_SERVICE_NAME="my-app", this creates "bedrock-worker-my-app" +# traceloop = Traceloop::SDK::Traceloop.new(name: "bedrock-worker") + model = "anthropic.claude-3-sonnet-20240229-v1:0" traceloop.llm_call(provider="bedrock", model=model) do |tracer| diff --git a/sample-app/openai.rb b/sample-app/openai.rb index 1d3994c..b56b071 100644 --- a/sample-app/openai.rb +++ b/sample-app/openai.rb @@ -7,8 +7,18 @@ client = OpenAI::Client.new +# Example 1: No name suffix (backward compatible) +# Uses OTEL_SERVICE_NAME as-is, or defaults to "unknown_service:ruby" traceloop = Traceloop::SDK::Traceloop.new +# Example 2: With name suffix +# If OTEL_SERVICE_NAME="my-app", this creates "worker-my-app" +# traceloop_worker = Traceloop::SDK::Traceloop.new(name: "worker") + +# Example 3: Multiple instances +# traceloop_api = Traceloop::SDK::Traceloop.new(name: "api") +# traceloop_background = Traceloop::SDK::Traceloop.new(name: "background") + traceloop.workflow("joke_generator") do traceloop.llm_call(provider="openai", model="gpt-3.5-turbo") do |tracer| tracer.log_prompt(user_prompt="Tell me a joke about OpenTelemetry") diff --git a/traceloop-sdk/lib/traceloop/sdk.rb b/traceloop-sdk/lib/traceloop/sdk.rb index e5dd2d3..eb45d8e 100644 --- a/traceloop-sdk/lib/traceloop/sdk.rb +++ b/traceloop-sdk/lib/traceloop/sdk.rb @@ -5,25 +5,46 @@ module Traceloop module SDK class Traceloop - def 
initialize + def initialize(name: nil) api_key = ENV["TRACELOOP_API_KEY"] raise "TRACELOOP_API_KEY environment variable is required" if api_key.nil? || api_key.empty? - OpenTelemetry::SDK.configure do |c| - c.add_span_processor( - OpenTelemetry::SDK::Trace::Export::BatchSpanProcessor.new( - OpenTelemetry::Exporter::OTLP::Exporter.new( - endpoint: "#{ENV.fetch("TRACELOOP_BASE_URL", "https://api.traceloop.com")}/v1/traces", - headers: { - "Authorization" => "#{ENV.fetch("TRACELOOP_AUTH_SCHEME", "Bearer")} #{ENV.fetch("TRACELOOP_API_KEY")}" - } - ) - ) - ) - puts "Traceloop exporting traces to #{ENV.fetch("TRACELOOP_BASE_URL", "https://api.traceloop.com")}" - end + # Construct service name + base_name = ENV["OTEL_SERVICE_NAME"] || "unknown_service:ruby" + @service_name = name ? "#{name}-#{base_name}" : base_name + + # Create resource with service name + resource = OpenTelemetry::SDK::Resources::Resource.create( + OpenTelemetry::SemanticConventions::Resource::SERVICE_NAME => @service_name + ) + + # Create instance-specific tracer provider + @tracer_provider = OpenTelemetry::SDK::Trace::TracerProvider.new( + resource: resource + ) + + # Configure OTLP exporter for this instance + exporter = OpenTelemetry::Exporter::OTLP::Exporter.new( + endpoint: "#{ENV.fetch("TRACELOOP_BASE_URL", "https://api.traceloop.com")}/v1/traces", + headers: { + "Authorization" => "#{ENV.fetch("TRACELOOP_AUTH_SCHEME", "Bearer")} #{ENV.fetch("TRACELOOP_API_KEY")}" + } + ) + + # Add span processor to this instance's provider + @tracer_provider.add_span_processor( + OpenTelemetry::SDK::Trace::Export::BatchSpanProcessor.new(exporter) + ) + + puts "Traceloop exporting traces to #{ENV.fetch("TRACELOOP_BASE_URL", "https://api.traceloop.com")}" + puts "Service name: #{@service_name}" + + # Get tracer from instance-specific provider + @tracer = @tracer_provider.tracer("Traceloop", version: "0.1.5") + end - @tracer = OpenTelemetry.tracer_provider.tracer("Traceloop") + def shutdown + 
@tracer_provider&.shutdown end class Tracer From ee923fe5341c9380c358d4b604b8aaef9b50f048 Mon Sep 17 00:00:00 2001 From: Jake Brush Date: Thu, 19 Feb 2026 18:19:44 -0500 Subject: [PATCH 15/17] VMAD-3322 change to a string --- traceloop-sdk/lib/traceloop/sdk.rb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/traceloop-sdk/lib/traceloop/sdk.rb b/traceloop-sdk/lib/traceloop/sdk.rb index eb45d8e..b8f4b19 100644 --- a/traceloop-sdk/lib/traceloop/sdk.rb +++ b/traceloop-sdk/lib/traceloop/sdk.rb @@ -40,7 +40,7 @@ def initialize(name: nil) puts "Service name: #{@service_name}" # Get tracer from instance-specific provider - @tracer = @tracer_provider.tracer("Traceloop", version: "0.1.5") + @tracer = @tracer_provider.tracer("Traceloop", "0.1.5") end def shutdown From 679792badaf677867227b7580f72fc90d7993f5d Mon Sep 17 00:00:00 2001 From: Jake Brush Date: Fri, 20 Feb 2026 10:17:29 -0500 Subject: [PATCH 16/17] VMAD-3322 separating environment and service --- traceloop-sdk/lib/traceloop/sdk.rb | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/traceloop-sdk/lib/traceloop/sdk.rb b/traceloop-sdk/lib/traceloop/sdk.rb index b8f4b19..11b2871 100644 --- a/traceloop-sdk/lib/traceloop/sdk.rb +++ b/traceloop-sdk/lib/traceloop/sdk.rb @@ -11,7 +11,8 @@ def initialize(name: nil) # Construct service name base_name = ENV["OTEL_SERVICE_NAME"] || "unknown_service:ruby" - @service_name = name ? "#{name}-#{base_name}" : base_name + otel_environment = ENV["OTEL_ENVIRONMENT"] || "unknown" + @service_name = name ?
"#{name}-#{otel_environment}" : base_name # Create resource with service name resource = OpenTelemetry::SDK::Resources::Resource.create( From a557af933faa88cf0bdca9c83334696c12218427 Mon Sep 17 00:00:00 2001 From: Jake Brush Date: Fri, 20 Feb 2026 11:13:37 -0500 Subject: [PATCH 17/17] VMAD-3322 update documentation --- README.md | 26 +++++++++++++++++--------- sample-app/Gemfile.lock | 5 +++-- sample-app/bedrock.rb | 7 ++++--- sample-app/gemini.rb | 7 +++++++ sample-app/openai.rb | 12 ++++++++---- 5 files changed, 39 insertions(+), 18 deletions(-) diff --git a/README.md b/README.md index c143788..1dbe9b5 100644 --- a/README.md +++ b/README.md @@ -77,12 +77,14 @@ You can customize your service name by providing a `name` parameter: ```ruby require "traceloop/sdk" -# Without name suffix (uses OTEL_SERVICE_NAME as-is) +# Without name parameter (uses OTEL_SERVICE_NAME as-is) traceloop = Traceloop::SDK::Traceloop.new +# Service name: value of OTEL_SERVICE_NAME, or "unknown_service:ruby" -# With name suffix (prepends to OTEL_SERVICE_NAME) +# With name parameter (combines name with OTEL_ENVIRONMENT) traceloop = Traceloop::SDK::Traceloop.new(name: "worker") -# If OTEL_SERVICE_NAME="my-app", this creates "worker-my-app" +# Service name: "worker-production" (if OTEL_ENVIRONMENT="production") +# Service name: "worker-unknown" (if OTEL_ENVIRONMENT not set) ``` ### Multiple Service Instances @@ -94,21 +96,27 @@ traceloop_api = Traceloop::SDK::Traceloop.new(name: "api") traceloop_worker = Traceloop::SDK::Traceloop.new(name: "worker") traceloop_scheduler = Traceloop::SDK::Traceloop.new(name: "scheduler") -# Each instance traces with its own service name: -# - "api-my-app" -# - "worker-my-app" -# - "scheduler-my-app" +# Each instance traces with its own service name (assuming OTEL_ENVIRONMENT="production"): +# - "api-production" +# - "worker-production" +# - "scheduler-production" ``` ### Environment Variables -Set the base service name using the standard OpenTelemetry environment 
variable: +Control your service naming using standard OpenTelemetry environment variables: ```bash +# Used when no name parameter is provided export OTEL_SERVICE_NAME="my-app" + +# Combined with name parameter: "worker-production" +export OTEL_ENVIRONMENT="production" ``` -If not set, defaults to `"unknown_service:ruby"`. +Defaults: +- `OTEL_SERVICE_NAME` defaults to `"unknown_service:ruby"` +- `OTEL_ENVIRONMENT` defaults to `"unknown"` ### Cleanup diff --git a/sample-app/Gemfile.lock b/sample-app/Gemfile.lock index ad4418e..8be0a65 100644 --- a/sample-app/Gemfile.lock +++ b/sample-app/Gemfile.lock @@ -30,7 +30,7 @@ GEM faraday-typhoeus (1.1.0) faraday (~> 2.0) typhoeus (~> 1.4) - ffi (1.17.0-arm64-darwin) + ffi (1.17.0) gemini-ai (4.2.0) event_stream_parser (~> 1.0) faraday (~> 2.10) @@ -39,7 +39,7 @@ GEM typhoeus (~> 1.4, >= 1.4.1) google-cloud-env (2.2.1) faraday (>= 1.0, < 3.a) - google-protobuf (3.25.5-arm64-darwin) + google-protobuf (3.25.5) googleapis-common-protos-types (1.16.0) google-protobuf (>= 3.18, < 5.a) googleauth (1.11.2) @@ -100,6 +100,7 @@ GEM PLATFORMS arm64-darwin-23 + x86_64-linux DEPENDENCIES aws-sdk-bedrockruntime (~> 1.14) diff --git a/sample-app/bedrock.rb b/sample-app/bedrock.rb index dfcd216..fa8505a 100644 --- a/sample-app/bedrock.rb +++ b/sample-app/bedrock.rb @@ -1,12 +1,13 @@ require 'aws-sdk-bedrockruntime' require "traceloop/sdk" -# Example 1: No name suffix (backward compatible) +# Example 1: No name parameter (backward compatible) # Uses OTEL_SERVICE_NAME as-is, or defaults to "unknown_service:ruby" traceloop = Traceloop::SDK::Traceloop.new -# Example 2: With name suffix -# If OTEL_SERVICE_NAME="my-app", this creates "bedrock-worker-my-app" +# Example 2: With name parameter +# Creates service name as "#{name}-#{OTEL_ENVIRONMENT}" +# If OTEL_ENVIRONMENT="production", this creates "bedrock-worker-production" # traceloop = Traceloop::SDK::Traceloop.new(name: "bedrock-worker") model = "anthropic.claude-3-sonnet-20240229-v1:0" diff 
--git a/sample-app/gemini.rb b/sample-app/gemini.rb index 6b411da..f261449 100644 --- a/sample-app/gemini.rb +++ b/sample-app/gemini.rb @@ -9,8 +9,15 @@ options: { model: 'gemini-pro', server_sent_events: true } ) +# Example 1: No name parameter (backward compatible) +# Uses OTEL_SERVICE_NAME as-is, or defaults to "unknown_service:ruby" traceloop = Traceloop::SDK::Traceloop.new +# Example 2: With name parameter +# Creates service name as "#{name}-#{OTEL_ENVIRONMENT}" +# If OTEL_ENVIRONMENT="production", this creates "gemini-worker-production" +# traceloop = Traceloop::SDK::Traceloop.new(name: "gemini-worker") + traceloop.llm_call(provider="vertexai", model="gemini-pro") do |tracer| tracer.log_prompt(user_prompt="Tell me a joke about OpenTelemetry") response = client.generate_content( diff --git a/sample-app/openai.rb b/sample-app/openai.rb index b56b071..68951e3 100644 --- a/sample-app/openai.rb +++ b/sample-app/openai.rb @@ -7,15 +7,19 @@ client = OpenAI::Client.new -# Example 1: No name suffix (backward compatible) +# Example 1: No name parameter (backward compatible) # Uses OTEL_SERVICE_NAME as-is, or defaults to "unknown_service:ruby" traceloop = Traceloop::SDK::Traceloop.new -# Example 2: With name suffix -# If OTEL_SERVICE_NAME="my-app", this creates "worker-my-app" +# Example 2: With name parameter +# Creates service name as "#{name}-#{OTEL_ENVIRONMENT}" +# If OTEL_ENVIRONMENT="production", this creates "worker-production" # traceloop_worker = Traceloop::SDK::Traceloop.new(name: "worker") -# Example 3: Multiple instances +# Example 3: Multiple instances with different names +# If OTEL_ENVIRONMENT="production": +# - traceloop_api: "api-production" +# - traceloop_background: "background-production" # traceloop_api = Traceloop::SDK::Traceloop.new(name: "api") # traceloop_background = Traceloop::SDK::Traceloop.new(name: "background")