diff --git a/lib/ruby_llm/providers/anthropic/streaming.rb b/lib/ruby_llm/providers/anthropic/streaming.rb
index 014a5c792..320a147a1 100644
--- a/lib/ruby_llm/providers/anthropic/streaming.rb
+++ b/lib/ruby_llm/providers/anthropic/streaming.rb
@@ -7,6 +7,12 @@ class Anthropic
       module Streaming
         private

+        # Force an uncompressed SSE stream. With the default Accept-Encoding,
+        # Cloudflare gzips the event stream and flushes infrequently, so
+        # Net::HTTP's auto-inflate delivers chunks in large batches instead of
+        # as they arrive. `merge` deliberately overrides any caller-supplied
+        # Accept-Encoding; `&` forwards the streaming block unchanged (Ruby 3.1+).
+        def stream_response(connection, payload, additional_headers = {}, &)
+          super(connection, payload, additional_headers.merge('Accept-Encoding' => 'identity'), &)
+        end
+
         def stream_url
           completion_url
         end
diff --git a/spec/ruby_llm/providers/anthropic/streaming_spec.rb b/spec/ruby_llm/providers/anthropic/streaming_spec.rb
new file mode 100644
index 000000000..dd0552432
--- /dev/null
+++ b/spec/ruby_llm/providers/anthropic/streaming_spec.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe RubyLLM::Providers::Anthropic::Streaming do
+  include_context 'with configured RubyLLM'
+
+  it 'sends Accept-Encoding: identity on streaming requests' do
+    captured = nil
+
+    stub_request(:post, %r{api\.anthropic\.com/v1/messages})
+      .with do |req|
+        captured = req.headers['Accept-Encoding']
+        # Always match: WebMock uses this block's return value as the match
+        # predicate, so returning `captured` would turn a missing header into a
+        # confusing "unregistered request" error instead of a clean expectation
+        # failure below.
+        true
+      end
+      .to_return(
+        status: 200,
+        body: '',
+        headers: { 'Content-Type' => 'text/event-stream' }
+      )
+
+    chat = RubyLLM.chat(model: 'claude-haiku-4-5', provider: :anthropic)
+    chat.ask('hi') { |_chunk| nil }
+
+    expect(captured).to eq('identity')
+  end
+end