88Runs as a background daemon inside the container. OpenHands SHTTP config
99points at http://localhost:<port> with no api_key; this proxy adds auth.
1010
11+ Deepsearch filtering: deepsearch/deepsearch_read tool calls are intercepted
12+ and rejected with a helpful error because OpenHands' MCP client has a ~30s
13+ internal HTTP timeout that kills long-running deepsearch requests.
14+
1115Usage:
1216 SG_MCP_URL=https://sourcegraph.sourcegraph.com/.api/mcp \
1317 SG_MCP_TOKEN=sgp_... \
1721"""
1822
1923import argparse
24+ import json
2025import os
2126import sys
27+ import time
2228from http .server import HTTPServer , BaseHTTPRequestHandler
2329import urllib .request
2430import urllib .error
2531
# Upstream Sourcegraph MCP endpoint and the token used to authenticate to it.
SG_URL = os.getenv("SG_MCP_URL", "https://sourcegraph.sourcegraph.com/.api/mcp")
SG_TOKEN = os.getenv("SG_MCP_TOKEN", "")

# Tools whose calls run long enough to trip OpenHands' ~30s internal MCP
# HTTP timeout (which crashes the agent); the proxy rejects them up front.
_BLOCKED_TOOLS = {"deepsearch", "deepsearch_read"}

# Retry policy for transient upstream failures (5xx / connection errors).
_MAX_RETRIES = 2
_RETRY_DELAY = 2  # seconds between retry attempts
2942
30- class ProxyHandler (BaseHTTPRequestHandler ):
31- def do_POST (self ):
32- content_length = int (self .headers .get ("Content-Length" , 0 ))
33- body = self .rfile .read (content_length ) if content_length else b""
3443
35- # Forward headers, replacing auth
def _is_blocked_tool_call(body: bytes) -> tuple[bool, str]:
    """Report whether a JSON-RPC payload invokes a blocked tool.

    Returns (True, tool_name) for a ``tools/call`` request naming a tool in
    _BLOCKED_TOOLS, else (False, ""). Malformed or non-dict payloads are
    treated as not blocked.
    """
    name = ""
    try:
        payload = json.loads(body)
        if payload.get("method") == "tools/call":
            name = payload.get("params", {}).get("name", "")
    except (json.JSONDecodeError, AttributeError):
        # Not JSON, or not shaped like a JSON-RPC request — let it through.
        name = ""
    if name in _BLOCKED_TOOLS:
        return True, name
    return False, ""
55+
56+
57+ def _make_tool_error_response (body : bytes , tool_name : str ) -> bytes :
58+ """Return a JSON-RPC error response for a blocked tool call."""
59+ try :
60+ req = json .loads (body )
61+ req_id = req .get ("id" )
62+ except (json .JSONDecodeError , AttributeError ):
63+ req_id = None
64+ resp = {
65+ "jsonrpc" : "2.0" ,
66+ "id" : req_id ,
67+ "result" : {
68+ "content" : [
69+ {
70+ "type" : "text" ,
71+ "text" : (
72+ f"Error: '{ tool_name } ' is unavailable in this environment "
73+ f"due to timeout constraints. Use 'keyword_search' or "
74+ f"'nls_search' instead for code discovery."
75+ ),
76+ }
77+ ],
78+ "isError" : True ,
79+ },
80+ }
81+ return json .dumps (resp ).encode ()
82+
83+
def _forward_request(url, body, headers, method, retries=_MAX_RETRIES):
    """Forward one HTTP request upstream, retrying transient failures.

    Args:
        url: Upstream endpoint to hit.
        body: Request body bytes, or None (e.g. for GET).
        headers: Header dict to send upstream.
        method: HTTP method name, e.g. "POST" or "GET".
        retries: Extra attempts after the first; values < 0 act like 0.

    Returns:
        (status, header_pairs, body_bytes) from the upstream response.

    Raises:
        urllib.error.HTTPError: for 4xx immediately, or 5xx after all retries.
        urllib.error.URLError / TimeoutError: after all retries.
    """
    req = urllib.request.Request(url, data=body, headers=headers, method=method)
    # Fix vs. previous version: the trailing `raise last_exc` was unreachable
    # dead code (every path returns or re-raises), and retries < 0 would have
    # raised `None`. Retried HTTPError bodies are now also closed.
    attempts = max(retries, 0) + 1
    for attempt in range(attempts):
        last = attempt == attempts - 1
        try:
            # Generous timeout: allowed upstream MCP calls can still be slow.
            with urllib.request.urlopen(req, timeout=300) as resp:
                return resp.status, resp.getheaders(), resp.read()
        except urllib.error.HTTPError as e:
            if e.code < 500 or last:
                raise  # 4xx is not transient; on the last attempt, surface it
            e.close()  # release the error-response body before retrying
        except (urllib.error.URLError, TimeoutError):
            if last:
                raise
        time.sleep(_RETRY_DELAY)
106+
107+
108+ class ProxyHandler (BaseHTTPRequestHandler ):
109+ def _build_fwd_headers (self ):
36110 fwd_headers = {}
37111 for key , val in self .headers .items ():
38112 lower = key .lower ()
39113 if lower in ("host" , "authorization" , "s" , "x-session-api-key" ):
40114 continue
41115 fwd_headers [key ] = val
42-
43116 if SG_TOKEN :
44117 fwd_headers ["Authorization" ] = f"token { SG_TOKEN } "
45118 fwd_headers ["Host" ] = urllib .request .urlparse (SG_URL ).netloc
119+ return fwd_headers
46120
47- req = urllib .request .Request (
48- SG_URL , data = body , headers = fwd_headers , method = "POST"
49- )
121+ def do_POST (self ):
122+ content_length = int (self .headers .get ("Content-Length" , 0 ))
123+ body = self .rfile .read (content_length ) if content_length else b""
124+
125+ # Block deepsearch tools that will timeout
126+ blocked , tool_name = _is_blocked_tool_call (body )
127+ if blocked :
128+ error_body = _make_tool_error_response (body , tool_name )
129+ self .send_response (200 )
130+ self .send_header ("Content-Type" , "application/json" )
131+ self .send_header ("Content-Length" , str (len (error_body )))
132+ self .end_headers ()
133+ self .wfile .write (error_body )
134+ return
135+
136+ fwd_headers = self ._build_fwd_headers ()
50137
51138 try :
52- with urllib .request .urlopen (req , timeout = 300 ) as resp :
53- resp_body = resp .read ()
54- self .send_response (resp .status )
55- for key , val in resp .getheaders ():
56- if key .lower () not in ("transfer-encoding" , "connection" ):
57- self .send_header (key , val )
58- self .end_headers ()
59- self .wfile .write (resp_body )
139+ status , resp_headers , resp_body = _forward_request (
140+ SG_URL , body , fwd_headers , "POST"
141+ )
142+ self .send_response (status )
143+ for key , val in resp_headers :
144+ if key .lower () not in ("transfer-encoding" , "connection" ):
145+ self .send_header (key , val )
146+ self .end_headers ()
147+ self .wfile .write (resp_body )
60148 except urllib .error .HTTPError as e :
61149 self .send_response (e .code )
62150 self .send_header ("Content-Type" , "application/json" )
@@ -70,29 +158,18 @@ def do_POST(self):
70158 self .wfile .write (str (e ).encode ())
71159
72160 def do_GET (self ):
73- # MCP streamable HTTP also uses GET for SSE streams
74- fwd_headers = {}
75- for key , val in self .headers .items ():
76- lower = key .lower ()
77- if lower in ("host" , "authorization" , "s" , "x-session-api-key" ):
78- continue
79- fwd_headers [key ] = val
80-
81- if SG_TOKEN :
82- fwd_headers ["Authorization" ] = f"token { SG_TOKEN } "
83- fwd_headers ["Host" ] = urllib .request .urlparse (SG_URL ).netloc
84-
85- req = urllib .request .Request (SG_URL , headers = fwd_headers , method = "GET" )
161+ fwd_headers = self ._build_fwd_headers ()
86162
87163 try :
88- with urllib .request .urlopen (req , timeout = 300 ) as resp :
89- resp_body = resp .read ()
90- self .send_response (resp .status )
91- for key , val in resp .getheaders ():
92- if key .lower () not in ("transfer-encoding" , "connection" ):
93- self .send_header (key , val )
94- self .end_headers ()
95- self .wfile .write (resp_body )
164+ status , resp_headers , resp_body = _forward_request (
165+ SG_URL , None , fwd_headers , "GET"
166+ )
167+ self .send_response (status )
168+ for key , val in resp_headers :
169+ if key .lower () not in ("transfer-encoding" , "connection" ):
170+ self .send_header (key , val )
171+ self .end_headers ()
172+ self .wfile .write (resp_body )
96173 except urllib .error .HTTPError as e :
97174 self .send_response (e .code )
98175 self .end_headers ()
@@ -120,6 +197,8 @@ def main():
120197 f .write (str (port ))
121198
122199 print (f"SG auth proxy listening on 127.0.0.1:{ port } -> { SG_URL } " , flush = True )
200+ if _BLOCKED_TOOLS :
201+ print (f" Blocked tools: { ', ' .join (sorted (_BLOCKED_TOOLS ))} " , flush = True )
123202 server .serve_forever ()
124203
125204
0 commit comments