5 changes: 5 additions & 0 deletions .changeset/fix-js-sdk-upload-content-length.md
@@ -0,0 +1,5 @@
---
"e2b": patch
---

fix(js-sdk): buffer template tar archive before upload so `fetch` sets `Content-Length` instead of falling back to `Transfer-Encoding: chunked`. S3 presigned PUT URLs reject chunked requests with `501 NotImplemented`, breaking template uploads in self-hosted deployments backed by S3-compatible storage. Aligns the JS SDK with the Python SDK, which already buffers via `io.BytesIO`.
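For reviewers unfamiliar with the failure mode, here is a minimal sketch of the two behaviors the changeset contrasts. `presignedUrl` and `archive.tar` are illustrative placeholders, not SDK code, and this assumes Node 18+ where the global `fetch` is undici-backed:

```ts
import { createReadStream } from 'node:fs'
import { readFile } from 'node:fs/promises'

async function demo(presignedUrl: string) {
  // Stream body: undici cannot know the total size up front, so it falls
  // back to Transfer-Encoding: chunked, which S3 presigned PUTs reject
  // with 501 NotImplemented.
  await fetch(presignedUrl, {
    method: 'PUT',
    body: createReadStream('archive.tar'),
    duplex: 'half', // required by undici for streaming bodies
  } as unknown as RequestInit)

  // Buffered body: the length is known up front, so fetch sets
  // Content-Length and the presigned PUT succeeds.
  await fetch(presignedUrl, {
    method: 'PUT',
    body: await readFile('archive.tar'),
  })
}
```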
23 changes: 18 additions & 5 deletions packages/js-sdk/src/template/buildApi.ts
@@ -1,5 +1,5 @@
 import { ApiClient, handleApiError, paths, components } from '../api'
-import { stripAnsi } from '../utils'
+import { dynamicImport, stripAnsi } from '../utils'
 import { BuildError, FileUploadError, TemplateError } from '../errors'
 import { LogEntry } from './logger'
 import { getBuildStepIndex, tarFileStreamUpload } from './utils'
@@ -119,12 +119,25 @@ export async function uploadFile(
     resolveSymlinks
   )
 
-  // The compiler assumes this is Web fetch API, but it's actually Node.js fetch API
+  // Buffer the archive before uploading so fetch sets Content-Length.
+  // S3 presigned PUT URLs reject Transfer-Encoding: chunked with 501
+  // NotImplemented, which is what Node's fetch falls back to when the
+  // body is a Readable without a known length. See e2b-dev/e2b#1243.
+  // The Python SDK takes the same approach (build_api.py:upload_file).
+  // Dynamically import so the browser bundle doesn't pull in node:stream.
+  // tar's Pack extends Minipass and is iterable as AsyncIterable<Buffer> at
+  // runtime, but the cli's tsconfig (preserveSymlinks) doesn't surface that
+  // through the type chain — cast via unknown.
+  const { buffer } = await dynamicImport<
+    typeof import('node:stream/consumers')
+  >('node:stream/consumers')
+  const uploadBody = await buffer(
+    uploadStream as unknown as AsyncIterable<Buffer>
+  )
+
   const res = await fetch(url, {
     method: 'PUT',
-    // @ts-expect-error
-    body: uploadStream,
-    duplex: 'half',
+    body: uploadBody,
   })
 
   if (!res.ok) {
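Note: the diff calls a `dynamicImport` helper from `../utils` whose implementation isn't part of this PR. As a rough sketch of what such a helper plausibly looks like (an assumption, not the SDK's actual code), it is a typed wrapper around `import()` evaluated at runtime:

```ts
// Hypothetical sketch; the real helper in '../utils' may differ.
// Resolving the specifier at runtime keeps node:stream/consumers out of
// statically analyzed browser bundles.
export async function dynamicImport<T>(specifier: string): Promise<T> {
  try {
    return (await import(specifier)) as T
  } catch (err) {
    throw new Error(
      `Failed to import '${specifier}'; this code path requires Node.js`,
      { cause: err }
    )
  }
}
```

The generic parameter is what lets the call site above keep full typings for `node:stream/consumers` without a static import.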
68 changes: 68 additions & 0 deletions packages/js-sdk/tests/template/uploadFile.test.ts
@@ -0,0 +1,68 @@
import { describe, test, expect, beforeAll, afterAll } from 'vitest'
import { writeFile, mkdtemp, rm } from 'fs/promises'
import { join } from 'path'
import { tmpdir } from 'os'
import { createServer, type IncomingMessage, type Server } from 'http'
import { AddressInfo } from 'net'
import { uploadFile } from '../../src/template/buildApi'

// Regression test for e2b-dev/e2b#1243 — uploadFile used to pass a Node
// Readable directly to fetch, which made undici fall back to
// Transfer-Encoding: chunked. S3 presigned PUT URLs reject that with 501
// NotImplemented. The fix buffers the archive first so Content-Length is set.
describe('uploadFile transfer encoding', () => {
  let testDir: string
  let server: Server
  let baseUrl: string
  let capturedHeaders: IncomingMessage['headers'] = {}
  let capturedBodyLength = 0

  beforeAll(async () => {
    testDir = await mkdtemp(join(tmpdir(), 'uploadFile-test-'))
    await writeFile(join(testDir, 'hello.txt'), 'hello world')

    server = createServer((req, res) => {
      capturedHeaders = req.headers
      let bytes = 0
      req.on('data', (chunk: Buffer) => {
        bytes += chunk.length
      })
      req.on('end', () => {
        capturedBodyLength = bytes
        res.writeHead(200)
        res.end()
      })
    })
    await new Promise<void>((resolve) => server.listen(0, '127.0.0.1', resolve))
    const { port } = server.address() as AddressInfo
    baseUrl = `http://127.0.0.1:${port}/upload`
  })

  afterAll(async () => {
    await new Promise<void>((resolve) => server.close(() => resolve()))
    await rm(testDir, { recursive: true, force: true })
  })

  test('sets Content-Length and does not use chunked transfer encoding', async () => {
    await uploadFile(
      {
        fileName: '*.txt',
        fileContextPath: testDir,
        url: baseUrl,
        ignorePatterns: [],
        resolveSymlinks: false,
      },
      undefined
    )

    expect(capturedHeaders['content-length']).toBeDefined()
    const contentLength = Number(capturedHeaders['content-length'])
    expect(contentLength).toBeGreaterThan(0)
    expect(contentLength).toBe(capturedBodyLength)

    const transferEncoding = capturedHeaders['transfer-encoding']
    if (transferEncoding !== undefined) {
      expect(transferEncoding.toLowerCase()).not.toContain('chunked')
    }
  })
})