Skip to content
Open
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
50 changes: 50 additions & 0 deletions packages/gitbook/tests/robots.test.ts
Copy link
Copy Markdown
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Why are these tests in the robots.txt test file 🤔?

markdown.test.ts would be a better place for them.

Original file line number Diff line number Diff line change
@@ -1,6 +1,56 @@
import { describe, expect, it } from 'bun:test';
import { getContentTestURL } from './utils';

const TEST_PAGE_URL = 'https://gitbook.gitbook.io/test-gitbook-open/text-page';

/**
 * Fetch the shared test page with the given `User-Agent` header.
 *
 * @param userAgent - The exact User-Agent string to send with the request.
 * @returns The raw `Response`; callers assert on status and content-type.
 */
async function fetchTestPageAs(userAgent: string): Promise<Response> {
    return fetch(getContentTestURL(TEST_PAGE_URL), {
        headers: {
            'User-Agent': userAgent,
        },
    });
}

describe('markdown serving based on user agent', () => {
    // User agents matched as AI agents by exact UA matching: these must be
    // served the markdown representation of the page.
    const markdownAgents = [
        ['GPTBot (ua-match AI agent)', 'GPTBot/1.2'],
        ['ClaudeBot (ua-match AI agent)', 'ClaudeBot/1.0'],
    ] as const;

    // Bots that are detected (heuristically or as traditional crawlers) but
    // are NOT AI agents: these must keep receiving regular HTML.
    const htmlAgents = [
        [
            'Slackbot (heuristic detection only)',
            'Slackbot-LinkExpanding 1.0 (+https://api.slack.com/robots)',
        ],
        [
            'Googlebot (traditional bot, not an AI agent)',
            'Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)',
        ],
    ] as const;

    for (const [label, userAgent] of markdownAgents) {
        it(`should serve markdown to ${label}`, async () => {
            const response = await fetchTestPageAs(userAgent);

            expect(response.status).toBe(200);
            expect(response.headers.get('content-type')).toContain('text/markdown');
        });
    }

    for (const [label, userAgent] of htmlAgents) {
        it(`should NOT serve markdown to ${label}`, async () => {
            const response = await fetchTestPageAs(userAgent);

            expect(response.status).toBe(200);
            expect(response.headers.get('content-type')).toContain('text/html');
        });
    }
});

describe('robots.txt', () => {
it('declares allow content signals for public sites', async () => {
const response = await fetch(
Expand Down
Loading