Skip to content

Commit e5db3ae

Browse files
Merge pull request #232 from apify/feat/provide-tests
feat: add a base for tests
2 parents 5c98d8d + 5fd4bd5 commit e5db3ae

2 files changed

Lines changed: 70 additions & 1 deletion

File tree

packages/actor-scraper/sitemap-scraper/src/internals/consts.ts

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
import type {
22
Dictionary,
3-
ProxyConfigurationOptions,
3+
ProxyConfigurationOptions as CrawleeProxyConfigurationOptions,
44
RequestOptions,
55
} from '@crawlee/http';
66

@@ -10,6 +10,10 @@ export const enum ProxyRotation {
1010
UntilFailure = 'UNTIL_FAILURE',
1111
}
1212

// Widens Crawlee's proxy options with the `useApifyProxy` flag that appears in
// the actor's input schema (see `proxyConfiguration: { useApifyProxy: … }` in
// the tests). NOTE(review): presumably a platform-input-only field that Crawlee
// itself does not declare — confirm against the INPUT_SCHEMA.
type ProxyConfigurationOptions = CrawleeProxyConfigurationOptions & {
    useApifyProxy?: boolean;
};
1317
/**
1418
* Replicates the INPUT_SCHEMA with JavaScript types for quick reference
1519
* and IDE type check integration.
Lines changed: 65 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,65 @@
1+
import { log } from '@crawlee/http';
2+
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
3+
4+
import { type Input, ProxyRotation } from '../src/internals/consts.js';
5+
import { CrawlerSetup } from '../src/internals/crawler_setup.js';
6+
7+
vi.mock('apify', () => ({
8+
Actor: {
9+
isAtHome: () => true,
10+
getEnv: () => ({}),
11+
createProxyConfiguration: async () => ({
12+
newUrl: async () => undefined,
13+
}),
14+
fail: async (message: string) => new Error(message),
15+
},
16+
}));
17+
18+
const createInput = (overrides: Partial<Input> = {}): Input => ({
19+
startUrls: [{ url: 'https://example.com' }],
20+
keepUrlFragments: false,
21+
respectRobotsTxtFile: true,
22+
pageFunction: '() => ({})',
23+
proxyConfiguration: { useApifyProxy: false },
24+
proxyRotation: ProxyRotation.Recommended,
25+
maxRequestRetries: 3,
26+
maxCrawlingDepth: 0,
27+
debugLog: false,
28+
customData: {},
29+
...overrides,
30+
});
31+
32+
describe('CrawlerSetup', () => {
33+
let initSpy: ReturnType<typeof vi.spyOn>;
34+
35+
beforeEach(() => {
36+
initSpy = vi
37+
.spyOn(CrawlerSetup.prototype as any, '_initializeAsync')
38+
.mockResolvedValue(undefined);
39+
});
40+
41+
afterEach(() => {
42+
initSpy.mockRestore();
43+
});
44+
45+
it('sets debug log level when debugLog is true', async () => {
46+
const setLevelSpy = vi.spyOn(log, 'setLevel');
47+
48+
const setup = new CrawlerSetup(createInput({ debugLog: true }));
49+
50+
expect(setLevelSpy).toHaveBeenCalledWith(log.LEVELS.DEBUG);
51+
expect(setup).toBeInstanceOf(CrawlerSetup);
52+
});
53+
54+
it('stores rawInput as a JSON string', async () => {
55+
const input = createInput();
56+
const setup = new CrawlerSetup(input);
57+
58+
expect(setup.rawInput).toBe(JSON.stringify(input));
59+
});
60+
61+
it('uses the expected actor name', async () => {
62+
const setup = new CrawlerSetup(createInput());
63+
expect(setup.name).toBe('Sitemap Extractor');
64+
});
65+
});

0 commit comments

Comments
 (0)