Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
41 changes: 38 additions & 3 deletions packages/cubejs-schema-compiler/src/compiler/DataSchemaCompiler.ts
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@ const NATIVE_IS_SUPPORTED = isNativeSupported();
const moduleFileCache = {};

const JINJA_SYNTAX = /{%|%}|{{|}}/ig;
const JINJA_MACRO_DEFINITION = /{%[-+]?\s*macro\s/;

const getThreadsCount = () => {
const envThreads = getEnv('transpilationWorkerThreadsCount');
Expand Down Expand Up @@ -100,6 +101,7 @@ export type TranspileOptions = {
compilerId?: string;
stage?: 0 | 1 | 2 | 3;
jinjaUsed?: boolean;
jinjaMacrosFingerprint?: string;
};

export type CompileStage = 0 | 1 | 2 | 3;
Expand Down Expand Up @@ -280,6 +282,8 @@ export class DataSchemaCompiler {
this.loadJinjaTemplates(jinjaTemplatedFiles);
}

const jinjaMacrosFingerprint = DataSchemaCompiler.computeJinjaMacrosFingerprint(jinjaTemplatedFiles);

const errorsReport = new ErrorReporter(null, [], this.errorReportOptions);
this.errorsReporter = errorsReport;

Expand Down Expand Up @@ -328,11 +332,11 @@ export class DataSchemaCompiler {
}

const jinjaFilesTasks = jinjaTemplatedFiles
.map(f => this.transpileJinjaFile(f, errorsReport, { cubeNames, cubeSymbols, transpilerNames }));
.map(f => this.transpileJinjaFile(f, errorsReport, { cubeNames, cubeSymbols, transpilerNames, jinjaMacrosFingerprint }));

results = (await Promise.all([...jsFilesTasks, ...yamlFilesTasks, ...jinjaFilesTasks])).flat();
} else {
results = await Promise.all(toCompile.map(f => this.transpileFile(f, errorsReport, { cubeNames, cubeSymbols, transpilerNames })));
results = await Promise.all(toCompile.map(f => this.transpileFile(f, errorsReport, { cubeNames, cubeSymbols, transpilerNames, jinjaMacrosFingerprint })));
}

return results.filter(f => !!f) as FileContent[];
Expand Down Expand Up @@ -576,6 +580,33 @@ export class DataSchemaCompiler {
});
}

/**
 * Macro files are hidden dependencies of any cube file that imports them —
 * minijinja resolves `{% import %}` lazily against its template store, so
 * the per-file Jinja render cache must be invalidated when *any* macro file
 * changes. Hashing all macro files together rather than tracking per-cube
 * imports keeps the implementation simple at the cost of over-invalidating
 * when macro edits happen (which is rare). CUB-2357.
 *
 * @returns hex MD5 over all macro files (sorted by file name for a stable
 *          digest), or '' when the repository defines no macros.
 */
private static computeJinjaMacrosFingerprint(files: FileContent[]): string {
  // Sort by file name so the digest does not depend on repository
  // enumeration order.
  const macroFiles = files
    .filter((file) => JINJA_MACRO_DEFINITION.test(file.content))
    .sort((left, right) => left.fileName.localeCompare(right.fileName));

  // Empty string doubles as the "no macros anywhere" marker.
  if (!macroFiles.length) {
    return '';
  }

  const digest = crypto.createHash('md5');
  for (const file of macroFiles) {
    // NUL separators keep (fileName, content) boundaries unambiguous.
    digest.update(`${file.fileName}\0${file.content}\0`);
  }

  return digest.digest('hex');
}

private prepareTranspileSymbols() {
const cubeNames: string[] = this.cubeDictionary.cubeNames();
// We need only cubes and all its member names for transpiling.
Expand Down Expand Up @@ -802,7 +833,11 @@ export class DataSchemaCompiler {
errorsReport: ErrorReporter,
options: TranspileOptions
): Promise<(FileContent | undefined)> {
const cacheKey = crypto.createHash('md5').update(file.content).digest('hex');
const cacheKey = crypto.createHash('md5')
.update(file.content)
.update('\0')
.update(options.jinjaMacrosFingerprint || '')
.digest('hex');

let renderedFileContent: string;

Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,98 @@
import { LRUCache } from 'lru-cache';
import { FileContent, isNativeSupported } from '@cubejs-backend/shared';

import { prepareCompiler } from '../../../src/compiler/PrepareCompiler';

const suite = isNativeSupported() === true ? describe : xdescribe;

// Builds a minimal Jinja-templated cube schema that imports the shared
// macros.yml file. `extraDimsBlock` is spliced verbatim into the dimensions
// list, letting a test inject extra dimensions or a `{{ macros.* }}` call.
// NOTE(review): the YAML indentation inside this template appears flattened
// by the diff rendering — confirm against the committed file.
const cubeFile = (name: string, extraDimsBlock: string): string => `{% import 'macros.yml' as macros %}

cubes:
- name: ${name}
sql: >
SELECT 1 AS id

dimensions:
- name: id
sql: id
type: number
primary_key: true
${extraDimsBlock}
`;

// Builds a macros.yml body defining a `dimensions()` Jinja macro that emits
// a single string dimension named `dimensionName`. Changing the name between
// compiles is how the tests force a macro-only edit.
// NOTE(review): YAML indentation inside this template appears flattened by
// the diff rendering — confirm against the committed file.
const macroFile = (dimensionName: string) => `{% macro dimensions() %}
- name: ${dimensionName}
sql: ${dimensionName}
type: string
{% endmacro %}
`;

/**
 * Compiles the given in-memory schema files with a caller-supplied Jinja
 * render cache, so successive compiles can share (or be shown not to share)
 * cached render results.
 *
 * @returns the resulting metaTransformer for inspecting compiled cubes.
 */
async function compileWith(files: FileContent[], compiledJinjaCache: LRUCache<string, string>) {
  // Minimal in-memory repository: no filesystem access beyond localPath.
  const repository = {
    localPath: () => __dirname,
    dataSchemaFiles: async () => files,
  };

  // `compiledJinjaCache` is not part of the declared options type, hence the
  // `as any` to thread it through to the compiler.
  const prepared = prepareCompiler(repository, {
    adapter: 'postgres',
    compiledJinjaCache,
  } as any);

  await prepared.compiler.compile();

  return { metaTransformer: prepared.metaTransformer };
}

/**
 * Returns the dimension names of the named cube from a compiled
 * metaTransformer.
 *
 * @throws Error with the cube name when the cube is absent — previously a
 *         missing cube surfaced as an opaque TypeError on `cube.config`.
 */
function dimensionNames(metaTransformer: any, cubeName: string): string[] {
  const cube = metaTransformer.cubes.find((c: any) => c.config.name === cubeName);
  if (!cube) {
    throw new Error(`Cube not found in metaTransformer: ${cubeName}`);
  }
  return cube.config.dimensions.map((d: any) => d.name);
}

suite('Jinja macro cache invalidation', () => {
  it('invalidates the cube file render cache when a macro file changes (CUB-2357)', async () => {
    const cache = new LRUCache<string, string>({ max: 250 });

    const initialFiles: FileContent[] = [
      { fileName: 'orders.yml', content: cubeFile('orders', '{{ macros.dimensions() }}') },
      { fileName: 'macros.yml', content: macroFile('status') },
    ];

    const first = await compileWith(initialFiles, cache);
    expect(dimensionNames(first.metaTransformer, 'orders')).toEqual(['orders.id', 'orders.status']);

    // Same cube file bytes, different macro body: the macros fingerprint
    // must force a fresh render of orders.yml despite the shared cache.
    const updatedFiles: FileContent[] = [
      initialFiles[0],
      { fileName: 'macros.yml', content: macroFile('priority') },
    ];

    const second = await compileWith(updatedFiles, cache);
    expect(dimensionNames(second.metaTransformer, 'orders')).toEqual(['orders.id', 'orders.priority']);
  });

  it('reuses the render cache for unchanged cube files when a sibling cube file changes', async () => {
    const cache = new LRUCache<string, string>({ max: 250 });

    const initialFiles: FileContent[] = [
      { fileName: 'orders.yml', content: cubeFile('orders', '') },
      { fileName: 'products.yml', content: cubeFile('products', '') },
      { fileName: 'macros.yml', content: macroFile('unused') },
    ];
    await compileWith(initialFiles, cache);

    const baselineSize = cache.size;

    const updatedFiles: FileContent[] = [
      {
        fileName: 'orders.yml',
        content: cubeFile('orders', ' - name: status\n sql: status\n type: string\n'),
      },
      initialFiles[1],
      initialFiles[2],
    ];
    await compileWith(updatedFiles, cache);

    // Only the changed orders.yml should miss the cache; products.yml and
    // macros.yml are byte-identical and the macros fingerprint is unchanged.
    expect(cache.size).toBe(baselineSize + 1);
  });
});
Loading