Skip to content

Commit 7af234f

Browse files
committed
feat: resolveChatWrapper for mistral3
It will choose the `MistralChatWrapper` if the model architecture is `mistral3`.
1 parent 305bff6 commit 7af234f

2 files changed

Lines changed: 32 additions & 0 deletions

File tree

src/chatWrappers/utils/resolveChatWrapper.ts

Lines changed: 2 additions & 0 deletions
Original file line number · Diff line number · Diff line change
@@ -454,6 +454,8 @@ export function resolveChatWrapper(
454454
return createSpecializedChatWrapper(FalconChatWrapper);
455455
else if (arch === "gemma" || arch === "gemma2")
456456
return createSpecializedChatWrapper(GemmaChatWrapper);
457+
else if (arch === "mistral3")
458+
return createSpecializedChatWrapper(MistralChatWrapper);
457459
}
458460

459461
return null;

test/standalone/chatWrappers/utils/resolveChatWrapper.test.ts

Lines changed: 30 additions & 0 deletions
Original file line number · Diff line number · Diff line change
@@ -767,4 +767,34 @@ describe("resolveChatWrapper", () => {
767767
});
768768
expect(chatWrapper).to.be.instanceof(HarmonyChatWrapper);
769769
});
770+
771+
test("should resolve to MistralChatWrapper based on mistral3 architecture", () => {
772+
const chatWrapper = resolveChatWrapper({
773+
fileInfo: {
774+
version: 3,
775+
tensorCount: 0,
776+
metadata: {
777+
general: {
778+
architecture: "mistral3",
779+
// eslint-disable-next-line camelcase
780+
quantization_version: "1"
781+
},
782+
tokenizer: {
783+
ggml: {
784+
model: "llama",
785+
tokens: [],
786+
// eslint-disable-next-line camelcase
787+
token_type: []
788+
}
789+
}
790+
} as any,
791+
metadataSize: 0,
792+
architectureMetadata: {} as any,
793+
splicedParts: 1,
794+
totalTensorCount: 0,
795+
totalMetadataSize: 0
796+
}
797+
});
798+
expect(chatWrapper).to.be.instanceof(MistralChatWrapper);
799+
});
770800
});

0 commit comments

Comments (0)