Integrate Mistral AI models with Cognitora for code generation and secure, sandboxed execution. The example below uses Mistral function calling to generate Python code, then runs that code inside a Cognitora session.

Install both SDKs:
npm install @mistralai/mistralai cognitora
import MistralClient from '@mistralai/mistralai';
import { Cognitora } from 'cognitora';

const client = new MistralClient(process.env.MISTRAL_API_KEY);
const cognitora = new Cognitora({ apiKey: process.env.COGNITORA_API_KEY });

async function runMistralCodeInterpreter(userQuery: string) {
  // Create an isolated sandbox session for running the generated code.
  const session = await cognitora.sessions.create({
    image: 'python:3.11-slim',
    timeout: 300,
    persistent: true
  });

  // Describe the execute_python tool so the model can request code execution.
  const tools = [{
    type: "function",
    function: {
      name: "execute_python",
      description: "Execute Python code in a secure sandbox",
      parameters: {
        type: "object",
        properties: {
          code: {
            type: "string",
            description: "Python code to execute"
          }
        },
        required: ["code"]
      }
    }
  }];

  // Ask the model to answer the query, letting it call execute_python as needed.
  const response = await client.chat({
    model: 'mistral-large-latest',
    messages: [
      { role: 'system', content: 'You are a Python expert. Use the execute_python function to run code.' },
      { role: 'user', content: userQuery }
    ],
    tools,
    tool_choice: 'auto'
  });

  const toolCall = response.choices[0].message.tool_calls?.[0];

  if (toolCall && toolCall.function.name === "execute_python") {
    // The model returns the tool arguments as a JSON string.
    const { code } = JSON.parse(toolCall.function.arguments);

    // Run the generated code inside the sandbox session.
    const execution = await cognitora.compute.execute({
      sessionId: session.id,
      command: ["python", "-c", code]
    });

    return {
      code,
      result: execution.stdout,
      error: execution.stderr,
      exitCode: execution.exitCode
    };
  }

  // No tool call was made: return the model's plain-text answer instead.
  return {
    code: null,
    result: response.choices[0].message.content,
    error: null,
    exitCode: null
  };
}
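A minimal usage sketch for the function above; the example query and console logging are illustrative only and not part of the Mistral or Cognitora APIs:

// Ask Mistral to generate some Python and run it in the Cognitora sandbox.
runMistralCodeInterpreter('Compute the first 10 Fibonacci numbers')
  .then((result) => {
    console.log('Generated code:', result.code);
    console.log('Output:', result.result);
  })
  .catch(console.error);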
Get started with Mistral AI and Cognitora in minutes, with secure, scalable code execution out of the box.