| |
| |
|
|
|
|
| import {HumanMessage, SystemMessage, LlamaCppLLM} from '../../../../src/index.js';
|
| import {QwenChatWrapper} from "node-llama-cpp";
|
|
|
/**
 * Exercise 1: Basic LLM setup.
 *
 * Demonstrates three invocation patterns against a local GGUF model:
 *   1. invoking with a plain string prompt,
 *   2. invoking with SystemMessage/HumanMessage objects,
 *   3. per-call temperature overrides (low vs. high randomness).
 *
 * The model is always disposed in `finally`, even if an invoke throws.
 */
async function exercise1() {
  console.log('=== Exercise 1: Basic LLM Setup ===\n');

  // Local Qwen model; the chat wrapper discourages "thinking" output.
  const llm = new LlamaCppLLM({
    modelPath: './models/Qwen3-1.7B-Q6_K.gguf',
    temperature: 0.7,
    maxTokens: 100,
    chatWrapper: new QwenChatWrapper({ thoughts: 'discourage' }),
    verbose: true,
  });

  try {
    // Part 1 — the simplest form: pass a raw string prompt.
    console.log('Part 1: Simple string input');
    const simpleReply = await llm.invoke("What is 2+2? Answer in one sentence");
    console.log('Response:', simpleReply.content);
    console.log();

    // Part 2 — structured messages let us set a system persona.
    console.log('Part 2: Using message objects');
    const tutorReply = await llm.invoke([
      new SystemMessage("You are a patient math tutor teaching a 10-year-old. Always explain the reasoning step-by-step in simple terms."),
      new HumanMessage("What is 5*5? Answer in one sentence."),
    ]);
    console.log('Response:', tutorReply.content);
    console.log();

    // Part 3 — same question at two temperatures to contrast sampling.
    console.log('Part 3: Temperature differences');
    console.log('Temperature controls randomness: 0.0 = deterministic, 1.0 = creative\n');
    const question = "Give me one adjective to describe winter:";

    console.log('Low temperature (0.1):');
    // Clear accumulated chat history so earlier turns don't bias the sample.
    // NOTE(review): reaches into the wrapper's `_chatSession` internals —
    // presumably there is no public reset API; confirm against LlamaCppLLM.
    llm._chatSession.setChatHistory([]);
    const lowTempReply = await llm.invoke(question, { temperature: 0.1 });
    console.log(lowTempReply.content);

    console.log('\nHigh temperature (0.9):');
    llm._chatSession.setChatHistory([]);
    const highTempReply = await llm.invoke(question, { temperature: 0.9 });
    console.log(highTempReply.content);
  } finally {
    // Release the native llama.cpp resources regardless of success.
    await llm.dispose();
    console.log('\n✓ Resources cleaned up');
  }

  console.log('\n✓ Exercise 1 complete!');
}
|
|
|
|
|
// Entry point: run the exercise; report any unhandled failure on stderr.
exercise1().catch((err) => console.error(err));
|
|
|
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
|
|
|
|
|
|