| |
| |
|
|
|
|
| import {HumanMessage, SystemMessage, LlamaCppLLM, Runnable} from '../../../../src/index.js';
|
|
|
|
|
/**
 * Runnable that turns a raw user string into a chat message pair:
 * a SystemMessage carrying the configured system prompt, followed by
 * a HumanMessage carrying the input.
 */
class PromptFormatter extends Runnable {
  /**
   * @param {string} systemPrompt - Instruction prepended to every request.
   */
  constructor(systemPrompt = "You are a helpful assistant. Be concise.") {
    super();
    this.systemPrompt = systemPrompt;
  }

  /**
   * @param {string} input - Raw user text.
   * @param {object} config - Runnable invocation config (unused here).
   * @returns {Promise<Array>} [SystemMessage, HumanMessage] ready for an LLM.
   */
  async _call(input, config) {
    const messages = [
      new SystemMessage(this.systemPrompt),
      new HumanMessage(input),
    ];
    return messages;
  }
}
|
|
|
|
|
/**
 * Runnable that normalizes an LLM response into a trimmed plain string.
 * Accepts either a message-like object exposing a truthy `.content`
 * property or any other value (stringified as a fallback).
 */
class ResponseParser extends Runnable {
  /**
   * @param {object|string} input - LLM output (message object or raw value).
   * @param {object} config - Runnable invocation config (unused here).
   * @returns {Promise<string>} The trimmed response text.
   */
  async _call(input, config) {
    const content = input.content;
    return content ? content.trim() : String(input).trim();
  }
}
|
|
|
|
|
/**
 * Runnable that rejects responses shorter than a minimum length.
 * Passes valid input through unchanged; returns an error string otherwise.
 */
class AnswerValidator extends Runnable {
  /**
   * @param {number} minLength - Minimum acceptable character count.
   */
  constructor(minLength = 10) {
    super();
    this.minLength = minLength;
  }

  /**
   * @param {string} input - Parsed response text.
   * @param {object} config - Runnable invocation config (unused here).
   * @returns {Promise<string>} Input unchanged, or an error message.
   */
  async _call(input, config) {
    const tooShort = input.length < this.minLength;
    return tooShort
      ? `Error: Response too short (${input.length} chars, min ${this.minLength})`
      : input;
  }
}
|
|
|
/**
 * Exercise 4: Composition and Pipelines.
 *
 * Demonstrates:
 *  1. Invoking formatter / parser / validator Runnables individually.
 *  2. Chaining them with `.pipe()` into a full pipeline.
 *  3. Building reusable pipelines with different prompts/temperatures.
 *  4. Batch processing a list of questions through one pipeline.
 *
 * Fix vs. original: `creativeLLM` and `factualLLM` were disposed only on
 * the happy path inside the try body, so any error thrown after their
 * creation leaked both models. They are now hoisted and released in the
 * `finally` block alongside `llm`.
 */
async function exercise4() {
  console.log('=== Exercise 4: Composition and Pipelines ===\n');

  const llm = new LlamaCppLLM({
    modelPath: './models/Meta-Llama-3.1-8B-Instruct-Q5_K_S.gguf',
    temperature: 0.7,
    maxTokens: 100
  });

  // Hoisted so `finally` can dispose them even when an error interrupts
  // the try body after they are constructed.
  let creativeLLM = null;
  let factualLLM = null;

  try {
    console.log('Part 1: Testing individual components');

    const formatter = new PromptFormatter();
    const parser = new ResponseParser();
    const validator = new AnswerValidator();

    console.log('Testing formatter:');
    const formatted = await formatter.invoke("What is AI?");
    console.log(formatted);
    console.log();

    console.log('Testing LLM + parser:');
    const llmResponse = await llm.invoke(formatted);
    const parsed = await parser.invoke(llmResponse);
    console.log('Parsed:', parsed);
    console.log();

    console.log('Testing validator with short input:');
    const shortResult = await validator.invoke("Hi");
    console.log(shortResult);
    console.log();

    console.log('Part 2: Complete pipeline');

    // format -> generate -> parse -> validate
    const pipeline = formatter
      .pipe(llm)
      .pipe(parser)
      .pipe(validator);

    console.log('Pipeline structure:', pipeline.toString());

    const result1 = await pipeline.invoke("What is machine learning?");
    console.log('Result:', result1);
    console.log();

    console.log('Part 3: Reusable agent pipeline');

    // High temperature for creative output.
    creativeLLM = new LlamaCppLLM({
      modelPath: './models/Meta-Llama-3.1-8B-Instruct-Q5_K_S.gguf',
      temperature: 0.9,
      maxTokens: 100
    });

    const creativeFormatter = new PromptFormatter(
      "You are a creative writer. Use vivid imagery."
    );

    const creativePipeline = creativeFormatter
      .pipe(creativeLLM)
      .pipe(parser);

    // Low temperature for factual output.
    factualLLM = new LlamaCppLLM({
      modelPath: './models/Meta-Llama-3.1-8B-Instruct-Q5_K_S.gguf',
      temperature: 0.1,
      maxTokens: 100
    });

    const factualFormatter = new PromptFormatter(
      "You are a factual encyclopedia. Be precise and accurate."
    );

    const factualPipeline = factualFormatter
      .pipe(factualLLM)
      .pipe(parser)
      .pipe(validator);

    console.log('Creative (temp=0.9):');
    const creative = await creativePipeline.invoke("Describe a sunset");
    console.log(creative);
    console.log();

    console.log('Factual (temp=0.1):');
    const factual = await factualPipeline.invoke("What is the capital of France?");
    console.log(factual);
    console.log();

    console.log('Part 4: Batch processing with pipeline');

    const questions = [
      "What is Python?",
      "What is JavaScript?",
      "What is Rust?"
    ];

    const answers = await pipeline.batch(questions);

    questions.forEach((q, i) => {
      console.log(`Q: ${q}`);
      console.log(`A: ${answers[i]}`);
      console.log();
    });
  } finally {
    // Release every model that was actually constructed, even on error.
    await llm.dispose();
    await creativeLLM?.dispose();
    await factualLLM?.dispose();
  }

  console.log('\n✓ Exercise 4 complete!');
}
|
|
|
|
|
// Entry point: run the exercise; surface any unhandled rejection on stderr.
exercise4().catch((err) => console.error(err));
|
|
|
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |