commit 2e01e3427878ecc7cb1bf9970c670d79eb37f850
Author: Frederico @ VilaRosa02
Date:   Tue Sep 9 10:06:07 2025 +0000

    init

diff --git a/LlamaCli.func.php b/LlamaCli.func.php
new file mode 100644
index 0000000..bf0800f
--- /dev/null
+++ b/LlamaCli.func.php
@@ -0,0 +1,77 @@
+"assistant","content"=>"How can I help you?"],["role"=>"user","content"=>"Tell me the capital of France"]]
+ $debug_level = $debug_level ?? 0;
+
+ // 1. Parse the prompt and resolve actions "@"
+ $prompt = ResolveActions($prompt);
+
+
+ // $llm_stdin = $prompt;
+ // Llama chat template
+ $llm_stdin = "<|start_header_id|>system<|end_header_id|>$system<|eot_id|>";
+ foreach($previousConversation as $message)
+ $llm_stdin .= "<|start_header_id|>{$message["role"]}<|end_header_id|>{$message["content"]}<|eot_id|>";
+ $llm_stdin .= "<|start_header_id|>user<|end_header_id|>$prompt<|eot_id|>";
+ $llm_stdin .= "<|start_header_id|>assistant<|end_header_id|>";
+
+ // Create a unique temporary file for STDIN, input data
+ $input_file_name = tempnam(sys_get_temp_dir(), 'tmp_');
+ if ($debug_level >= 4) file_put_contents("php://stderr","input_file_name $input_file_name content: $llm_stdin\n");
+ file_put_contents($input_file_name, $llm_stdin);
+
+ // Create a unique temporary file for STDERR, debug data
+ $debug_file_name = tempnam(sys_get_temp_dir(), 'tmp_');
+ if ($debug_file_name === false) die("Could not create temporary file\n");
+
+
+
+ $command = "$llamacpp_bin -m $llamacpp_model_path/$llamacpp_model -f $input_file_name -no-cnv --simple-io 2> $debug_file_name ";
+ if ($debug_level >= 3) file_put_contents("php://stderr","Attempting command: $command\n");
+ $output = shell_exec($command);
+
+
+ //
+ $debug_data = file_get_contents($debug_file_name);
+ if ($debug_level >= 5) file_put_contents("php://stderr","debug_file_name $debug_file_name content: $debug_data\n");
+
+ // Clean TEMP files
+ if (file_exists($debug_file_name)) unlink($debug_file_name);
+ if (file_exists($input_file_name)) unlink($input_file_name);
+
+ //
+ // 5. Erase the input from the output
+ //
+
+ // 5.1 Llama specific model. produces in this format:
+ $output = str_replace("system$system", "", $output);
+ $output = str_replace("user$prompt"."assistant", "", $output);
+ foreach($previousConversation as $message)
+ $output = str_replace($message["role"].$message["content"],"",$output);
+ $output = str_replace("[end of text]", "", $output);
+ // 5.2 Generic input
+ //$output = str_replace($llm_stdin, "", $output);
+ // 5.3 Sanitize output
+ $output = trim($output);
+ if ($parseCodeInOutput)
+ $output = ExtractCodeSections($output);
+
+
+ return $output;
+}
+
+function LlamaCli_raw(string $prompt, string $system, array $options = []) { $options["parseCodeInOutput"] = false; return LlamaCli($prompt, $system,$options); }
diff --git a/lib/ExtractCodeSections.function.php b/lib/ExtractCodeSections.function.php
new file mode 100644
index 0000000..58f5029
--- /dev/null
+++ b/lib/ExtractCodeSections.function.php
@@ -0,0 +1,33 @@
+ $url,
+ CURLOPT_RETURNTRANSFER => true,
+ CURLOPT_FOLLOWLOCATION => true,
+ CURLOPT_TIMEOUT => 10,
+ CURLOPT_USERAGENT => "PHP cURL RSS Reader"
+ ]);
+
+ $response = curl_exec($ch);
+
+ if (curl_errno($ch)) die("cURL error: " . curl_error($ch));
+ curl_close($ch);
+
+ // Parse as XML
+ $xml = @simplexml_load_string($response);
+
+ if (!$xml) die("Failed to parse XML.");
+
+ // Find first
+ $firstTitle = '';
+ if (isset($xml->channel->item[0]->title)) {
+ // RSS 2.0 style
+ $firstTitle = (string)$xml->channel->item[0]->title;
+ } elseif (isset($xml->entry[0]->title)) {
+ // Atom style
+ $firstTitle = (string)$xml->entry[0]->title;
+ } elseif (isset($xml->title)) {
+ // fallback
+ $firstTitle = (string)$xml->title;
+ }
+
+
+ $text = str_replace($matches[0],$firstTitle,$text);
+ }
+
+ return $text;
+
+}
diff --git a/lib/index.php b/lib/index.php
new file mode 100644
index 0000000..f2ec58a
--- /dev/null
+++ b/lib/index.php
@@ -0,0 +1,6 @@
+<?php
+
+foreach(scandir(__DIR__) as $filename) {
+ if ($filename == "." || $filename == "..") continue;
+ if (substr($filename, -13) == ".function.php") include __DIR__."/$filename";
+}
diff --git a/test.php b/test.php
new file mode 100644
index 0000000..95a53d3
--- /dev/null
+++ b/test.php
@@ -0,0 +1,74 @@
+<?php require_once __DIR__."/LlamaCli.func.php";
+
+// TEST 01: Simple query.
+
+// $out = LlamaCli("What is the capital of France?"); print_r($out);
+
+// TEST 02: Simple query with previous background conversation
+
+// $out = LlamaCli("Portugal?","",["previousConversation"=>[["role"=>"user","content"=>"What is the capital of France?"],["role"=>"assistant","content"=>"Paris."]]]); print_r($out);
+
+
+// TEST 03: Simple query with SYSTEM PRIMING message
+
+//$summary = LlamaCli(file_get_contents("test_input_data.3.txt"),"You are an executive assistant that will summarize english text in 1 paragraph");
+//$title = LlamaCli($summary[0],"You are an talented journalist that will produce a provocative headline title based on a summary of a text");
+
+
+// TEST 04: RECURSIVE QUERY, to produce long-content from short-one. LONG-WRITING. ( GOAL: Learning from model deep knowledge )
+
+// // -------- BOOK EXAMPLE ---------
+//
+// // 0. CATEGORY
+// $category = "learning finnish";
+// $expert = "teacher";
+// $total_no_of_chapters = 3;
+//
+//
+// // 1. TITLE
+// $book_title = LlamaCli_raw("Write me a title for my new book about $category. Output title only, no options, no chat.","You are an expert $expert.");
+//
+//
+// // 2. CHAPTER TITLES
+// $sys_msg = "You are an expert $expert writing a book with the title $book_title with $total_no_of_chapters chapters";
+// $chapters = []; $conv_hist = [];
+//
+// $msg = "Write me the title of my first chapter. Output title only, no options, no chat.";
+// $conv_hist[] = ["role"=>"user","content"=>$msg];
+// $conv_hist[] = ["role"=>"assistant","content"=>($chapters[] = LlamaCli_raw($msg,$sys_msg,["debug_level"=>0]))];
+//
+// for($no=1; $no < $total_no_of_chapters; $no++) {
+// $msg = "Write me the title of chapter number $no.";
+// $conv_hist[] = ["role"=>"user","content"=>$msg];
+// $conv_hist[] = ["role"=>"assistant","content"=>($chapters[] = LlamaCli_raw($msg,$sys_msg,["previousConversation"=>$conv_hist, "debug_level"=>0]))];
+//
+// }
+//
+// // 3. CHAPTER CONTENTS
+// $content = [];
+// foreach($chapters as $chapter_title)
+// $content[$chapter_title] = LlamaCli_raw(
+// "Write 2 paragraphs for a chapter titled $chapter_title in a book called $book_title. Output content only, no chat.",
+// "You are an expert $expert."
+// );
+//
+// print_r([$book_title, $content]);
+
+
+
+// TEST 05: CHAINED QUERY, to parse long-content and produce short one. SUMMARIZATION. ( GOAL: )
+
+
+// $summaries = [];
+// $content = file_get_contents("test_input_data.2.txt");
+// $chunks = array_chunk(explode("\n",$content),80);
+//
+// foreach($chunks as $chunk) {
+// $summaries[] = LlamaCli_raw(implode("\n",$chunk),"You are a legal executive assistant that will summarize a conversation in english between ex-husband and wife in 1 paragraph");
+// }
+// print_r($summaries);
+
+
+// TEST 06: Resolve ACTIONS
+
+echo LlamaCli_raw("translate this title to portuguese and finnish: @rss_reader https://feeds.bbci.co.uk/news/rss.xml cde def","");