From f516bfd1c37b98906c097dbe900ce05e7d12fae9 Mon Sep 17 00:00:00 2001
From: James Montgomery
Date: Sat, 9 Nov 2024 12:38:34 -0500
Subject: [PATCH 1/3] Remove duplicate http client in get_version.

---
 include/ollama.hpp      | 1 -
 singleheader/ollama.hpp | 1 -
 2 files changed, 2 deletions(-)

diff --git a/include/ollama.hpp b/include/ollama.hpp
index cd19032..0c51828 100644
--- a/include/ollama.hpp
+++ b/include/ollama.hpp
@@ -835,7 +835,6 @@ class Ollama
     std::string get_version()
     {
         std::string version;
-        httplib::Client cli("http://localhost:11434");
 
         auto res = this->cli->Get("/api/version");
 
diff --git a/singleheader/ollama.hpp b/singleheader/ollama.hpp
index a23fcd1..aff5b4f 100644
--- a/singleheader/ollama.hpp
+++ b/singleheader/ollama.hpp
@@ -35625,7 +35625,6 @@ class Ollama
     std::string get_version()
    {
         std::string version;
-        httplib::Client cli("http://localhost:11434");
 
         auto res = this->cli->Get("/api/version");
 

From e544eaf9fa78889bd1db5d4352107d023720054e Mon Sep 17 00:00:00 2001
From: James Montgomery
Date: Sat, 9 Nov 2024 13:16:55 -0500
Subject: [PATCH 2/3] Clean up warnings and unused parameters.

---
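Note: the new warning flags drive every source change below. -Wunused-parameter
(pulled in by -Wextra) flags the 'format' parameter, which is deliberately kept
in the signature but no longer forwarded, hence the (void)format cast;
-Wignored-qualifiers (also -Wextra) flags the meaningless top-level const on the
by-value return type of has_error(); catching ollama::exception by value copies,
and can slice, the thrown object, which recent GCC reports via -Wcatch-value,
hence the switch to catch-by-reference; the unused send_request() stub is
commented out; and the stray semicolon after the closing namespace brace is
dropped, since a namespace, unlike a class, takes none. A minimal standalone
sketch of these idioms (names are illustrative, not taken from the library)
that should compile cleanly with g++ -Wall -Wextra -Wpedantic:

    #include <iostream>
    #include <stdexcept>
    #include <string>

    // Parameter kept for API compatibility; the (void) cast marks the
    // non-use as intentional and silences -Wunused-parameter.
    void build_request(const std::string& prompt, const std::string& format)
    {
        (void)format;
        std::cout << prompt << "\n";
    }

    // Plain 'bool': a top-level const on a by-value return type has no
    // effect and draws -Wignored-qualifiers.
    bool has_error() { return false; }

    int main()
    {
        build_request("Why is the sky blue?", "json");
        try { throw std::runtime_error("model not found"); }
        // Catch by reference to avoid copying or slicing the exception.
        catch (const std::runtime_error& e) { std::cout << e.what() << "\n"; }
        return has_error() ? 1 : 0;
    }
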
(*this)["keep_alive"] = keep_alive_duration; type = message_type::chat; @@ -329,7 +329,7 @@ namespace ollama return simple_string; } - const bool has_error() const + bool has_error() const { if ( json_data.contains("error") ) return true; return false; @@ -871,12 +871,13 @@ class Ollama private: +/* bool send_request(const ollama::request& request, std::function on_receive_response=nullptr) { return true; } - +*/ std::string server_url; httplib::Client *cli; @@ -1039,7 +1040,7 @@ namespace ollama ollama.setWriteTimeout(seconds); } -}; +} #endif \ No newline at end of file diff --git a/singleheader/ollama.hpp b/singleheader/ollama.hpp index aff5b4f..d76d0cd 100644 --- a/singleheader/ollama.hpp +++ b/singleheader/ollama.hpp @@ -34891,7 +34891,7 @@ namespace ollama public: image(const std::string base64_sequence, bool valid = true) { - this->base64_sequence = base64_sequence; + this->base64_sequence = base64_sequence; this->valid = valid; } ~image(){}; @@ -35044,7 +35044,7 @@ namespace ollama (*this)["stream"] = stream; if (options!=nullptr) (*this)["options"] = options["options"]; - //(*this)["format"] = format; // Commented out as providing the format causes issues with some models. + (void)format; //(*this)["format"] = format; // Commented out as providing the format causes issues with some models. (*this)["keep_alive"] = keep_alive_duration; type = message_type::chat; @@ -35119,7 +35119,7 @@ namespace ollama return simple_string; } - const bool has_error() const + bool has_error() const { if ( json_data.contains("error") ) return true; return false; @@ -35661,12 +35661,13 @@ class Ollama private: +/* bool send_request(const ollama::request& request, std::function on_receive_response=nullptr) { return true; } - +*/ std::string server_url; httplib::Client *cli; @@ -35829,7 +35830,7 @@ namespace ollama ollama.setWriteTimeout(seconds); } -}; +} #endif \ No newline at end of file diff --git a/test/test.cpp b/test/test.cpp index 34d47c4..296cfc9 100644 --- a/test/test.cpp +++ b/test/test.cpp @@ -106,7 +106,7 @@ TEST_SUITE("Ollama Tests") { try { ollama::generate("Non-existent-model", "Requesting this model will throw an error"); } - catch(ollama::exception e) { exception_handled = true; } + catch(ollama::exception& e) { exception_handled = true; } CHECK( exception_handled ); } From 717966db68e9657480e2f8c8eedaa4de89c5778f Mon Sep 17 00:00:00 2001 From: James Montgomery Date: Sat, 9 Nov 2024 13:46:37 -0500 Subject: [PATCH 3/3] Revise makefile to include warnings, copy test image. 
 Makefile          | 19 +++++++++++--------
 examples/main.cpp |  5 +++--
 2 files changed, 14 insertions(+), 10 deletions(-)

diff --git a/Makefile b/Makefile
index b5d7d8e..39bf6ca 100644
--- a/Makefile
+++ b/Makefile
@@ -5,18 +5,21 @@ CXXFLAGS = -Wall -Wextra -Wpedantic
 CREATE_BUILD_DIR = mkdir -p build; cp -n llama.jpg build;
 
 all: examples test-cpp11 test-cpp14 test-cpp20
-examples: examples/main.cpp
-	$(CREATE_BUILD_DIR)
+build:
+	mkdir -p build
+ifeq ($(OS),Windows_NT)
+	if not exist "build/llama.jpg" copy "llama.jpg" "build"
+else
+	cp -n llama.jpg build
+endif
+examples: build examples/main.cpp
 	$(CXX) $(CXXFLAGS) examples/main.cpp -Iinclude -o build/examples -std=c++11 -pthread -latomic
 test: test-cpp11
-test-cpp11: test/test.cpp
-	$(CREATE_BUILD_DIR)
+test-cpp11: build test/test.cpp
 	$(CXX) $(CXXFLAGS) test/test.cpp -Iinclude -Itest -o build/test -std=c++11 -pthread -latomic
-test-cpp14: test/test.cpp
-	$(CREATE_BUILD_DIR)
+test-cpp14: build test/test.cpp
 	$(CXX) $(CXXFLAGS) test/test.cpp -Iinclude -Itest -o build/test-cpp14 -std=c++14 -pthread -latomic
-test-cpp20: test/test.cpp
-	$(CREATE_BUILD_DIR)
+test-cpp20: build test/test.cpp
 	$(CXX) $(CXXFLAGS) test/test.cpp -Iinclude -Itest -o build/test-cpp20 -std=c++2a -pthread -latomic
 clean:
 	rm -rf build
diff --git a/examples/main.cpp b/examples/main.cpp
index 378fd4e..1d570ec 100644
--- a/examples/main.cpp
+++ b/examples/main.cpp
@@ -62,7 +62,7 @@ int main()
 
     // Create a blob on the ollama server using the following digest
     try { ollama::create_blob("sha256:29fdb92e57cf0827ded04ae6461b5931d01fa595843f55d36f5b275a52087dd2"); std::cout << "Blob was created on Ollama server." << std::endl; }
-    catch( ollama::exception e) { std::cout << "Error when creating blob: " << e.what() << std::endl;}
+    catch( ollama::exception& e) { std::cout << "Error when creating blob: " << e.what() << std::endl;}
 
     // Check if a blob with the following digest exists.
     if ( ollama::blob_exists("sha256:29fdb92e57cf0827ded04ae6461b5931d01fa595843f55d36f5b275a52087dd2") ) std::cout << "Blob exists on Ollama server." << std::endl;
@@ -95,7 +95,7 @@ int main()
     try { 
         ollama::generate("Non-existent-model", "Requesting this model will throw an error"); 
     }
-    catch(ollama::exception e) { std::cout << e.what() << std::endl; }
+    catch(ollama::exception& e) { std::cout << e.what() << std::endl; }
 
     //Alternatively, throwing exceptions can be disabled. In this case, either emptry values or false will be returned in the event of an error.
     //ollama::allow_exceptions(false);
@@ -117,6 +117,7 @@ int main()
     // Optionally send a request to ollama to load a model into memory.
     // This will occur automatically during generation but this allows you to preload a model before using it.
     bool model_loaded = ollama::load_model("llama3:8b");
+    if (model_loaded) std::cout << "Model has been loaded";
 
     // Perform a simple generation to a string by specifying a model and a prompt. The response will be returned as one string without streaming the reply.
     std::cout << ollama::generate("llama3:8b", "Why is the sky blue?") << std::endl;
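
A closing note on PATCH 1/3, since the one-line fix is easy to read past: the
deleted httplib::Client local was constructed but never used; get_version()
already sends its request through this->cli, the member client bound to
whatever server URL the Ollama object was constructed with. The local also
shadowed the cli member, a latent source of confusion. A minimal sketch of the
call path the fix preserves, assuming the URL-taking constructor implied by the
server_url member above (the address shown is simply Ollama's default):

    #include <iostream>
    #include "ollama.hpp"

    int main()
    {
        // get_version() issues GET /api/version through the member client,
        // which targets the URL given here, not a hardcoded localhost.
        Ollama client("http://localhost:11434");
        std::cout << "server version: " << client.get_version() << std::endl;
        return 0;
    }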