Skip to content

Commit

Permalink
added llm_done_list endpoint
Browse files Browse the repository at this point in the history
  • Loading branch information
Endle committed Sep 1, 2024
1 parent 03daaa7 commit b8116d9
Show file tree
Hide file tree
Showing 5 changed files with 54 additions and 11 deletions.
28 changes: 20 additions & 8 deletions fireSeqSearch_addon/main.js
Original file line number Diff line number Diff line change
Expand Up @@ -200,10 +200,8 @@ function createFireSeqDom(count) {

async function appendResultToSearchResult(serverInfo, parsedSearchResult, dom) {
const firefoxExtensionUserOption = await checkUserOptions();

consoleLogForDebug('Loaded user option: ' + JSON.stringify(firefoxExtensionUserOption));


function buildListItems(parsedSearchResult) {
const hitList = document.createElement("ul");
for (const record of parsedSearchResult) {
Expand Down Expand Up @@ -253,14 +251,29 @@ async function appendResultToSearchResult(serverInfo, parsedSearchResult, dom) {
}

async function processLlmSummary(serverInfo, parsedSearchResult, dom) {
    // Fetch `url`, retrying until the server answers.
    // Each attempt is aborted after 500 ms so one stuck request does not
    // block the summary pass; failed attempts back off for 1 s before retrying.
    // Returns the response body text (the original version returned a
    // placeholder "abc" before the fetch ever completed).
    async function keepRetryFetch(url) {
        while (true) {
            try {
                const response = await fetch(url, {
                    signal: AbortSignal.timeout(500)
                });
                // response.text() is async; await it so the caller gets the
                // body string, not a pending Promise.
                return await response.text();
            } catch (err) {
                // Timeout or network error — wait, then try again.
                await new Promise(resolve => setTimeout(resolve, 1000));
            }
        }
    }
    for (const record of parsedSearchResult) {
        // TODO remove hard code port
        const llm_api = "http://127.0.0.1:3030/summarize/" + record.title;
        console.log("llm called");
        console.log(record.title);
        // Fire-and-forget per record: summaries are logged as they arrive
        // rather than awaited serially.
        keepRetryFetch(llm_api).then(response => {
            console.log("returned");
            console.log(response);
        });
    }
}

Expand Down Expand Up @@ -318,19 +331,18 @@ function getSearchParameterFromCurrentPage() {
(function() {
const searchParameter = getSearchParameterFromCurrentPage();


addGlobalStyle(fireSeqSearchScriptCSS);

console.log("main to invoke");
//https://gomakethings.com/waiting-for-multiple-all-api-responses-to-complete-with-the-vanilla-js-promise.all-method/
Promise.all([
fetch("http://127.0.0.1:3030/server_info"),
fetch("http://127.0.0.1:3030/query/" + searchParameter)
]).then(function (responses) {
console.log("main to invoke");
return Promise.all(responses.map(function (response) {return response.json();}));
}).then(function (data) {
//consoleLogForDebug(data);
mainProcess(data);
//return appendResultToSearchResult(data);
}).then((_e) => {
const highlightedItems = document.querySelectorAll('.fireSeqSearchHighlight');
consoleLogForDebug(highlightedItems);
Expand Down
11 changes: 10 additions & 1 deletion fire_seq_search_server/src/http_client/endpoints.rs
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
use std::sync::Arc;
use log::debug;
use log::{debug, info};

use crate::query_engine::{QueryEngine, ServerInformation};
use axum::Json;
Expand Down Expand Up @@ -30,6 +30,15 @@ pub async fn summarize(
Html(r.await)
}

/// HTTP endpoint: report which document titles already have a finished
/// LLM summary. Delegates to [`QueryEngine::get_llm_done_list`] and wraps
/// the plain-text result in an `Html` response.
pub async fn get_llm_done_list(
    State(engine_arc): State<Arc<QueryEngine>>
) -> Html<String>{
    info!("get list endpoint called");
    Html(engine_arc.get_llm_done_list().await)
}

pub async fn generate_word_cloud(State(engine_arc): State<Arc<QueryEngine>>)
-> Html<String> {
let div_id = "fireSeqSearchWordcloudRawJson";
Expand Down
15 changes: 13 additions & 2 deletions fire_seq_search_server/src/local_llm/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -199,7 +199,7 @@ impl LlmEngine{
pub async fn call_llm_engine(&self) {

let health = self.health().await.unwrap();
if (health.slots_idle == 0) {
if health.slots_idle == 0 {
info!("No valid slot, continue");
return;
}
Expand All @@ -223,8 +223,9 @@ impl LlmEngine{
}
drop(jcache);

info!("Start summarize job: {}", &title);
let summarize_result = self.summarize(&doc.body).await;
info!("get summarize result {}", &title);
info!("Finished summarize job: {}", &title);

let mut jcache = self.job_cache.lock().await;//.unwrap();
next_job = jcache.job_queue.pop_front();
Expand All @@ -237,6 +238,16 @@ impl LlmEngine{
return jcache.done_job.get(title).cloned();
}

/// Return the titles of all documents whose summarization jobs have
/// completed, joined with newlines (empty string when nothing is done yet).
///
/// Holds the `job_cache` lock only long enough to copy the titles out.
pub async fn get_llm_done_list(&self) -> String {
    let jcache = self.job_cache.lock().await;
    // Only the keys (titles) matter here; the summary text is not needed.
    let mut titles = Vec::with_capacity(jcache.done_job.len());
    for title in jcache.done_job.keys() {
        info!("already done : {}", &title);
        titles.push(title.to_owned());
    }
    titles.join("\n")
}

pub async fn health(&self) -> Result<HealthCheck, Box<dyn std::error::Error>> {
let res = self.client.get(self.endpoint.to_owned() + "/health")
.send()
Expand Down
1 change: 1 addition & 0 deletions fire_seq_search_server/src/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -95,6 +95,7 @@ async fn main() {
.route("/server_info", get(endpoints::get_server_info))
.route("/wordcloud", get(endpoints::generate_word_cloud))
.route("/summarize/:title", get(endpoints::summarize))
.route("/llm_done_list", get(endpoints::get_llm_done_list))
.with_state(engine_arc.clone());

let listener = tokio::net::TcpListener::bind(&engine_arc.server_info.host)
Expand Down
10 changes: 10 additions & 0 deletions fire_seq_search_server/src/query_engine/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -166,6 +166,16 @@ impl QueryEngine {
"LLM turned off".to_owned()
}
}
// TODO should serilize the whole vector
pub async fn get_llm_done_list(&self) -> String {
if cfg!(feature="llm") {
let llm = self.llm.as_ref().unwrap();
"Finished doc titles:\n====\n".to_owned()
+ &llm.get_llm_done_list().await
} else {
"LLM turned off".to_owned()
}
}
}

fn term_preprocess(term:String) -> String {
Expand Down

0 comments on commit b8116d9

Please sign in to comment.