AI Cache: Normalize strings before creating fingerprint to handle multiple platforms (#1017)

* AI Cache: Normalize strings before creating fingerprint to handle multiple platforms

* AI Cache results enhancements + fix generateFlowMarkdownFile variable issue

- AI Cache results enhancements
  - Normalize strings before creating fingerprint to handle multiple platforms
  - Delete unused cache files
- Fix variables mismatch when calling `generateFlowMarkdownFile`

* typo
nvuillam authored Jan 19, 2025
1 parent 2fa83ca commit 179315b
Showing 6 changed files with 43 additions and 24 deletions.
7 changes: 7 additions & 0 deletions CHANGELOG.md
@@ -4,6 +4,13 @@

Note: Can be used with `sfdx plugins:install sfdx-hardis@beta` and docker image `hardisgroupcom/sfdx-hardis@beta`

## [5.16.1] 2025-01-19

- AI Cache results enhancements
- Normalize strings before creating fingerprint to handle multiple platforms
- Delete unused cache files
- Fix variables mismatch when calling `generateFlowMarkdownFile`

## [5.16.0] 2025-01-19

- New AI Provider: Agentforce
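The hunks below implement the fix. For context, farmhash fingerprints strings byte for byte, so the same Flow XML checked out with Windows (CRLF) and Unix (LF) line endings used to produce different cache keys. A minimal sketch of the issue and of the normalization idea, using a made-up XML snippet rather than code from this commit:

```typescript
import farmhash from "farmhash";

// Hypothetical Flow XML as checked out on Unix (LF) and on Windows (CRLF)
const xmlUnix = "<Flow>\n  <label>My Flow</label>\n</Flow>";
const xmlWindows = xmlUnix.replace(/\n/g, "\r\n");

// Without normalization the fingerprints differ, so the AI cache misses on one platform
console.log(farmhash.fingerprint32(xmlUnix) === farmhash.fingerprint32(xmlWindows)); // false

// With normalization (same idea as the normalizeString helper added in utils.ts) they match
const normalize = (s: string) =>
  s.normalize().trim().replace(/[\u200B-\u200D\uFEFF]/g, "").replace(/\r\n/g, "\n");
console.log(
  farmhash.fingerprint32(normalize(xmlUnix)) === farmhash.fingerprint32(normalize(xmlWindows))
); // true
```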
2 changes: 1 addition & 1 deletion src/commands/hardis/doc/flow2markdown.ts
@@ -79,7 +79,7 @@ If [AI integration](${CONSTANTS.DOC_URL_ROOT}/salesforce-ai-setup/) is configure

uxLog(this, c.grey(`Generating markdown for Flow ${this.inputFile}...`));
const flowXml = (await fs.readFile(this.inputFile, "utf8")).toString();
const genRes = await generateFlowMarkdownFile(this.inputFile, flowXml, this.outputFile, { collapsedDetails: false, describeWithAi: true });
const genRes = await generateFlowMarkdownFile(path.basename(this.inputFile, ".flow-meta.xml"), flowXml, this.outputFile, { collapsedDetails: false, describeWithAi: true });
if (!genRes) {
throw new Error("Error generating markdown file");
}
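The fix above passes the flow name instead of the full file path as the first argument of `generateFlowMarkdownFile`. As a quick illustration with a hypothetical path (not taken from the commit), `path.basename` with a suffix argument strips both the directories and the extension:

```typescript
import path from "path";

// Hypothetical Flow metadata file path
const inputFile = "force-app/main/default/flows/My_Flow.flow-meta.xml";

// The suffix argument removes the ".flow-meta.xml" extension along with the directories
console.log(path.basename(inputFile, ".flow-meta.xml")); // "My_Flow"
```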
2 changes: 1 addition & 1 deletion src/commands/hardis/doc/project2markdown.ts
@@ -378,7 +378,7 @@ ${Project2Markdown.htmlInstructions}
flowSkips.push(flowFile);
continue;
}
const genRes = await generateFlowMarkdownFile(flowFile, flowXml, outputFlowMdFile, { collapsedDetails: false, describeWithAi: true });
const genRes = await generateFlowMarkdownFile(flowName, flowXml, outputFlowMdFile, { collapsedDetails: false, describeWithAi: true });
if (!genRes) {
flowErrors.push(flowFile);
continue;
27 changes: 19 additions & 8 deletions src/common/aiProvider/utils.ts
@@ -24,10 +24,10 @@ export class UtilsAi {
return process.env.PROMPTS_LANGUAGE || "en";
}

public static async findAiCache(template: PromptTemplate, promptParameters: any[]): Promise<{ success: boolean, cacheText?: string, fingerPrint: string, aiCacheDirFile: string }> {
public static async findAiCache(template: PromptTemplate, promptParameters: any[], uniqueId: string): Promise<{ success: boolean, cacheText?: string, fingerPrint: string, aiCacheDirFile: string }> {
const fingerPrint = this.getFingerPrint(promptParameters);
const lang = this.getPromptsLanguage();
const aiCacheDirFile = path.join("docs", "cache-ai-results", `${lang}-${template}-${fingerPrint}.md`);
const aiCacheDirFile = path.join("docs", "cache-ai-results", `${lang}-${template}-${uniqueId}-${fingerPrint}.md`);
if (process.env?.IGNORE_AI_CACHE === "true") {
return { success: false, fingerPrint, aiCacheDirFile: aiCacheDirFile.replace(/\\/g, '/') };
}
@@ -38,30 +38,41 @@
return { success: false, fingerPrint, aiCacheDirFile: aiCacheDirFile.replace(/\\/g, '/') };
}

public static async writeAiCache(template: PromptTemplate, promptParameters: any[], aiCacheText: string): Promise<void> {
public static async writeAiCache(template: PromptTemplate, promptParameters: any[], uniqueId: string, aiCacheText: string): Promise<void> {
const fingerPrint = this.getFingerPrint(promptParameters);
const aiCacheDir = path.join("docs", "cache-ai-results");
await fs.ensureDir(aiCacheDir);
const lang = this.getPromptsLanguage();
const aiCacheDirFile = path.join(aiCacheDir, `${lang}-${template}-${fingerPrint}.md`);
const aiCacheDirFile = path.join(aiCacheDir, `${lang}-${template}-${uniqueId}-${fingerPrint}.md`);
const otherCacheFiles = fs.readdirSync(aiCacheDir).filter((file) => file.includes(`${lang}-${template}-${uniqueId}`) && !file.includes(fingerPrint));
for (const otherCacheFile of otherCacheFiles) {
await fs.remove(path.join(aiCacheDir, otherCacheFile));
}
await fs.writeFile(aiCacheDirFile, aiCacheText);
}

public static getFingerPrint(promptParameters: any[]): string {
const parametersFingerPrints = promptParameters.map((promptParameter) => {
if (typeof promptParameter === "string" && promptParameter.includes("<xml")) {
try {
const xmlObj = new XMLParser().parse(promptParameter);
return farmhash.fingerprint32(JSON.stringify(xmlObj));
const xmlObj = new XMLParser().parse(UtilsAi.normalizeString(promptParameter));
return farmhash.fingerprint32(UtilsAi.normalizeString(JSON.stringify(xmlObj)));
}
// eslint-disable-next-line @typescript-eslint/no-unused-vars
catch (e) {
return farmhash.fingerprint32(promptParameter);
return farmhash.fingerprint32(UtilsAi.normalizeString(promptParameter));
}
}
return farmhash.fingerprint32(JSON.stringify(promptParameter));
else if (typeof promptParameter === "string") {
return farmhash.fingerprint32(UtilsAi.normalizeString(promptParameter));
}
return farmhash.fingerprint32(UtilsAi.normalizeString(JSON.stringify(promptParameter)));
});
return parametersFingerPrints.join("-");
}

public static normalizeString(str: string) {
return str.normalize().trim().replace(/[\u200B-\u200D\uFEFF]/g, "").replace(/\r\n/g, '\n');
}

}
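To make the new signatures concrete, here is a hypothetical caller that mirrors the pattern used in docUtils.ts and mermaidUtils.ts below. The import paths and the `callAi` callback are assumptions for the sketch; the template name and the cache file layout come from the diff above:

```typescript
import { UtilsAi } from "./utils"; // import paths assumed for this sketch
import { PromptTemplate } from "./promptTemplates"; // assumed location of the PromptTemplate type

// The new uniqueId argument (here the flow name) becomes part of the cache file name,
// e.g. docs/cache-ai-results/en-PROMPT_DESCRIBE_FLOW-My_Flow-<fingerprint>.md
async function describeWithCache(
  template: PromptTemplate,
  flowXml: string,
  flowName: string,
  callAi: (xml: string) => Promise<string>
): Promise<string> {
  const aiCache = await UtilsAi.findAiCache(template, [flowXml], flowName);
  if (aiCache.success) {
    return aiCache.cacheText || "";
  }
  const responseText = await callAi(flowXml);
  // writeAiCache now also removes older cache files for the same language/template/uniqueId
  // whose fingerprint no longer matches, so stale results do not pile up.
  await UtilsAi.writeAiCache(template, [flowXml], flowName, responseText);
  return responseText;
}
```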
10 changes: 5 additions & 5 deletions src/common/utils/docUtils.ts
@@ -202,7 +202,7 @@ export class SalesforceSetupUrlBuilder {

if (!pathTemplate) {
if (!alreadySaid.includes(metadataType)) {
uxLog(this, c.grey(`Unsupported metadata type: ${metadataType}`));
uxLog(this, c.grey(`Unsupported metadata type for doc quick link: ${metadataType}`));
alreadySaid.push(metadataType);
}
return null;
@@ -252,7 +252,7 @@ export async function completeAttributesDescriptionWithAi(attributesMarkdown: st
if (!attributesMarkdown) {
return attributesMarkdown;
}
const aiCache = await UtilsAi.findAiCache("PROMPT_COMPLETE_OBJECT_ATTRIBUTES_MD", [attributesMarkdown]);
const aiCache = await UtilsAi.findAiCache("PROMPT_COMPLETE_OBJECT_ATTRIBUTES_MD", [attributesMarkdown], objectName);
if (aiCache.success === true) {
uxLog(this, c.grey("Used AI cache for attributes completion (set IGNORE_AI_CACHE=true to force call to AI)"));
return aiCache.cacheText ? `<!-- Cache file: ${aiCache.aiCacheDirFile} -->\n\n${aiCache.cacheText}` : attributesMarkdown;
@@ -264,15 +264,15 @@
// Replace description in markdown
if (aiResponse?.success) {
const responseText = aiResponse.promptResponse || "No AI description available";
await UtilsAi.writeAiCache("PROMPT_COMPLETE_OBJECT_ATTRIBUTES_MD", [attributesMarkdown], responseText);
await UtilsAi.writeAiCache("PROMPT_COMPLETE_OBJECT_ATTRIBUTES_MD", [attributesMarkdown], objectName, responseText);
attributesMarkdown = `<!-- Cache file: ${aiCache.aiCacheDirFile} -->\n\n${responseText}`;
}
}
return attributesMarkdown;
}

async function completeObjectDocWithAiDescription(objectMarkdownDoc: string, objectName: string, objectXml: string, allObjectsNames: string, objectLinksDetails: string): Promise<string> {
const aiCache = await UtilsAi.findAiCache("PROMPT_DESCRIBE_OBJECT", [objectXml]);
const aiCache = await UtilsAi.findAiCache("PROMPT_DESCRIBE_OBJECT", [objectXml], objectName);
if (aiCache.success === true) {
uxLog(this, c.grey("Used AI cache for object description (set IGNORE_AI_CACHE=true to force call to AI)"));
const replaceText = `<!-- Cache file: ${aiCache.aiCacheDirFile} -->\n\n${aiCache.cacheText || ""}`;
@@ -289,7 +289,7 @@ async function completeObjectDocWithAiDescription(objectMarkdownDoc: string, obj
if (responseText.startsWith("##")) {
responseText = responseText.split("\n").slice(1).join("\n");
}
await UtilsAi.writeAiCache("PROMPT_DESCRIBE_OBJECT", [objectXml], responseText);
await UtilsAi.writeAiCache("PROMPT_DESCRIBE_OBJECT", [objectXml], objectName, responseText);
const replaceText = `<!-- Cache file: ${aiCache.aiCacheDirFile} -->\n\n${responseText}`;
const objectMarkdownDocUpdated = objectMarkdownDoc.replace("<!-- Object description -->", replaceText);
return objectMarkdownDocUpdated;
19 changes: 10 additions & 9 deletions src/common/utils/mermaidUtils.ts
@@ -42,7 +42,7 @@ export async function generateFlowMarkdownFile(flowName: string, flowXml: string
const flowDocGenResult = await parseFlow(flowXml, 'mermaid', { outputAsMarkdown: true, collapsedDetails: options.collapsedDetails });
let flowMarkdownDoc = flowDocGenResult.uml;
if (options.describeWithAi) {
flowMarkdownDoc = await completeWithAiDescription(flowMarkdownDoc, flowXml);
flowMarkdownDoc = await completeWithAiDescription(flowMarkdownDoc, flowXml, flowName);
}
await fs.writeFile(outputFlowMdFile, flowMarkdownDoc);
uxLog(this, c.grey(`Written ${flowName} documentation in ${outputFlowMdFile}`));
@@ -227,7 +227,8 @@ export async function generateFlowVisualGitDiff(flowFile, commitBefore: string,
let diffMarkdown = compareMdLines.join("\n");

if (result.hasFlowDiffs === true && flowXmlAfter !== "" && flowXmlBefore !== "") {
diffMarkdown = await completeWithDiffAiDescription(diffMarkdown, flowXmlAfter, flowXmlBefore)
const flowDiffKey = `${flowLabel}-${commitBefore}-${commitAfter}`;
diffMarkdown = await completeWithDiffAiDescription(diffMarkdown, flowXmlAfter, flowXmlBefore, flowDiffKey)
}

// Write markdown with diff in a file
@@ -573,7 +574,7 @@ export async function generateHistoryDiffMarkdown(flowFile: string, debugMode: b
const reportDir = await getReportDirectory();
await fs.ensureDir(path.join(reportDir, "flow-diff"));
const diffMdFileTmp = path.join(reportDir, 'flow-diff', `${flowLabel}_${moment().format("YYYYMMDD-hhmmss")}.md`);
const genRes = await generateFlowMarkdownFile(flowFile, flowXml, diffMdFileTmp, { collapsedDetails: false, describeWithAi: false });
const genRes = await generateFlowMarkdownFile(flowLabel, flowXml, diffMdFileTmp, { collapsedDetails: false, describeWithAi: false });
if (!genRes) {
throw new Error(`Error generating markdown file for flow ${flowFile}`);
}
@@ -668,8 +669,8 @@ export function removeMermaidLinks(messageBody: string) {
return result;
}

async function completeWithAiDescription(flowMarkdownDoc: string, flowXml: string): Promise<string> {
const aiCache = await UtilsAi.findAiCache("PROMPT_DESCRIBE_FLOW", [flowXml]);
async function completeWithAiDescription(flowMarkdownDoc: string, flowXml: string, flowName: string): Promise<string> {
const aiCache = await UtilsAi.findAiCache("PROMPT_DESCRIBE_FLOW", [flowXml], flowName);
if (aiCache.success === true) {
uxLog(this, c.grey("Used AI cache for flow description (set IGNORE_AI_CACHE=true to force call to AI)"));
const replaceText = `## AI-Generated Description\n\n<!-- Cache file: ${aiCache.aiCacheDirFile} -->\n\n${aiCache.cacheText || ""}`;
@@ -685,7 +686,7 @@ async function completeWithAiDescription(flowMarkdownDoc: strin
if (responseText.startsWith("##")) {
responseText = responseText.split("\n").slice(1).join("\n");
}
await UtilsAi.writeAiCache("PROMPT_DESCRIBE_FLOW", [flowXml], responseText);
await UtilsAi.writeAiCache("PROMPT_DESCRIBE_FLOW", [flowXml], flowName, responseText);
const replaceText = `## AI-Generated Description\n\n<!-- Cache file: ${aiCache.aiCacheDirFile} -->\n\n${responseText}`;
const flowMarkdownDocUpdated = flowMarkdownDoc.replace("<!-- Flow description -->", replaceText);
return flowMarkdownDocUpdated;
@@ -695,8 +696,8 @@ async function completeWithAiDescription(flowMarkdownDoc: strin
}

/* jscpd:ignore-start */
async function completeWithDiffAiDescription(flowMarkdownDoc: string, flowXmlNew: string, flowXmlPrevious: string): Promise<string> {
const aiCache = await UtilsAi.findAiCache("PROMPT_DESCRIBE_FLOW_DIFF", [flowXmlNew, flowXmlPrevious]);
async function completeWithDiffAiDescription(flowMarkdownDoc: string, flowXmlNew: string, flowXmlPrevious: string, diffKey: string): Promise<string> {
const aiCache = await UtilsAi.findAiCache("PROMPT_DESCRIBE_FLOW_DIFF", [flowXmlNew, flowXmlPrevious], diffKey);
if (aiCache.success) {
uxLog(this, c.grey("Used AI cache for diff description (set IGNORE_AI_CACHE=true to force call to AI)"));
const replaceText = `## AI-Generated Differences Summary\n\n<!-- Cache file: ${aiCache.aiCacheDirFile} -->\n\n${aiCache.cacheText || ""}`;
@@ -712,7 +713,7 @@ async function completeWithDiffAiDescription(flowMarkdownDoc: string, flowXmlNew
if (responseText.startsWith("##")) {
responseText = responseText.split("\n").slice(1).join("\n");
}
await UtilsAi.writeAiCache("PROMPT_DESCRIBE_FLOW_DIFF", [flowXmlNew, flowXmlPrevious], responseText);
await UtilsAi.writeAiCache("PROMPT_DESCRIBE_FLOW_DIFF", [flowXmlNew, flowXmlPrevious], diffKey, responseText);
const replaceText = `## AI-Generated Differences Summary\n\n<!-- Cache file: ${aiCache.aiCacheDirFile} -->\n\n${aiCache.cacheText || ""}`;
const flowMarkdownDocUpdated = flowMarkdownDoc.replace("<!-- Flow description -->", replaceText);
return flowMarkdownDocUpdated;
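The flow diff cache key introduced in generateFlowVisualGitDiff above simply concatenates the flow label with the two compared commits, so each flow and commit pair gets its own cache entry. A small illustration, reusing the commit ids shown on this page:

```typescript
// Illustrative values: the flow label and the two compared commits
const flowLabel = "My_Flow";
const commitBefore = "2fa83ca";
const commitAfter = "179315b";

// Same construction as the flowDiffKey added in generateFlowVisualGitDiff
const flowDiffKey = `${flowLabel}-${commitBefore}-${commitAfter}`;
console.log(flowDiffKey); // "My_Flow-2fa83ca-179315b"
// Summaries of diffs against different commits therefore never overwrite each other's cache file.
```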