Skip to content

Commit

Permalink
feat(misc): refactored and refined code
Browse files Browse the repository at this point in the history
  • Loading branch information
mandarini committed Aug 3, 2023
1 parent 25b51c0 commit d572e27
Show file tree
Hide file tree
Showing 2 changed files with 101 additions and 91 deletions.
111 changes: 26 additions & 85 deletions nx-dev/data-access-ai/src/lib/data-access-ai.ts
Original file line number Diff line number Diff line change
Expand Up @@ -8,17 +8,25 @@ import {
OpenAIApi,
CreateModerationResponse,
CreateEmbeddingResponse,
ChatCompletionRequestMessageRoleEnum,
CreateCompletionResponseUsage,
} from 'openai';
import {
ApplicationError,
ChatItem,
PageSection,
UserError,
checkEnvVariables,
getListOfSources,
getMessageFromResponse,
initializeChat,
sanitizeLinksInResponse,
toMarkdownList,
} from './utils';

const DEFAULT_MATCH_THRESHOLD = 0.78;
const DEFAULT_MATCH_COUNT = 15;
const MIN_CONTENT_LENGTH = 50;

const openAiKey = process.env['NX_OPENAI_KEY'];
const supabaseUrl = process.env['NX_NEXT_PUBLIC_SUPABASE_URL'];
const supabaseServiceKey = process.env['NX_SUPABASE_SERVICE_ROLE_KEY'];
Expand All @@ -27,10 +35,7 @@ const config = new Configuration({
});
const openai = new OpenAIApi(config);

let chatHistory: {
role: ChatCompletionRequestMessageRoleEnum;
content: string;
}[] = [];
let chatFullHistory: ChatItem[] = [];

let totalTokensSoFar = 0;

Expand All @@ -44,27 +49,16 @@ export async function nxDevDataAccessAi(
sourcesMarkdown: string;
}> {
try {
if (!openAiKey) {
throw new ApplicationError('Missing environment variable NX_OPENAI_KEY');
}

if (!supabaseUrl) {
throw new ApplicationError(
'Missing environment variable NX_NEXT_PUBLIC_SUPABASE_URL'
);
}

if (!supabaseServiceKey) {
throw new ApplicationError(
'Missing environment variable NX_SUPABASE_SERVICE_ROLE_KEY'
);
}
checkEnvVariables(openAiKey, supabaseUrl, supabaseServiceKey);

if (!query) {
throw new UserError('Missing query in request data');
}

const supabaseClient = createClient(supabaseUrl, supabaseServiceKey);
const supabaseClient = createClient(
supabaseUrl as string,
supabaseServiceKey as string
);

// Moderate the content to comply with OpenAI T&C
const sanitizedQuery = query.trim();
Expand Down Expand Up @@ -102,9 +96,9 @@ export async function nxDevDataAccessAi(
'match_page_sections_2',
{
embedding,
match_threshold: 0.78,
match_count: 15,
min_content_length: 50,
match_threshold: DEFAULT_MATCH_THRESHOLD,
match_count: DEFAULT_MATCH_COUNT,
min_content_length: MIN_CONTENT_LENGTH,
}
);

Expand Down Expand Up @@ -151,61 +145,15 @@ export async function nxDevDataAccessAi(
.trim()}
`;

let finalQuery;
let chatGptMessages: {
role: ChatCompletionRequestMessageRoleEnum;
content: string;
}[] = [];
const { chatMessages: chatGptMessages, chatHistory } = initializeChat(
chatFullHistory,
query,
contextText,
prompt,
aiResponse
);

if (chatHistory.length > 0 && aiResponse) {
finalQuery = `Here is the question you should answer: ${sanitizedQuery}`;
chatGptMessages = [
...chatHistory,
{
role: ChatCompletionRequestMessageRoleEnum.Assistant,
content: aiResponse,
},
{
role: ChatCompletionRequestMessageRoleEnum.User,
content: finalQuery,
},
];
chatHistory.push(
{
role: ChatCompletionRequestMessageRoleEnum.Assistant,
content: aiResponse,
},
{
role: ChatCompletionRequestMessageRoleEnum.User,
content: finalQuery,
}
);
} else {
finalQuery = `
Here is the Nx documentation you should use: ${contextText}.
Here is the question you should answer: ${sanitizedQuery}
`;
chatHistory.push(
{
role: ChatCompletionRequestMessageRoleEnum.System,
content: prompt,
},
{
role: ChatCompletionRequestMessageRoleEnum.User,
content: finalQuery,
}
);
chatGptMessages = [
{
role: ChatCompletionRequestMessageRoleEnum.System,
content: prompt,
},
{
role: ChatCompletionRequestMessageRoleEnum.User,
content: finalQuery,
},
];
}
chatFullHistory = chatHistory;

const response = await openai.createChatCompletion({
model: 'gpt-3.5-turbo-16k',
Expand Down Expand Up @@ -249,10 +197,3 @@ export async function nxDevDataAccessAi(
throw err;
}
}
export class ApplicationError extends Error {
constructor(message: string, public data: Record<string, any> = {}) {
super(message);
}
}

export class UserError extends ApplicationError {}
81 changes: 75 additions & 6 deletions nx-dev/data-access-ai/src/lib/utils.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,7 @@
import { CreateChatCompletionResponse } from 'openai';
import {
ChatCompletionRequestMessageRoleEnum,
CreateChatCompletionResponse,
} from 'openai';
export interface PageSection {
id: number;
page_id: number;
Expand All @@ -12,11 +15,6 @@ export interface PageSection {
/**
 * Extracts the assistant's reply text from a chat completion response.
 *
 * @param response - The completion object returned by the OpenAI API.
 * @returns The content of the first choice's message, or an empty string
 *          when the message or its content is absent.
 */
export function getMessageFromResponse(
  response: CreateChatCompletionResponse
): string {
  const [firstChoice] = response.choices;
  return firstChoice.message?.content ?? '';
}

Expand Down Expand Up @@ -85,3 +83,74 @@ async function is404(url: string): Promise<boolean> {
}
}
}

/**
 * Validates that the environment variables required by the AI data-access
 * layer are present.
 *
 * @param openAiKey - Value of NX_OPENAI_KEY.
 * @param supabaseUrl - Value of NX_NEXT_PUBLIC_SUPABASE_URL.
 * @param supabaseServiceKey - Value of NX_SUPABASE_SERVICE_ROLE_KEY.
 * @throws ApplicationError when any of the three values is missing/falsy.
 */
export function checkEnvVariables(
  openAiKey?: string,
  supabaseUrl?: string,
  supabaseServiceKey?: string
) {
  // Pair each value with its variable name so the checks stay in one place.
  const requiredVars: Array<[string | undefined, string]> = [
    [openAiKey, 'NX_OPENAI_KEY'],
    [supabaseUrl, 'NX_NEXT_PUBLIC_SUPABASE_URL'],
    [supabaseServiceKey, 'NX_SUPABASE_SERVICE_ROLE_KEY'],
  ];
  for (const [value, name] of requiredVars) {
    if (!value) {
      throw new ApplicationError(`Missing environment variable ${name}`);
    }
  }
}

/**
 * Base error type for internal (server-side) failures.
 * Carries an optional structured payload for extra logging context.
 */
export class ApplicationError extends Error {
  // Arbitrary contextual details attached at the throw site; defaults to {}.
  public data: Record<string, any>;

  constructor(message: string, data: Record<string, any> = {}) {
    super(message);
    this.data = data;
  }
}

// Error caused by invalid caller input (e.g. a missing query), as opposed to
// internal failures represented by ApplicationError itself.
export class UserError extends ApplicationError {}

/**
 * Builds the message list for the next chat-completion request and appends
 * the new turn to the running history.
 *
 * First call (empty history): the request is seeded with the system prompt
 * and a user message embedding the documentation context plus the question.
 * Follow-up calls: the previous assistant reply and a user message with just
 * the new question are appended to the accumulated history.
 *
 * NOTE: mutates `chatFullHistory` in place (via push) and also returns it.
 *
 * @param chatFullHistory - Running conversation history; mutated in place.
 * @param query - The user's current question.
 * @param contextText - Documentation context (only used on the first turn).
 * @param prompt - System prompt (only used on the first turn).
 * @param aiResponse - Previous assistant reply (only used on follow-ups).
 * @returns The messages to send to the API and the updated history.
 */
export function initializeChat(
  chatFullHistory: ChatItem[],
  query: string,
  contextText: string,
  prompt: string,
  aiResponse?: string
): { chatMessages: ChatItem[]; chatHistory: ChatItem[] } {
  const isFollowUp = chatFullHistory.length > 0;

  const newTurn: ChatItem[] = isFollowUp
    ? [
        {
          role: ChatCompletionRequestMessageRoleEnum.Assistant,
          content: aiResponse ?? '',
        },
        {
          role: ChatCompletionRequestMessageRoleEnum.User,
          content: `Here is the question you should answer: ${query}`,
        },
      ]
    : [
        { role: ChatCompletionRequestMessageRoleEnum.System, content: prompt },
        {
          role: ChatCompletionRequestMessageRoleEnum.User,
          content: `
Here is the Nx documentation you should use: ${contextText}.
Here is the question you should answer: ${query}
`,
        },
      ];

  // The request payload includes the prior history only on follow-up turns.
  const chatMessages: ChatItem[] = isFollowUp
    ? [...chatFullHistory, ...newTurn]
    : [...newTurn];

  chatFullHistory.push(...newTurn);

  return { chatMessages, chatHistory: chatFullHistory };
}

// A single conversation message: who said it (system/user/assistant) and
// the text that was said.
export interface ChatItem {
role: ChatCompletionRequestMessageRoleEnum;
content: string;
}

0 comments on commit d572e27

Please sign in to comment.