forked from crowetic/commerce

commit d12fb77ef9 (parent 59a00fe9ea)
caching
@@ -1,6 +1,7 @@
 import { OpenAIStream, StreamingTextResponse } from 'ai';
 import { getCart } from 'lib/shopify';
 import { Product } from 'lib/shopify/types';
+import { unstable_cache } from 'next/cache';
 import { cookies } from 'next/headers';
 import OpenAI from 'openai';
 import { Suspense } from 'react';
@@ -24,10 +25,7 @@ export async function FitToCart({ currentProduct }: { currentProduct: Product })
 async function FitToCartInternal({ currentProduct }: { currentProduct: Product }) {
   const pitch = await getPitch({ currentProduct });
   if (!pitch) return null;
-  let text = await pitch.text();
-  if (!text) return null;
-  text = text.trim().replace(/^"/, '').replace(/"$/, '');
-  return <div className="mt-6 text-sm leading-tight dark:text-white/[60%]">{text}</div>;
+  return <div className="mt-6 text-sm leading-tight dark:text-white/[60%]">{pitch}</div>;
 }

 const fireworks = new OpenAI({
@@ -54,8 +52,7 @@ export async function getPitch({ currentProduct }: { currentProduct: Product })
     ' and '
   )} in their shopping cart should also purchase the "${currentProduct.title}"`;

-  // Request the Fireworks API for the response based on the prompt
-  const response = await fireworks.chat.completions.create({
+  const query = {
     model: 'accounts/fireworks/models/mistral-7b-instruct-4k',
     stream: true,
     messages: buildPrompt(prompt),
@@ -63,11 +60,25 @@ export async function getPitch({ currentProduct }: { currentProduct: Product })
     temperature: 0.75,
     top_p: 1,
     frequency_penalty: 1
-  });
+  } as const;

+  return unstable_cache(async () => {
+    // Request the Fireworks API for the response based on the prompt
+    const response = await fireworks.chat.completions.create(query);
+
   // Convert the response into a friendly text-stream
   const stream = OpenAIStream(response);

   // Respond with the stream
-  return new StreamingTextResponse(stream);
+  const streamingResponse = new StreamingTextResponse(stream);
+
+  let text = await streamingResponse.text();
+
+  // Remove the quotes from the response that the LLM sometimes adds.
+  text = text.trim().replace(/^"/, '').replace(/"$/, '');
+
+  return text;
+  }, [
+    JSON.stringify(query),
+    '1.0',
+    process.env.VERCEL_BRANCH_URL || '',
+    process.env.NODE_ENV || ''
+  ])();
 }
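
The pattern this commit introduces is `unstable_cache` from `next/cache`: wrap the expensive async call in a cached function keyed on an explicit key array, then invoke that wrapped function immediately. Below is a minimal sketch of the same pattern under simplified assumptions; `generatePitch` and `getCachedPitch` are hypothetical names standing in for the Fireworks completion call, not part of this commit.

```ts
// Minimal sketch of the unstable_cache pattern; helper names are hypothetical.
import { unstable_cache } from 'next/cache';

// Hypothetical stand-in for the Fireworks chat-completion call.
async function generatePitch(prompt: string): Promise<string> {
  return `"A short pitch for: ${prompt}"`;
}

export async function getCachedPitch(prompt: string): Promise<string> {
  // Everything that affects the output goes into the query object...
  const query = { prompt, model: 'example-model', temperature: 0.75 } as const;

  // ...and the serialized query (plus a version string and the environment)
  // becomes the cache key. unstable_cache(fn, keyParts) returns a cached
  // wrapper around fn; invoking it immediately yields the memoized result.
  return unstable_cache(
    async () => {
      const text = await generatePitch(query.prompt);
      // Strip the surrounding quotes the model sometimes adds.
      return text.trim().replace(/^"/, '').replace(/"$/, '');
    },
    [JSON.stringify(query), '1.0', process.env.NODE_ENV || '']
  )();
}
```

Keying on `JSON.stringify(query)` together with a version string and the deployment environment, as the diff does, means a fresh cache entry is created whenever the prompt, model parameters, cache version, or environment changes.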