Check for model max context
Keskimaki committed Apr 25, 2023
1 parent 2a9d88c commit 4299fac
Showing 1 changed file with 7 additions and 3 deletions.
10 changes: 7 additions & 3 deletions src/server/router.ts
@@ -73,16 +73,20 @@ router.post('/stream', async (req, res) => {
   // const usageAllowed = await checkUsage(user, service)
   // if (!usageAllowed) return res.status(403).send('Usage limit reached')
 
+  const encoding = getEncoding(options.model)
+  let tokenCount = calculateUsage(options, encoding)
+
+  // gpt-3.5-turbo has maximum context of 4096 tokens
+  if (tokenCount > 4000)
+    return res.status(403).send('Model maximum context reached')
+
   options.user = hashData(user.id)
   const stream = await completionStream(options)
 
   if (isError(stream)) return res.status(424).send(stream)
 
   res.setHeader('content-type', 'text/plain')
 
-  const encoding = getEncoding(options.model)
-
-  let tokenCount = calculateUsage(options, encoding)
   // https://github.com/openai/openai-node/issues/18#issuecomment-1493132878
   stream.on('data', (chunk: Buffer) => {
     // Messages in the event stream are separated by a pair of newline characters.
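The substance of the change: the prompt's token count is now computed and checked before completionStream is called, so oversized requests are rejected up front instead of failing once the stream is open. The helpers getEncoding and calculateUsage are not shown in this hunk; the sketch below is a hypothetical reading of what they might look like, assuming a chat-style options object and the js-tiktoken tokenizer, and is not the repository's actual implementation.

// Hypothetical sketch only: assumes js-tiktoken and a chat-style options shape.
import { encodingForModel, type Tiktoken, type TiktokenModel } from 'js-tiktoken'

interface ChatMessage {
  role: 'system' | 'user' | 'assistant'
  content: string
}

interface CompletionOptions {
  model: TiktokenModel
  messages: ChatMessage[]
  user?: string
}

// Look up the tokenizer that matches the requested model.
const getEncoding = (model: TiktokenModel): Tiktoken => encodingForModel(model)

// Sum the encoded length of every message in the prompt.
const calculateUsage = (options: CompletionOptions, encoding: Tiktoken): number =>
  options.messages.reduce(
    (total, message) => total + encoding.encode(message.content).length,
    0,
  )

// Mirrors the guard added in this commit: reject before calling the API.
const exceedsContext = (options: CompletionOptions): boolean =>
  calculateUsage(options, getEncoding(options.model)) > 4000

Checking against 4000 rather than 4096 leaves a small margin of the gpt-3.5-turbo context window for the generated completion itself.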
