Skip to content

Commit

Permalink
Modify our excerpt prompt a bit more, trying to ensure the summary generated pairs well with the title. Exclude the title from the content we summarize. Increase temperature value.
Browse files Browse the repository at this point in the history
  • Loading branch information
dkotter committed Jul 27, 2023
1 parent a8d37b4 commit fe72197
Showing 1 changed file with 9 additions and 5 deletions.
14 changes: 9 additions & 5 deletions includes/Classifai/Providers/OpenAI/ChatGPT.php
Original file line number Diff line number Diff line change
Expand Up @@ -557,7 +557,7 @@ public function generate_excerpt( int $post_id = 0, array $args = [] ) {
*
* @return {string} Prompt.
*/
$prompt = apply_filters( 'classifai_chatgpt_excerpt_prompt', 'Provide a teaser for the following text using a maximum of ' . $excerpt_length . ' words', $post_id, $excerpt_length );
$prompt = apply_filters( 'classifai_chatgpt_excerpt_prompt', sprintf( 'Summarize the following message using a maximum of %d words. Ensure this summary pairs well with the following text: %s.', $excerpt_length, get_the_title( $post_id ) ), $post_id, $excerpt_length );

/**
* Filter the request body before sending to ChatGPT.
Expand All @@ -575,12 +575,16 @@ public function generate_excerpt( int $post_id = 0, array $args = [] ) {
[
'model' => $this->chatgpt_model,
'messages' => [
[
'role' => 'system',
'content' => $prompt,
],
[
'role' => 'user',
'content' => $prompt . ': ' . $this->get_content( $post_id, $excerpt_length, true, $args['content'] ) . '',
'content' => $this->get_content( $post_id, $excerpt_length, false, $args['content'] ) . '',
],
],
'temperature' => 0,
'temperature' => 0.9,
],
$post_id
);
Expand Down Expand Up @@ -731,10 +735,10 @@ public function get_content( int $post_id = 0, int $return_length = 0, bool $use
/**
* We then subtract those tokens from the max number of tokens ChatGPT allows
* in a single request, as well as subtracting out the number of tokens in our
* prompt (13). ChatGPT counts both the tokens in the request and in
* prompt (~50). ChatGPT counts both the tokens in the request and in
* the response towards the max.
*/
$max_content_tokens = $this->max_tokens - $return_tokens - 13;
$max_content_tokens = $this->max_tokens - $return_tokens - 50;

if ( empty( $post_content ) ) {
$post = get_post( $post_id );
Expand Down

0 comments on commit fe72197

Please sign in to comment.