Skip to content

Commit

Permalink
Report on tokens remaining, add clarifications.
Browse files Browse the repository at this point in the history
  • Loading branch information
liffiton committed Jul 30, 2024
1 parent a1dceed commit 15c2774
Show file tree
Hide file tree
Showing 6 changed files with 79 additions and 42 deletions.
13 changes: 7 additions & 6 deletions src/codehelp/helper.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,7 +46,8 @@
@bp.route("/ctx/<int:class_id>/<string:ctx_name>")
@login_required
@class_enabled_required
def help_form(query_id: int | None = None, class_id: int | None = None, ctx_name: str | None = None) -> str | Response:
@with_llm(spend_token=False) # get information on the selected LLM, tokens remaining
def help_form(llm: LLMConfig, query_id: int | None = None, class_id: int | None = None, ctx_name: str | None = None) -> str | Response:
db = get_db()
auth = get_auth()

Expand Down Expand Up @@ -99,7 +100,7 @@ def help_form(query_id: int | None = None, class_id: int | None = None, ctx_name

history = get_history()

return render_template("help_form.html", query=query_row, history=history, contexts=contexts, selected_context_name=selected_context_name)
return render_template("help_form.html", llm=llm, query=query_row, history=history, contexts=contexts, selected_context_name=selected_context_name)


@bp.route("/view/<int:query_id>")
Expand Down Expand Up @@ -224,7 +225,7 @@ def record_response(query_id: int, responses: list[dict[str, str]], texts: dict[
@bp.route("/request", methods=["POST"])
@login_required
@class_enabled_required
@with_llm()
@with_llm(spend_token=True)
def help_request(llm: LLMConfig) -> Response:
if 'context' in request.form:
context = get_context_by_name(request.form['context'])
Expand All @@ -246,7 +247,7 @@ def help_request(llm: LLMConfig) -> Response:

@bp.route("/load_test", methods=["POST"])
@admin_required
@with_llm(use_system_key=True) # get a populated LLMConfig
@with_llm(use_system_key=True) # get a populated LLMConfig; not actually used (API is mocked)
def load_test(llm: LLMConfig) -> Response:
# Require that we're logged in as the load_test admin user
auth = get_auth()
Expand Down Expand Up @@ -284,7 +285,7 @@ def post_helpful() -> str:
@bp.route("/topics/html/<int:query_id>", methods=["GET", "POST"])
@login_required
@tester_required
@with_llm()
@with_llm(spend_token=False)
def get_topics_html(llm: LLMConfig, query_id: int) -> str:
topics = get_topics(llm, query_id)
if not topics:
Expand All @@ -296,7 +297,7 @@ def get_topics_html(llm: LLMConfig, query_id: int) -> str:
@bp.route("/topics/raw/<int:query_id>", methods=["GET", "POST"])
@login_required
@tester_required
@with_llm()
@with_llm(spend_token=False)
def get_topics_raw(llm: LLMConfig, query_id: int) -> list[str]:
topics = get_topics(llm, query_id)
return topics
Expand Down
10 changes: 8 additions & 2 deletions src/codehelp/templates/help_form.html
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,13 @@
{{ auth['class_name'] }}
</div>
</div>
{% elif llm.tokens_remaining != None %}
<div class="notification is-size-5">
<strong>Using free queries:</strong>
{{ llm.tokens_remaining }} queries remaining.
<button class="button is-small is-rounded p-2 ml-3" type="button" onClick="document.getElementById('free_query_dialog').showModal();">what's this?</button>
</div>
{% include "free_query_dialog.html" %}
{% endif %}

{% if contexts %}
Expand All @@ -37,8 +44,7 @@
{% endif %}

<div class="field is-horizontal">
<div class="field-label is-normal">
<label class="label" for="code">Code:</label>
<div class="field-label is-normal"> <label class="label" for="code">Code:</label>
<p class="help-text">Copy just the <i>most relevant</i> part of your code here. Responses will be more helpful when you include only code relevant to your issue.</p>
</div>
<div class="field-body">
Expand Down
42 changes: 25 additions & 17 deletions src/gened/openai.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,9 +33,10 @@ class NoTokensError(Exception):
class LLMConfig:
client: AsyncOpenAI
model: str
tokens_remaining: int | None = None # None if current user is not using tokens


def _get_llm(*, use_system_key: bool) -> LLMConfig:
def _get_llm(*, use_system_key: bool, spend_token: bool) -> LLMConfig:
''' Get model details and an initialized OpenAI client based on the
arguments and the current user and class.
Expand All @@ -46,11 +47,10 @@ def _get_llm(*, use_system_key: bool) -> LLMConfig:
b) User class config is in the user class.
c) If there is a current class but it is disabled or has no key, raise an error.
3) If the user is a local-auth user, the system API key and GPT-3.5 is used.
4) Otherwise, we use tokens.
The user must have 1 or more tokens remaining.
4) Otherwise, we use tokens and the system API key / model.
If spend_token is True, the user must have 1 or more tokens remaining.
If they have 0 tokens, raise an error.
Otherwise, their token count is decremented, and the system API
key is used with GPT-3.5.
Otherwise, their token count is decremented.
Returns:
LLMConfig with an OpenAI client and model name.
Expand All @@ -59,7 +59,7 @@ def _get_llm(*, use_system_key: bool) -> LLMConfig:
'''
db = get_db()

def make_system_client() -> LLMConfig:
def make_system_client(tokens_remaining: int | None = None) -> LLMConfig:
""" Factory function to initialize a default client (using the system key)
only if/when needed.
"""
Expand All @@ -70,6 +70,7 @@ def make_system_client() -> LLMConfig:
return LLMConfig(
client=AsyncOpenAI(api_key=system_key),
model=system_model,
tokens_remaining=tokens_remaining,
)

if use_system_key:
Expand Down Expand Up @@ -124,46 +125,53 @@ def make_system_client() -> LLMConfig:
return make_system_client()

tokens = user_row['query_tokens']

if tokens == 0:
raise NoTokensError

# user.tokens > 0, so decrement it and use the system key
db.execute("UPDATE users SET query_tokens=query_tokens-1 WHERE id=?", [auth['user_id']])
db.commit()
return make_system_client()
if spend_token:
# user.tokens > 0, so decrement it and use the system key
db.execute("UPDATE users SET query_tokens=query_tokens-1 WHERE id=?", [auth['user_id']])
db.commit()
tokens -= 1

return make_system_client(tokens_remaining = tokens)


# For decorator type hints
P = ParamSpec('P')
R = TypeVar('R')


def with_llm(*, use_system_key: bool = False) -> Callable[[Callable[P, R]], Callable[P, str | R]]:
def with_llm(*, use_system_key: bool = False, spend_token: bool = False) -> Callable[[Callable[P, R]], Callable[P, str | R]]:
'''Decorate a view function that requires an LLM and API key.
Assigns an 'llm' named argument.
Checks that the current user has access to an LLM and API key (configured
in an LTI consumer or user-created class), then passes the appropriate
model info and API key to the wrapped view function, if granted.
LLM config to the wrapped view function, if granted.
Arguments:
use_system_key: If True, all users can access this, and they use the
system API key and GPT-3.5.
system API key and model.
spend_token: If True *and* the user is using tokens, then check
that they have tokens remaining and decrement their
tokens.
'''
def decorator(f: Callable[P, R]) -> Callable[P, str | R]:
@wraps(f)
def decorated_function(*args: P.args, **kwargs: P.kwargs) -> str | R:
try:
llm = _get_llm(use_system_key=use_system_key)
llm = _get_llm(use_system_key=use_system_key, spend_token=spend_token)
except ClassDisabledError:
flash("Error: The current class is archived or disabled. Request cannot be submitted.")
flash("Error: The current class is archived or disabled.")
return render_template("error.html")
except NoKeyFoundError:
flash("Error: No API key set. Request cannot be submitted.")
flash("Error: No API key set. An API key must be set by the instructor before this page can be used.")
return render_template("error.html")
except NoTokensError:
flash("You have used all of your free tokens. If you are using this application in a class, please connect using the link from your class. Otherwise, you can create a class and add an OpenAI API key or contact us if you want to continue using this application.", "warning")
flash("You have used all of your free queries. If you are using this application in a class, please connect using the link from your class for continued access. Otherwise, you can create a class and add an OpenAI API key or contact us if you want to continue using this application.", "warning")
return render_template("error.html")

kwargs['llm'] = llm
Expand Down
9 changes: 9 additions & 0 deletions src/gened/templates/free_query_dialog.html
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
<!-- Modal dialog explaining the free-query allowance to users who are not
     connected to a class. Opened via showModal() from a "what's this?"
     button elsewhere in the page; the delete button below calls close().
     Included (via Jinja {% include %}) by templates that display the
     remaining free-query count. -->
<dialog id="free_query_dialog" style="width: max-content; min-width: min(32em, 100vw);">
<div class="content box" style="position: relative;">
<h2>Free Queries</h2>
<!-- Close button: dismisses the dialog without any other side effects. -->
<button class="delete" type="button" style="position: absolute; right: 1rem; top: 1rem;" aria-label="close" onClick="document.getElementById('free_query_dialog').close();"></button>
<p>You have a limited number of free queries to try out {{ config['APPLICATION_TITLE'] }} when you are not connected to a class.</p>
<p>If you are using this application in a class, please connect using the link from your class for unlimited use.</p>
<p>Otherwise, you can <a href="{{ url_for('docs.page', name='manual_class_creation') }}">create a class</a> and add an OpenAI API key or contact us if you want to continue using {{ config['APPLICATION_TITLE'] }}.</p>
</div>
</dialog>
16 changes: 12 additions & 4 deletions src/gened/templates/profile_view.html
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@

{% block body %}
<style type="text/css">
dl.profile { display: grid; grid-template-columns: minmax(6em, max-content) 1fr; gap: 1em; }
dl.profile { display: grid; grid-template-columns: minmax(min-content, max-content) 1fr; gap: 1em; }
dl.profile dt { font-weight: bold; text-align: right; }
dl.profile dd { margin: 0; }
</style>
Expand Down Expand Up @@ -50,8 +50,11 @@ <h1 class="title">Your Profile</h1>
<dt>Queries:</dt>
<dd>{{ user.num_queries }} total, {{ user.num_recent_queries }} in the past week.</dd>
{% if not auth['role'] %}
<dt>Tokens:</dt>
<dd>{{ user.query_tokens }} remaining. <span class="ml-3 is-size-6 has-text-grey">(These are just for trying out {{ config['APPLICATION_TITLE'] }} -- each query will use one. They are only used when you do not have a class active.)</span></dd>
<dt>Free Queries:</dt>
<dd>
{{ user.query_tokens }} remaining.
<button class="button is-small is-rounded p-2 ml-3" type="button" onClick="document.getElementById('free_query_dialog').showModal();">what's this?</button>
</dd>
{% endif %}
</dl>
<h2 class="title is-size-3">
Expand Down Expand Up @@ -102,6 +105,8 @@ <h2 class="title is-size-3">
{% endif %}
</dl>

</div>
{% if user.provider_name not in ['lti', 'demo'] %}
<dialog id="new_class_dialog" style="width: 75%; min-width: min(32em, 100vw);">
<div class="content box">
<h2>Create a New Class</h2>
Expand Down Expand Up @@ -133,6 +138,9 @@ <h2>Create a New Class</h2>
</form>
</div>
</dialog>
</div>
{% endif %}
{% if not auth['role'] %}
{% include "free_query_dialog.html" %}
{% endif %}
</section>
{% endblock %}
31 changes: 18 additions & 13 deletions tests/test_demo_links.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,30 +24,35 @@ def test_valid_demo_link(client):
response = client.get("/demo/test_valid")
assert "Invalid demo link." not in response.text

# test_data.sql assigns 3 tokens
response = client.get("/help/")
assert response.status_code == 200 # unauthorized in all of these cases

# Try 5 queries, verifying the tokens work (test_data.sql assigns 3 for this demo link)
for i in range(5):
response1 = client.get("/help/")
test_code = f"_test_code_{i}_"
response = client.post(
response2 = client.post(
'/help/request',
data={'code': test_code, 'error': '_test_error_', 'issue': '_test_issue_'}
data={'code': test_code, 'error': f'_test_error_{i}_', 'issue': f'_test_issue_{i}_'}
)
if i < 3:
assert response1.status_code == 200  # help form loads while free queries remain
assert f"{3-i} queries remaining." in response1.text
# successful requests should redirect to a response page with the same items
assert response.status_code == 302 # redirect
response = client.get(response.location)
assert test_code in response.text
assert '_test_error_' in response.text
assert '_test_issue_' in response.text
assert response2.status_code == 302 # redirect
response3 = client.get(response2.location)
assert test_code in response3.text
assert f'_test_error_{i}_' in response3.text
assert f'_test_issue_{i}_' in response3.text
else:
assert response1.status_code == 200  # out-of-queries message is shown on the (200) form page
assert "You have used all of your free queries." in response1.text
# those without tokens remaining return an error page directly
assert response.status_code == 200
assert "You have used all of your free tokens." in response.text
assert test_code not in response.text
assert '_test_error_' not in response.text
assert '_test_issue_' not in response.text
assert response2.status_code == 200
assert "You have used all of your free queries." in response2.text
assert test_code not in response2.text
assert '_test_error_' not in response2.text
assert '_test_issue_' not in response2.text


def test_logged_in(auth, client):
Expand Down

0 comments on commit 15c2774

Please sign in to comment.