Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
24 changes: 23 additions & 1 deletion .config.schema.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -51,4 +51,26 @@ properties:
- required: ["event"]
- required: ["after_messages"]
required: ["message", "trigger"]
required: ["features", "messages"]
usage_limits:
type: object
properties:
message_rates:
type: array
items:
type: object
properties:
users:
type: array
items:
type: string
oneOf:
- pattern: "@.+\\..+"
- enum: ["all", "logged_in", "guests"]
max_messages:
type: integer
interval:
type: string
pattern: "^[0-9]+[smhdw]$"
required: ["users", "max_messages", "interval"]
required: ["message_rates"]
required: ["features", "messages", "usage_limits"]
32 changes: 22 additions & 10 deletions bin/chat-chainlit.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,11 +11,16 @@

from conversational_chain.graph import RAGGraphWithMemory
from retreival_chain import create_retrieval_chain
from util.chainlit_helpers import is_feature_enabled, static_messages
from util.chainlit_helpers import is_feature_enabled # save_openai_metrics,
from util.chainlit_helpers import (message_rate_limited, static_messages,
update_search_results)
from util.config_yml import Config, TriggerEvent
from util.embedding_environment import EmbeddingEnvironment
from util.logging import logging

# from langchain_community.callbacks import OpenAICallbackHandler


load_dotenv()
config: Config | None = Config.from_yaml()

Expand Down Expand Up @@ -77,33 +82,40 @@ async def end() -> None:

@cl.on_message
async def main(message: cl.Message) -> None:
if await message_rate_limited(config):
return

await static_messages(config, TriggerEvent.on_message)

message_count: int = cl.user_session.get("message_count", 0) + 1
cl.user_session.set("message_count", message_count)

thread_id: str = cl.user_session.get("thread_id")
cb = cl.AsyncLangchainCallbackHandler(

chainlit_cb = cl.AsyncLangchainCallbackHandler(
stream_final_answer=True,
force_stream_final_answer=True, # we're not using prefix tokens
)
# openai_cb = OpenAICallbackHandler()

enable_postprocess: bool = is_feature_enabled(config, "postprocessing")
result: dict[str, Any] = await llm_graph.ainvoke(
message.content,
callbacks=[cb],
callbacks=[chainlit_cb],
thread_id=thread_id,
enable_postprocess=enable_postprocess,
)

if (
enable_postprocess
and cb.final_stream
and chainlit_cb.final_stream
and len(result["additional_content"]["search_results"]) > 0
):
sent_message: cl.Message = cb.final_stream
search_results_element = cl.CustomElement(
name="SearchResults",
props={"results": result["additional_content"]["search_results"]},
await update_search_results(
result["additional_content"]["search_results"],
chainlit_cb.final_stream,
)
sent_message.elements = [search_results_element] # type: ignore
await sent_message.update()

await static_messages(config, after_messages=message_count)

# save_openai_metrics(message.id, openai_cb)
61 changes: 61 additions & 0 deletions bin/export_nologin_usage.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,61 @@
import csv
import os
from argparse import ArgumentParser
from pathlib import Path

import psycopg
from dotenv import load_dotenv

load_dotenv()

# Connection string for the no-login LangGraph checkpoint database
# (the "<db>_no_login" database, queried read-only by this exporter).
# NOTE(review): assumes the Postgres host resolves as "postgres" (e.g. via
# docker-compose networking) and that the POSTGRES_* env vars are set in the
# environment or .env — confirm; os.getenv returns None (rendered as "None")
# for any missing variable.
LANGGRAPH_NOLOGIN_DB_URI = f"postgresql://{os.getenv('POSTGRES_USER')}:{os.getenv('POSTGRES_PASSWORD')}@postgres:5432/{os.getenv('POSTGRES_LANGGRAPH_DB')}_no_login?sslmode=disable"


def build_query() -> str:
    """Return the SQL selecting the first checkpoint of every thread.

    Picks the root checkpoint (``parent_checkpoint_id IS NULL``) in the
    default namespace, ordered by the checkpoint's embedded timestamp.
    """
    return """
    SELECT
        thread_id,
        checkpoint_id,
        checkpoint->'ts' AS checkpoint_ts
    FROM
        checkpoints
    WHERE
        checkpoint_ns = '' AND
        parent_checkpoint_id IS NULL
    ORDER BY
        checkpoint->'ts';
    """


def main(records_dir: Path) -> None:
    """Export the first checkpoint of every no-login thread to a CSV file.

    Connects to the no-login LangGraph database, runs the query from
    :func:`build_query`, and writes the rows (with a header row when column
    metadata is available) to ``records_dir / "nologin_usage.csv"``.

    Args:
        records_dir: Directory for the output file; created if missing.
    """
    # parents=True so a nested path (e.g. "out/records") also works.
    records_dir.mkdir(parents=True, exist_ok=True)

    query: str = build_query()

    with psycopg.connect(LANGGRAPH_NOLOGIN_DB_URI) as conn:
        with conn.cursor() as cur:
            cur.execute(query)
            # cur.description is None when the statement returns no result set.
            header = [col.name for col in cur.description] if cur.description else None
            records = cur.fetchall()

    if not records:
        print("No new records found.")
        return

    record_file = records_dir / "nologin_usage.csv"

    # newline="" as required by the csv module; explicit encoding so the
    # output is identical across platforms.
    with open(record_file, mode="w", newline="", encoding="utf-8") as file:
        writer = csv.writer(file, lineterminator="\n")
        if header:
            writer.writerow(header)
        writer.writerows(records)

    print("Wrote", record_file)


if __name__ == "__main__":
    # CLI entry point: one optional positional argument naming the output
    # directory, defaulting to ./records.
    arg_parser = ArgumentParser()
    arg_parser.add_argument(
        "records_dir", type=Path, nargs="?", default=Path("records")
    )
    main(arg_parser.parse_args().records_dir)
3 changes: 3 additions & 0 deletions bin/export_records.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,8 @@ def build_query(since_timestamp: str | None) -> str:
FROM steps
LEFT JOIN
feedbacks ON steps."parentId" = feedbacks."forId"
LEFT JOIN
threads ON steps."threadId" = threads.id
WHERE
steps.type IN ('user_message', 'assistant_message') AND
steps."createdAt" > '{since_timestamp}'
Expand All @@ -37,6 +39,7 @@ def build_query(since_timestamp: str | None) -> str:
),
steps."createdAt";
"""
# TODO: add to SELECT , threads.metadata->'openai_metrics'->steps.id::text AS openai_metrics
return query


Expand Down
6 changes: 6 additions & 0 deletions config_default.yml
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,12 @@ features:
enabled: true
user_group: all

usage_limits:
message_rates:
- users: ["all"]
max_messages: 100
interval: 3h

messages:

welcome:
Expand Down
Loading