Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

features: Google's Palm AI Integration, Vector Stores Integration, Unit Tests #130

Open
wants to merge 6 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
53 changes: 53 additions & 0 deletions docs/docs/examples/palmai-bot.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,53 @@
---
sidebar_position: 4
---

# Palm AI bot

This bot makes an API call to Palm AI and processes the user input. It uses the chat-bison-001 model.

```py
from textbase import bot, Message
from textbase.models import Palm
from typing import List

# Load your PalmAI API key
Palm.api_key = ""

# Prompt for chat-bison-001
SYSTEM_PROMPT = """You are chatting with an AI. There are no specific prefixes for responses, so you can ask or talk about anything you like.
The AI will respond in a natural, conversational manner. Feel free to start the conversation with any question or topic, and let's have a
pleasant chat!
"""

@bot()
def on_message(message_history: List[Message], state: dict = None):

# Generate Palm-model-chat-bison-001 response
bot_response = Palm.generate(
system_prompt=SYSTEM_PROMPT,
message_history=message_history
)

response = {
"data": {
"messages": [
{
"data_type": "STRING",
"value": bot_response
}
],
"state": state
},
"errors": [
{
"message": ""
}
]
}

return {
"status_code": 200,
"response": response
}
```
41 changes: 41 additions & 0 deletions examples/palmai-bot/main.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,41 @@
from textbase import bot, Message
from textbase.models import Palm
from typing import List

# NOTE(review): PaLM API key placeholder — must be filled in before running;
# an empty string will fail when Palm.generate is called.
Palm.api_key = ""

# System prompt sent with every request to the chat model.
SYSTEM_PROMPT = """You are chatting with an AI. There are no specific prefixes for responses, so you can ask or talk about anything you like.
The AI will respond in a natural, conversational manner. Feel free to start the conversation with any question or topic, and let's have a
pleasant chat!
"""

@bot()
def on_message(message_history: List[Message], state: dict = None):
    """Handle an incoming message: ask PaLM for a reply and wrap it in
    the Textbase response envelope (messages + carried-over state)."""

    # Ask the model for a reply to the conversation so far.
    reply = Palm.generate(
        system_prompt=SYSTEM_PROMPT,
        message_history=message_history,
    )

    # Envelope shape expected by the Textbase runtime: a data section
    # with the messages and state, plus an (empty) errors slot.
    envelope = {
        "data": {
            "messages": [
                {"data_type": "STRING", "value": reply},
            ],
            "state": state,
        },
        "errors": [
            {"message": ""},
        ],
    }

    return {"status_code": 200, "response": envelope}
85 changes: 85 additions & 0 deletions stores/openai-bot/main.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,85 @@
from textbase import bot, Message
from textbase.models import OpenAI
from typing import List
import chromadb
from chromadb.api.types import Document, Embeddings
from chromadb.utils.embedding_functions import OpenAIEmbeddingFunction

# NOTE(review): OpenAI API key placeholder — must be filled in before running.
OpenAI.api_key = ""

# Single reference document indexed into the vector store below.
DOCUMENT = "Textbase is an awesome app"

# Embedding function backed by OpenAI; note the key is captured here,
# so it must be set before this line executes.
embed_function = OpenAIEmbeddingFunction(OpenAI.api_key)


def create_chroma_db(documents, name):
    """Create an in-memory Chroma collection called *name* and index every
    entry of *documents* into it, using each document's position as its id."""
    client = chromadb.Client()
    collection = client.create_collection(
        name=name, embedding_function=embed_function)
    for idx, doc in enumerate(documents):
        collection.add(documents=doc, ids=str(idx))
    return collection


# Build the store at import time; on_message queries this collection.
db = create_chroma_db([DOCUMENT], "textbasedb")


def get_relevant_passage(query, db):
    """Return the single most relevant stored passage for *query*."""
    result = db.query(query_texts=[query], n_results=1)
    return result['documents'][0][0]


def make_prompt(query, relevant_passage):
    """Build the grounded-QA prompt sent to the model.

    Quotes are stripped and newlines flattened in *relevant_passage* so it
    can be embedded safely inside the single-quoted PASSAGE slot of the
    template. Fixes the "converstional" typo in the instruction text.
    """
    escaped = relevant_passage.replace(
        "'", "").replace('"', "").replace("\n", " ")
    prompt = ("""You are a helpful and informative bot that answers questions using text from the reference passage included below. \
Be sure to respond in a complete sentence, being comprehensive, including all relevant background information. \
However, you are talking to a non-technical audience, so be sure to break down complicated concepts and \
strike a friendly and conversational tone. \
If the passage is irrelevant to the answer, you may ignore it.
QUESTION: '{query}'
PASSAGE: '{relevant_passage}'

ANSWER:
""").format(query=query, relevant_passage=escaped)

    return prompt


@bot()
def on_message(message_history: List[Message], state: dict = None):
    """Answer the latest user message with retrieval-augmented OpenAI chat:
    fetch the most relevant stored passage, fold it into the system prompt,
    and return the model's reply in the Textbase envelope."""

    latest = message_history[-1]['content'][0]['value']
    context = get_relevant_passage(latest, db)
    grounded_prompt = make_prompt(latest, context)

    reply = OpenAI.generate(
        system_prompt=grounded_prompt,
        message_history=message_history,
        model="gpt-3.5-turbo",
    )

    return {
        "status_code": 200,
        "response": {
            "data": {
                "messages": [
                    {"data_type": "STRING", "value": reply},
                ],
                "state": state,
            },
            "errors": [
                {"message": ""},
            ],
        },
    }
90 changes: 90 additions & 0 deletions stores/palmai-bot/main.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,90 @@
from textbase import bot, Message
import google.generativeai as palm
from typing import List
import chromadb
from chromadb.api.types import Document, Embeddings

# NOTE(review): PaLM API key placeholder — must be set before running.
palm.configure(api_key='')

# Single reference document indexed into the vector store below.
DOCUMENT = "Textbase is an awesome app"


def embed_function(texts: Document) -> Embeddings:
    """Embed each text with PaLM's embedding-gecko model and return
    the list of embedding vectors."""
    vectors = []
    for text in texts:
        result = palm.generate_embeddings(
            model='models/embedding-gecko-001', text=text)
        vectors.append(result['embedding'])
    return vectors


def create_chroma_db(documents, name):
    """Create an in-memory Chroma collection called *name* and index every
    entry of *documents* into it, using each document's position as its id."""
    client = chromadb.Client()
    collection = client.create_collection(
        name=name, embedding_function=embed_function)
    for idx, doc in enumerate(documents):
        collection.add(documents=doc, ids=str(idx))
    return collection


# Build the store at import time; on_message queries this collection.
db = create_chroma_db([DOCUMENT], "textbasedb")


def get_relevant_passage(query, db):
    """Return the single most relevant stored passage for *query*."""
    result = db.query(query_texts=[query], n_results=1)
    return result['documents'][0][0]


def make_prompt(query, relevant_passage):
    """Build the grounded-QA prompt sent to the model.

    Quotes are stripped and newlines flattened in *relevant_passage* so it
    can be embedded safely inside the single-quoted PASSAGE slot of the
    template. Fixes the "converstional" typo in the instruction text.
    """
    escaped = relevant_passage.replace(
        "'", "").replace('"', "").replace("\n", " ")
    prompt = ("""You are a helpful and informative bot that answers questions using text from the reference passage included below. \
Be sure to respond in a complete sentence, being comprehensive, including all relevant background information. \
However, you are talking to a non-technical audience, so be sure to break down complicated concepts and \
strike a friendly and conversational tone. \
If the passage is irrelevant to the answer, you may ignore it.
QUESTION: '{query}'
PASSAGE: '{relevant_passage}'

ANSWER:
""").format(query=query, relevant_passage=escaped)

    return prompt


@bot()
def on_message(message_history: List[Message], state: dict = None):
    """Answer the latest user message with retrieval-augmented PaLM text
    generation: fetch the most relevant stored passage, fold it into the
    prompt, and return the model's reply in the Textbase envelope."""

    latest = message_history[-1]['content'][0]['value']
    context = get_relevant_passage(latest, db)
    grounded_prompt = make_prompt(latest, context)

    completion = palm.generate_text(
        prompt=grounded_prompt,
        model="models/text-bison-001",
        temperature=0.65,
        max_output_tokens=1000
    )

    # Take the first candidate's text as the reply.
    reply = completion.candidates[0]['output']

    return {
        "status_code": 200,
        "response": {
            "data": {
                "messages": [
                    {"data_type": "STRING", "value": reply},
                ],
                "state": state,
            },
            "errors": [
                {"message": ""},
            ],
        },
    }
33 changes: 33 additions & 0 deletions tests/test_bot.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
import pytest
from textbase import bot

class MockRequest:
    """Minimal stand-in for an HTTP request object, carrying only the
    attributes the bot decorator inspects."""

    def __init__(self, method, json_data):
        # HTTP verb (e.g. "OPTIONS", "POST") and parsed JSON body.
        self.method = method
        self.json_data = json_data

def mock_bot_function(*args):
    """Stub bot handler: ignores its input and returns a canned
    204 response tuple with a permissive CORS header.

    (Removed the unused ``request = args[0]`` local, which also raised
    IndexError if the stub was ever called with no arguments.)
    """
    return ('', 204, {'Access-Control-Allow-Origin': '*'})

def test_bot():
    """An OPTIONS request should short-circuit to the CORS preflight
    response: empty body, 204 status, and the standard CORS headers."""
    request = MockRequest("OPTIONS", {
        "data": {
            "message_history": [],
            "state": {}
        }
    })
    result = bot()(mock_bot_function)(request)

    assert isinstance(result, tuple)
    assert len(result) == 3

    body, status, headers = result
    assert body == ''
    assert status == 204
    assert headers == {
        'Access-Control-Allow-Origin': '*',
        'Access-Control-Allow-Methods': 'GET',
        'Access-Control-Allow-Headers': 'Content-Type',
        'Access-Control-Max-Age': '3600'
    }

Loading