2022-12-27 06:38:47 +00:00
|
|
|
from typing import Any, List, Tuple
|
2022-12-26 15:43:10 +00:00
|
|
|
|
2022-12-27 06:38:47 +00:00
|
|
|
import logging
|
2022-12-30 05:28:26 +00:00
|
|
|
import os
|
2022-12-25 17:15:24 +00:00
|
|
|
import sys
|
|
|
|
from pathlib import Path
|
|
|
|
|
2022-12-30 05:28:26 +00:00
|
|
|
import dotenv
|
2023-01-10 12:50:43 +00:00
|
|
|
import openai
|
|
|
|
from transformers import list_models, pipeline
|
2022-12-25 17:15:24 +00:00
|
|
|
|
2022-12-27 06:38:47 +00:00
|
|
|
# Route INFO-and-above records to QA.log, truncating the file on every run.
logging.basicConfig(
    format="%(name)s - %(levelname)s - %(message)s",
    level=logging.INFO,
    filemode="w",
    filename="QA.log",
)
|
|
|
|
|
2022-12-25 17:15:24 +00:00
|
|
|
# Make the project's shared helper packages (NLP data tools and utils)
# importable from this script's location.
_PROJECT_ROOT = str(Path(__file__).parent.parent.parent)
sys.path.append(_PROJECT_ROOT + "/tools/NLP/data")
sys.path.append(_PROJECT_ROOT + "/utils")
|
|
|
|
import config
|
2022-12-25 17:15:24 +00:00
|
|
|
import internet
|
|
|
|
|
2023-01-10 12:50:43 +00:00
|
|
|
# Load secrets (API keys, session tokens) from a local .env file into the
# process environment, where answer() looks them up when not passed in.
dotenv.load_dotenv()
|
|
|
|
|
2022-12-26 15:43:10 +00:00
|
|
|
|
2022-12-30 06:50:36 +00:00
|
|
|
def answer(
    query: str,
    model: str = "openai-ChatGPT",
    GOOGLE_SEARCH_API_KEY: str = "",
    GOOGLE_SEARCH_ENGINE_ID: str = "",
    OPENAI_API_KEY: str = "",
    CHATGPT_SESSION_TOKEN: str = "",
) -> tuple[Any, list[str]]:
    """Answer ``query`` with the selected language model.

    Model naming convention:
        * ``openai-*`` -- OpenAI models (e.g. ``openai-ChatGPT``)
        * ``hf-*``     -- HuggingFace question-answering models

    Any credential argument left as ``""`` is read from the environment
    instead.  Returns a ``(answer, sources)`` tuple; the answering logic is
    still stubbed out (the ``internet.Google`` search calls are commented),
    so the result is currently a placeholder.
    """
    # Fall back to environment variables for credentials the caller omitted.
    if GOOGLE_SEARCH_API_KEY == "":
        GOOGLE_SEARCH_API_KEY = str(os.environ.get("GOOGLE_SEARCH_API_KEY"))
    if GOOGLE_SEARCH_ENGINE_ID == "":
        GOOGLE_SEARCH_ENGINE_ID = str(os.environ.get("GOOGLE_SEARCH_ENGINE_ID"))
    if OPENAI_API_KEY == "":
        OPENAI_API_KEY = str(os.environ.get("OPENAI_API_KEY"))
    # BUGFIX: previously the key was pushed to openai only when it came from
    # the environment, so a caller-supplied OPENAI_API_KEY was ignored.
    openai.api_key = OPENAI_API_KEY
    if CHATGPT_SESSION_TOKEN == "":
        CHATGPT_SESSION_TOKEN = str(os.environ.get("CHATGPT_SESSION_TOKEN"))

    # BUGFIX: the original tested ``model.startswith(...) == 0``; startswith
    # returns a bool, so ``== 0`` inverted every prefix check and this
    # default-model fallback could never trigger.
    if not (model.startswith("openai-") or model.startswith("hf-")):
        model = "openai-ChatGPT"  # Default

    # Renamed from ``answer`` so the local no longer shadows this function.
    answer_text: str = ""

    # BUGFIX: ``== 0`` also inverted this dispatch, sending openai-* models
    # down the HuggingFace branch and vice versa.
    if model.startswith("openai-"):
        # results: tuple[list[str], list[str]] = internet.Google(
        #     query, GOOGLE_SEARCH_API_KEY, GOOGLE_SEARCH_ENGINE_ID
        # ).google(filter_irrelevant=True)
        print("hi")
    else:
        # NOTE(review): list_models() yields model-info objects, not plain
        # strings; the ``in`` membership tests below assume string-like
        # behaviour -- confirm against the installed transformers version.
        qa_models = [
            m for m in list_models() if "qa" in m or "question-answering" in m
        ]
        model = model.replace("hf-", "", 1)
        if model not in qa_models:
            model = "hf-"
        # results: tuple[list[str], list[str]] = internet.Google(
        #     query, GOOGLE_SEARCH_API_KEY, GOOGLE_SEARCH_ENGINE_ID
        # ).google(filter_irrelevant=False)

    answer_result: tuple[Any, list[str]] = (answer_text, ["hi"])  # results[1])
    if config.CONF_DEBUG:
        logging.info(f"Answer: {answer_result}")
    return answer_result
|
2022-12-27 06:38:47 +00:00
|
|
|
|
|
|
|
|
2023-01-01 13:01:12 +00:00
|
|
|
# print(os.environ)
# Run the smoke-test query only when executed as a script; previously this
# fired (and hit the network/API keys) on every import of the module.
if __name__ == "__main__":
    print(answer("What is the newest Pokemon Game?"))
|
2022-12-27 06:38:47 +00:00
|
|
|
# def custom_answer
|