updated askai

test 2024-01-20 21:30:00 -05:00
parent d659c54402
commit ffe352a91f
2 changed files with 34 additions and 10 deletions

@@ -1,5 +1,5 @@
 # askai
-askai is an interactive Linux tool that processes data from stdin according to user-specified instructions. It utilizes LangChain's Ollama model for generating responses.
+askai is an interactive Linux tool that processes data from stdin according to user-specified instructions. It uses Ollama and open-source AI models to generate responses.
 ## Usage
 To use the tool, simply pipe your data through stdin and pass your instruction as the final command-line argument. To save or update the default settings, run askai with the `--config` flag.
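For illustration, here is a hedged sketch of driving askai from a Python wrapper, since the invocation itself is not shown in this commit; the file name, the instruction text, and the assumption that `askai` is installed on PATH are all hypothetical:

```python
# Hypothetical driver: pipe a file's contents to askai's stdin and pass the
# instruction as the last argument. "notes.txt" and the instruction are placeholders.
import subprocess

with open("notes.txt", "r") as f:
    data = f.read()

result = subprocess.run(
    ["askai", "Summarize the key points."],  # instruction is read via sys.argv[-1]
    input=data,            # delivered to the script on stdin
    capture_output=True,
    text=True,
)
print(result.stdout)
```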

askai
@@ -1,10 +1,31 @@
 #!/usr/bin/env python
-from langchain.llms import Ollama
 import json
+import requests
 import os
 import sys
 import textwrap
+from rich.markdown import Markdown
+from rich.console import Console
+
+
+def generate_text(url, model, prompt):
+    url = str(url)+"/api/generate"
+    data = {
+        "model": str(model),
+        "prompt": str(prompt),
+        "stream": False,
+        "options": {
+            "temperature": 0.6,
+        }
+    }
+    response = requests.post(url, json=data)
+    if response.text == "404 page not found":
+        print(f"\n\nError: Ollama does not appear to be up or the url is wrong. url: {url}\n\n")
+        exit()
+    text = json.loads(response.text)
+    return text["response"]
+
+
 if '--config' in sys.argv:
     # Check that no
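The new `generate_text` helper replaces the LangChain `Ollama` wrapper with a direct call to Ollama's HTTP `/api/generate` endpoint. A standalone sketch of the same request pattern follows; the base URL `http://localhost:11434` and the model name `llama2` are placeholder assumptions, not values taken from this repo:

```python
# Sketch of the non-streaming /api/generate request that generate_text performs.
# Base URL and model name below are assumed examples.
import json
import requests

base_url = "http://localhost:11434"
payload = {
    "model": "llama2",
    "prompt": "Reply with one short sentence.",
    "stream": False,                     # ask for a single JSON object instead of a stream
    "options": {"temperature": 0.6},
}
resp = requests.post(f"{base_url}/api/generate", json=payload, timeout=120)
body = json.loads(resp.text)             # includes "response" plus metadata such as "done"
print(body["response"])
```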
@@ -13,12 +34,12 @@ if '--config' in sys.argv:
         exit()
     # Check if the config.json file exists
-    if not os.path.isfile(os.path.expanduser('~/.config.json')):
+    if not os.path.isfile(os.path.expanduser('~/.askai_config.json')):
         # Create a new configuration file
-        f = open(os.path.expanduser('~/.config.json'), 'x')
+        f = open(os.path.expanduser('~/.askai_config.json'), 'x')
         f.close()
-    f = open(os.path.expanduser('~/.config.json'), 'w')
+    f = open(os.path.expanduser('~/.askai_config.json'), 'w')
     data = {}
     # Ask the user to set the model details
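This hunk also moves the config file from `~/.config.json` to the less collision-prone `~/.askai_config.json`. Based on the keys the script validates further down (`base_url` and `model`), a config written by `--config` would look roughly like the sketch below; the concrete values are placeholders:

```python
# Sketch of a ~/.askai_config.json that the loader in the next hunk would accept.
# The URL and model name are example values only.
import json
import os

config = {
    "base_url": "http://localhost:11434",  # where the Ollama server is reachable
    "model": "llama2",                     # any model pulled into that Ollama instance
}
with open(os.path.expanduser("~/.askai_config.json"), "w") as f:
    json.dump(config, f, indent=2)
```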
@@ -47,7 +68,7 @@ if '--config' in sys.argv:
 else:
     # Load model details from the config file
     try:
-        with open(os.path.expanduser('~/.config.json'), 'r') as f:
+        with open(os.path.expanduser('~/.askai_config.json'), 'r') as f:
             data = json.load(f)
     except:
         print("Error: The config file is corrupt. Please run with the --config flag set.")
@@ -57,7 +78,7 @@ if 'base_url' not in data or 'model' not in data:
     print('Error: missing base url or model please rerun with --config flag')
     exit()
-ollama = Ollama(base_url=data['base_url'], model=data['model'])
 
 user_input = "\n"+ sys.argv[-1]
@@ -75,5 +96,8 @@ Information: {textwrap.indent(std_input, ' ')}
 """
 #print(prompt)
-output = ollama(prompt)
-print(output)
+output = generate_text(data['base_url'], data['model'], prompt)
+console = Console()
+console.print(Markdown(f"# askai Output\n{output}"))
+print("")