updated askai
This commit is contained in:
parent
d659c54402
commit
ffe352a91f
@ -1,5 +1,5 @@
|
||||
# askai
|
||||
askai is an interactive Linux tool that processes data from stdin according to user-specified instructions. It utilizes LangChain's Ollama model for generating responses.
|
||||
askai is an interactive Linux tool that processes data from stdin according to user-specified instructions. It uses Ollama and open-source AI models to generate responses.
|
||||
|
||||
## Usage
|
||||
To use the tool, pipe your data in through stdin and pass your instruction as a command-line argument. To save or update default settings, run askai with the `--config` flag.
|
||||
|
42
askai
42
askai
@ -1,10 +1,31 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
from langchain.llms import Ollama
|
||||
import json
|
||||
import requests
|
||||
import os
|
||||
import sys
|
||||
import textwrap
|
||||
from rich.markdown import Markdown
|
||||
from rich.console import Console
|
||||
|
||||
|
||||
def generate_text(url, model, prompt):
    """Send *prompt* to an Ollama server and return the generated text.

    Parameters:
        url: base URL of the Ollama server (e.g. "http://localhost:11434");
            "/api/generate" is appended here.
        model: name of the Ollama model to run.
        prompt: full prompt text to send.

    Returns:
        The "response" field of Ollama's JSON reply (the generated text).

    Exits the process with an error message if the server answers with
    its plain-text "404 page not found" body (server down or wrong URL).
    """
    endpoint = str(url) + "/api/generate"
    payload = {
        "model": str(model),
        "prompt": str(prompt),
        "stream": False,  # request one complete reply instead of a chunk stream
        "options": {
            "temperature": 0.6,
        },
    }
    # A timeout bounds the call: without one, requests waits forever on a
    # hung or unreachable server and the tool can never return to the shell.
    response = requests.post(endpoint, json=payload, timeout=300)
    # Ollama answers with this literal plain-text body when the API path is
    # wrong or the server is not serving the API, so check it before parsing.
    if response.text == "404 page not found":
        print(f"\n\nError: Ollama does not appear to be up or the url is wrong. url: {endpoint}\n\n")
        exit()
    # response.json() parses the body the same way json.loads(response.text) did.
    reply = response.json()
    return reply["response"]
|
||||
|
||||
|
||||
|
||||
if '--config' in sys.argv:
|
||||
# Check that no
|
||||
@ -13,12 +34,12 @@ if '--config' in sys.argv:
|
||||
exit()
|
||||
|
||||
# Check if the config.json file exists
|
||||
if not os.path.isfile(os.path.expanduser('~/.config.json')):
|
||||
if not os.path.isfile(os.path.expanduser('~/.askai_config.json')):
|
||||
# Create a new configuration file
|
||||
f = open(os.path.expanduser('~/.config.json'), 'x')
|
||||
f = open(os.path.expanduser('~/.askai_config.json'), 'x')
|
||||
f.close()
|
||||
|
||||
f = open(os.path.expanduser('~/.config.json'), 'w')
|
||||
f = open(os.path.expanduser('~/.askai_config.json'), 'w')
|
||||
data = {}
|
||||
|
||||
# Ask the user to set the model details
|
||||
@ -47,7 +68,7 @@ if '--config' in sys.argv:
|
||||
else:
|
||||
# Load model details from the config file
|
||||
try:
|
||||
with open(os.path.expanduser('~/.config.json'), 'r') as f:
|
||||
with open(os.path.expanduser('~/.askai_config.json'), 'r') as f:
|
||||
data = json.load(f)
|
||||
except:
|
||||
print("Error: The config file is corrupt. Please run with the --config flag set.")
|
||||
@ -57,7 +78,7 @@ if 'base_url' not in data or 'model' not in data:
|
||||
print('Error: missing base url or model please rerun with --config flag')
|
||||
exit()
|
||||
|
||||
ollama = Ollama(base_url=data['base_url'], model=data['model'])
|
||||
|
||||
|
||||
user_input = "\n"+ sys.argv[-1]
|
||||
|
||||
@ -75,5 +96,8 @@ Information: {textwrap.indent(std_input, ' ')}
|
||||
"""
|
||||
#print(prompt)
|
||||
|
||||
output = ollama(prompt)
|
||||
print(output)
|
||||
output = generate_text(data['base_url'], data['model'], prompt)
|
||||
|
||||
console = Console()
|
||||
console.print(Markdown(f"# askai Output\n{output}"))
|
||||
print("")
|
||||
|
Loading…
Reference in New Issue
Block a user