added --changeDefaultModel to persistently change the default model
This commit is contained in:
parent
b84451114c
commit
2f295974e8
22
README.md
22
README.md
@ -197,10 +197,8 @@ Once you have it all set up, here's how to use it.
|
|||||||
`fabric -h`
|
`fabric -h`
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
fabric [-h] [--text TEXT] [--copy] [--agents {trip_planner,ApiKeys}]
|
fabric [-h] [--text TEXT] [--copy] [--agents {trip_planner,ApiKeys}] [--output [OUTPUT]] [--stream] [--list] [--update] [--pattern PATTERN] [--setup] [--changeDefaultModel CHANGEDEFAULTMODEL] [--local]
|
||||||
[--output [OUTPUT]] [--stream] [--list] [--update]
|
[--claude] [--model MODEL] [--listmodels] [--context]
|
||||||
[--pattern PATTERN] [--setup] [--local] [--claude]
|
|
||||||
[--model MODEL] [--listmodels] [--context]
|
|
||||||
|
|
||||||
An open source framework for augmenting humans using AI.
|
An open source framework for augmenting humans using AI.
|
||||||
|
|
||||||
@ -209,27 +207,23 @@ options:
|
|||||||
--text TEXT, -t TEXT Text to extract summary from
|
--text TEXT, -t TEXT Text to extract summary from
|
||||||
--copy, -C Copy the response to the clipboard
|
--copy, -C Copy the response to the clipboard
|
||||||
--agents {trip_planner,ApiKeys}, -a {trip_planner,ApiKeys}
|
--agents {trip_planner,ApiKeys}, -a {trip_planner,ApiKeys}
|
||||||
Use an AI agent to help you with a task. Acceptable
|
Use an AI agent to help you with a task. Acceptable values are 'trip_planner' or 'ApiKeys'. This option cannot be used with any other flag.
|
||||||
values are 'trip_planner' or 'ApiKeys'. This option
|
|
||||||
cannot be used with any other flag.
|
|
||||||
--output [OUTPUT], -o [OUTPUT]
|
--output [OUTPUT], -o [OUTPUT]
|
||||||
Save the response to a file
|
Save the response to a file
|
||||||
--stream, -s Use this option if you want to see the results in
|
--stream, -s Use this option if you want to see the results in realtime. NOTE: You will not be able to pipe the output into another command.
|
||||||
realtime. NOTE: You will not be able to pipe the
|
|
||||||
output into another command.
|
|
||||||
--list, -l List available patterns
|
--list, -l List available patterns
|
||||||
--update, -u Update patterns
|
--update, -u Update patterns
|
||||||
--pattern PATTERN, -p PATTERN
|
--pattern PATTERN, -p PATTERN
|
||||||
The pattern (prompt) to use
|
The pattern (prompt) to use
|
||||||
--setup Set up your fabric instance
|
--setup Set up your fabric instance
|
||||||
|
--changeDefaultModel CHANGEDEFAULTMODEL
|
||||||
|
Change the default model. Your choice will be saved in ~/.config/fabric/.env. For a list of available models, use the --listmodels flag.
|
||||||
--local, -L Use local LLM. Default is llama2
|
--local, -L Use local LLM. Default is llama2
|
||||||
--claude Use Claude AI
|
--claude Use Claude AI
|
||||||
--model MODEL, -m MODEL
|
--model MODEL, -m MODEL
|
||||||
Select the model to use (GPT-4 by default for chatGPT
|
Select the model to use (GPT-4 by default for chatGPT and llama2 for Ollama)
|
||||||
and llama2 for Ollama)
|
|
||||||
--listmodels List all available models
|
--listmodels List all available models
|
||||||
--context, -c Use Context file (context.md) to add context to your
|
--context, -c Use Context file (context.md) to add context to your pattern
|
||||||
pattern
|
|
||||||
```
|
```
|
||||||
|
|
||||||
#### Example commands
|
#### Example commands
|
||||||
|
@ -43,6 +43,8 @@ def main():
|
|||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
"--setup", help="Set up your fabric instance", action="store_true"
|
"--setup", help="Set up your fabric instance", action="store_true"
|
||||||
)
|
)
|
||||||
|
parser.add_argument('--changeDefaultModel',
|
||||||
|
help="Change the default model. Your choice will be saved in ~/.config/fabric/.env). For a list of available models, use the --listmodels flag.")
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
'--local', '-L', help="Use local LLM. Default is llama2", action="store_true")
|
'--local', '-L', help="Use local LLM. Default is llama2", action="store_true")
|
||||||
|
|
||||||
@ -77,6 +79,10 @@ def main():
|
|||||||
Update()
|
Update()
|
||||||
Alias()
|
Alias()
|
||||||
sys.exit()
|
sys.exit()
|
||||||
|
if args.changeDefaultModel:
|
||||||
|
Setup().default_model(args.changeDefaultModel)
|
||||||
|
print(f"Default model changed to {args.changeDefaultModel}")
|
||||||
|
sys.exit()
|
||||||
if args.agents:
|
if args.agents:
|
||||||
# Handle the agents logic
|
# Handle the agents logic
|
||||||
if args.agents == 'trip_planner':
|
if args.agents == 'trip_planner':
|
||||||
|
@ -51,21 +51,24 @@ class Standalone:
|
|||||||
self.args = args
|
self.args = args
|
||||||
self.model = args.model
|
self.model = args.model
|
||||||
self.claude = claude
|
self.claude = claude
|
||||||
|
try:
|
||||||
|
self.model = os.environ["DEFAULT_MODEL"]
|
||||||
|
except:
|
||||||
if self.local:
|
if self.local:
|
||||||
if self.args.model == 'gpt-4-turbo-preview':
|
if self.args.model == 'gpt-4-turbo-preview':
|
||||||
self.args.model = 'llama2'
|
self.model = 'llama2'
|
||||||
if self.claude:
|
if self.claude:
|
||||||
if self.args.model == 'gpt-4-turbo-preview':
|
if self.args.model == 'gpt-4-turbo-preview':
|
||||||
self.model = 'claude-3-opus-20240229'
|
self.model = 'claude-3-opus-20240229'
|
||||||
|
|
||||||
async def localChat(self, messages):
    """Send *messages* to the local Ollama server and print the full reply.

    Args:
        messages (list): Chat messages in the Ollama message format.
    """
    from ollama import AsyncClient
    reply = await AsyncClient().chat(model=self.model, messages=messages)
    print(reply['message']['content'])
|
||||||
|
|
||||||
async def localStream(self, messages):
    """Stream a reply from the local Ollama server, printing tokens as they arrive.

    Args:
        messages (list): Chat messages in the Ollama message format.
    """
    from ollama import AsyncClient
    stream = await AsyncClient().chat(model=self.model, messages=messages, stream=True)
    async for chunk in stream:
        print(chunk['message']['content'], end='', flush=True)
|
||||||
|
|
||||||
async def claudeStream(self, system, user):
|
async def claudeStream(self, system, user):
|
||||||
@ -243,6 +246,8 @@ class Standalone:
|
|||||||
if "overloaded_error" in str(e):
|
if "overloaded_error" in str(e):
|
||||||
print(
|
print(
|
||||||
"Error: Fabric is working fine, but claude is overloaded. Please try again later.")
|
"Error: Fabric is working fine, but claude is overloaded. Please try again later.")
|
||||||
|
if "Attempted to call a sync iterator on an async stream" in str(e):
|
||||||
|
print("Error: There is a problem connecting fabric with your local ollama installation. Please visit https://ollama.com for installation instructions. It is possible that you have chosen the wrong model. Please run fabric --listmodels to see the available models and choose the right one with fabric --model <model> or fabric --changeDefaultModel. If this does not work. Restart your computer (always a good idea) and try again. If you are still having problems, please visit https://ollama.com for installation instructions.")
|
||||||
else:
|
else:
|
||||||
print(f"Error: {e}")
|
print(f"Error: {e}")
|
||||||
print(e)
|
print(e)
|
||||||
@ -261,6 +266,7 @@ class Standalone:
|
|||||||
"https://api.openai.com/v1/models", headers=headers)
|
"https://api.openai.com/v1/models", headers=headers)
|
||||||
|
|
||||||
if response.status_code == 200:
|
if response.status_code == 200:
|
||||||
|
print("OpenAI GPT models:\n")
|
||||||
models = response.json().get("data", [])
|
models = response.json().get("data", [])
|
||||||
# Filter only gpt models
|
# Filter only gpt models
|
||||||
gpt_models = [model for model in models if model.get(
|
gpt_models = [model for model in models if model.get(
|
||||||
@ -270,6 +276,13 @@ class Standalone:
|
|||||||
|
|
||||||
for model in sorted_gpt_models:
|
for model in sorted_gpt_models:
|
||||||
print(model.get("id"))
|
print(model.get("id"))
|
||||||
|
print("\nLocal Ollama models:")
|
||||||
|
import ollama
|
||||||
|
ollamaList = ollama.list()['models']
|
||||||
|
for model in ollamaList:
|
||||||
|
print(model['name'].rstrip(":latest"))
|
||||||
|
print("\nClaude models:")
|
||||||
|
print("claude-3-opus-20240229")
|
||||||
else:
|
else:
|
||||||
print(f"Failed to fetch models: HTTP {response.status_code}")
|
print(f"Failed to fetch models: HTTP {response.status_code}")
|
||||||
|
|
||||||
@ -461,6 +474,33 @@ class Setup:
|
|||||||
with open(self.env_file, "w") as f:
|
with open(self.env_file, "w") as f:
|
||||||
f.write(f"CLAUDE_API_KEY={claude_key}")
|
f.write(f"CLAUDE_API_KEY={claude_key}")
|
||||||
|
|
||||||
|
def default_model(self, model):
    """Persist (or clear) the default model in the environment file.

    Rewrites ``self.env_file`` so it keeps every existing setting (e.g.
    API keys) and ends with a single ``DEFAULT_MODEL=<model>`` entry.

    Args:
        model (str): Model name to store. Surrounding whitespace is
            stripped. An empty value removes any existing DEFAULT_MODEL
            entry instead of setting one.
    """
    model = model.strip()
    if os.path.exists(self.env_file):
        with open(self.env_file, "r") as f:
            lines = f.readlines()
        with open(self.env_file, "w") as f:
            for line in lines:
                if "DEFAULT_MODEL" not in line:
                    # Guarantee a trailing newline so the new entry is
                    # never glued onto the previous one (bug in original:
                    # a file ending without '\n' produced e.g.
                    # "CLAUDE_API_KEY=xDEFAULT_MODEL=y").
                    f.write(line if line.endswith("\n") else line + "\n")
            if model:
                f.write(f"DEFAULT_MODEL={model}\n")
    elif model:
        # No env file yet: create one holding only the default model.
        with open(self.env_file, "w") as f:
            f.write(f"DEFAULT_MODEL={model}\n")
    # else: no file and no model — nothing to do. (The original opened
    # the missing file for reading here, raising FileNotFoundError.)
|
||||||
|
|
||||||
def patterns(self):
|
def patterns(self):
|
||||||
""" Method to update patterns and exit the system.
|
""" Method to update patterns and exit the system.
|
||||||
|
|
||||||
@ -486,6 +526,7 @@ class Setup:
|
|||||||
print("Please enter your claude API key. If you do not have one, or if you have already entered it, press enter.\n")
|
print("Please enter your claude API key. If you do not have one, or if you have already entered it, press enter.\n")
|
||||||
claudekey = input()
|
claudekey = input()
|
||||||
self.claude_key(claudekey.strip())
|
self.claude_key(claudekey.strip())
|
||||||
|
print("Please enter your default model. Press enter to choose the default gpt-4-turbo-preview\n")
|
||||||
self.patterns()
|
self.patterns()
|
||||||
|
|
||||||
|
|
||||||
|
Loading…
x
Reference in New Issue
Block a user