Fix tokenizer issue

elijah 2024-07-19 00:11:38 +02:00
parent af38cda43e
commit 53d4703cb2

app.py

@@ -15,7 +15,6 @@ limiter = Limiter(
 )
 # Load the tokenizer
-tokenizer = AutoTokenizer.from_pretrained("informatiker/Llama-3-8B-Instruct-abliterated")
 tokenizer = AutoTokenizer.from_pretrained(os.environ.get('TOKENIZER', 'gpt2'))
 api_url = os.environ.get('API_URL', 'https://api.openai.com/v1')
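The change drops the hardcoded Llama-3 tokenizer and keeps only the env-var-driven load. A minimal sketch of how the resulting configuration behaves, assuming the transformers library is installed; the prompt and token-counting usage below are illustrative, not taken from app.py:

import os
from transformers import AutoTokenizer

# Choose the tokenizer via the TOKENIZER env var, falling back to "gpt2".
tokenizer = AutoTokenizer.from_pretrained(os.environ.get('TOKENIZER', 'gpt2'))

# Hypothetical usage: count the tokens of an incoming prompt,
# e.g. for request validation or rate limiting.
prompt = "Hello, world!"
num_tokens = len(tokenizer.encode(prompt))
print(num_tokens)

To point the app at a different tokenizer, set the variable before starting it, e.g. TOKENIZER=gpt2 (or any Hugging Face model ID whose tokenizer you want to match).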