"""Gradio chatbot ("Liya") backed by HuggingFaceH4/zephyr-7b-beta, with
conversation history persisted to a JSON file."""

import json
import gradio as gr
import torch
from transformers import pipeline, set_seed

# Initialize the text-generation pipeline with the Zephyr chat model.
# Note: device_map="auto" requires the `accelerate` package to be installed.
pipe = pipeline("text-generation", model="HuggingFaceH4/zephyr-7b-beta", torch_dtype=torch.bfloat16, device_map="auto")
history_file = "conversation_history.json"  # File to store conversation history

def load_history():
    """Load the conversation history from the JSON file."""
    try:
        with open(history_file, 'r') as file:
            return json.load(file)
    except (FileNotFoundError, json.JSONDecodeError):
        return []  # Start fresh if the file is missing or unreadable
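
# The history file holds a flat list of chat-style message dicts, e.g.:
#   [{"role": "user", "content": "Hi"},
#    {"role": "assistant", "content": "Hello! How can I help you today?"}]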

def save_history(conversation_history):
    """Persist the conversation history to the JSON file."""
    with open(history_file, 'w') as file:
        json.dump(conversation_history, file)
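
# Optional hardening (a sketch, not used above): write to a temporary file and
# rename, so a crash mid-write cannot truncate the history file.
# `save_history_atomic` is a hypothetical helper, not part of the original script.
def save_history_atomic(conversation_history):
    import os, tempfile  # Local imports keep this optional sketch self-contained
    fd, tmp_path = tempfile.mkstemp(dir=".", suffix=".json")
    with os.fdopen(fd, 'w') as tmp:
        json.dump(conversation_history, tmp)
    os.replace(tmp_path, history_file)  # os.replace is atomic on POSIX filesystems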

def chat_with_bot(user_input):
    conversation_history = load_history()
    conversation_history.append({"role": "user", "content": user_input})
    
    system_prompt = {
        "role": "system",
        "content": "You are a friendly chatbot named Liya who helps users with their queries. Refrain from sharing false information. If you don't know the answer to a question, ask the user to rephrase it. Show excitement and eagerness to help the user and be creative with your responses."
    }
    # Prepend the system prompt for generation only; it is never saved to disk.
    model_input = [system_prompt] + conversation_history
    prompt = pipe.tokenizer.apply_chat_template(model_input, tokenize=False, add_generation_prompt=True)
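    # For reference, Zephyr's chat template renders the message list roughly as:
    #   <|system|>
    #   You are a friendly chatbot named Liya ...</s>
    #   <|user|>
    #   Hello!</s>
    #   <|assistant|>
    # add_generation_prompt=True appends the trailing <|assistant|> marker so
    # the model continues the conversation as the assistant.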
    outputs = pipe(prompt, max_new_tokens=256, do_sample=True, temperature=0.2, top_k=50, top_p=0.95)
    generated_text = outputs[0]["generated_text"]
    
    # The pipeline output includes the prompt itself, so keep only the text
    # after the final <|assistant|> marker as the new reply.
    parts = generated_text.split("<|assistant|>")
    last_response = parts[-1].strip() if parts else ""
    conversation_history.append({"role": "assistant", "content": last_response})
    # Persist the full history; the system prompt was never appended to
    # conversation_history, so nothing needs to be sliced off before saving.
    save_history(conversation_history)
    return last_response
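
# Quick smoke test without the UI (commented out so running this file goes
# straight to the Gradio app; uncomment to exercise the function directly):
# print(chat_with_bot("Hi Liya, can you introduce yourself?"))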

# Set up the Gradio interface and serve it on the configured host and port.
iface = gr.Interface(fn=chat_with_bot, inputs="text", outputs="text", title="Chat with Liya")
iface.launch(share=True, server_name="54.39.104.93", server_port=5009)