import json
import os
import subprocess
import sys
import threading
import time
from typing import Optional

import psutil
import requests
import uvicorn
from fastapi import BackgroundTasks, FastAPI, Form
from fastapi.middleware.cors import CORSMiddleware
from pydantic import BaseModel
from supertokens_fastapi import get_cors_allowed_headers

from src.constants import *

app = FastAPI()
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Resolve the llama model path from the HAIVE_MODELS_DIR environment variable

# llama_model_directory = current_directory+"/model"
llama_model_directory = os.environ['HAIVE_MODELS_DIR']
llama_model_path = os.path.join(llama_model_directory, "haive_llm.gguf")
# sys.path.append(llama_model_directory)
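# Illustrative environment setup (the variable names come from this module, but the
# example paths are assumptions, not part of the original code): HAIVE_MODELS_DIR and
# DATA_DIR must be set before the service starts, e.g.
#   HAIVE_MODELS_DIR=/opt/haive/models
#   DATA_DIR=/var/lib/haive
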
def unload_llama_model():
    """Ask the local Nitro server to unload the currently loaded llama model."""
    url = "http://localhost:3928/inferences/llamacpp/unloadmodel"
    headers = {'Content-Type': 'application/json'}

    try:
        response = requests.post(url, headers=headers)
        response.raise_for_status()  # Raise an exception for 4xx and 5xx status codes
        return response.json()
    except requests.exceptions.RequestException as e:
        print(f"Error making request: {e}")
        return None


def load_llama_model(llama_model_path=llama_model_path, ctx_len=4000, ngl=100):
    """Ask the local Nitro server to load the llama model at the given path."""
    print("Model Path: ", llama_model_path)
    url = "http://localhost:3928/inferences/llamacpp/loadmodel"
    headers = {'Content-Type': 'application/json'}
    payload = {
        "llama_model_path": llama_model_path,
        "ctx_len": ctx_len,
        "ngl": ngl
    }

    try:
        response = requests.post(url, headers=headers, data=json.dumps(payload))
        response.raise_for_status()  # Raise an exception for 4xx and 5xx status codes
        return response.json()
    except requests.exceptions.RequestException as e:
        print(f"Error making request: {e}")
        return None
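
# Illustrative usage of the two helpers above (assumption: a Nitro inference server
# is already listening on localhost:3928; in this module load_llama_model() is only
# invoked from run_server() below, and unload_llama_model() is currently unused):
#   load_llama_model(llama_model_path, ctx_len=4000, ngl=100)
#   unload_llama_model()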

class ResponseModel(BaseModel):
    message: str
    error: Optional[str] = None

# CONFIG_FILE_PATH = "/usr/local/bin/haive/config.json"
CONFIG_FILE_PATH = os.path.join(os.environ['DATA_DIR'], "config.json")


def write_config(status):
    """Persist the given status string to the JSON config file."""
    config_data = {"status": status}
    with open(CONFIG_FILE_PATH, "w") as config_file:
        json.dump(config_data, config_file)
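

# Hypothetical read-side counterpart (not part of the original module): a minimal
# sketch assuming config.json keeps the {"status": ...} layout written above.
def read_config():
    """Return the stored status, or None if the config file is missing or invalid."""
    try:
        with open(CONFIG_FILE_PATH) as config_file:
            return json.load(config_file).get("status")
    except (OSError, json.JSONDecodeError):
        return None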


def get_nitro_pid():
    """Return the PIDs of all running processes whose name contains 'nitro'."""
    try:
        # Iterate over all running processes and collect matching PIDs
        # (guard against processes that report no name)
        nitro_processes = [
            process.info['pid']
            for process in psutil.process_iter(['pid', 'name'])
            if 'nitro' in (process.info['name'] or '').lower()
        ]
        return nitro_processes
    except Exception as e:
        print(f"Error: {e}")
        return []

def kill_nitro(pid_list):
    """Terminate every Nitro process in pid_list, continuing past per-process failures."""
    if not pid_list:
        print("No Nitro processes found.")
        return

    for pid in pid_list:
        try:
            # Terminate the process with the specified PID
            psutil.Process(pid).terminate()
            print(f"Nitro process with PID {pid} terminated successfully.")
        except psutil.Error as e:
            print(f"Error terminating PID {pid}: {e}")



@app.post("/start")
def start_processes():
    try:
        
        def run_server():
            # Get the App Data directory from system environment variables
            appdata_path = os.path.expandvars('%AppData%')

            # Change directory to %AppData%\nitro
            nitro_path = os.path.join(appdata_path, 'nitro')
            os.chdir(nitro_path)
            print("Changed directory to", nitro_path)
            # Run nitro.exe in subprocess
            subprocess.Popen("nitro.exe", stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE, close_fds=True)
            time.sleep(8)
            load_llama_model()
        # Create a thread and start it
        # server_thread = threading.Thread(target=run_server)
        # server_thread.start()

        # Time delay to allow the server to start
        # time.sleep(2)
        # # Change the working directory back to the original directory
        # os.chdir(os.environ['HAIVE_DIR'])
        # Define the function to run in a separate thread
        write_config("enabled")

        return {"message": "Script started successfully"}
    except subprocess.CalledProcessError as e:
        return {"message": "Script started failed"}
    
@app.post("/stop")
def stop_processes():
    try:
        write_config("disable")
        # nitro_pid_list = get_nitro_pid()
        # kill_nitro(nitro_pid_list)
        # unload_llama_model()
        return {"message": "Script stopped successfully"}
    except subprocess.CalledProcessError as e:
        return {"message": "Script stopped Failed"}
        
if __name__ == "__main__":
    uvicorn.run(app, host="0.0.0.0", port=5005, ssl_keyfile="privkey.pem", ssl_certfile="fullchain.pem")
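
# Illustrative client calls (assumption: the service is reachable at
# https://localhost:5005 using the TLS key/cert files referenced above):
#   curl -k -X POST https://localhost:5005/start
#   curl -k -X POST https://localhost:5005/stop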