import os

import streamlit as st
from openai import OpenAI
|
|
class OpenAIAgent:
    """Streamlit helper that asks an OpenAI-compatible chat endpoint for
    insights about a pandas DataFrame.

    The agent sends a small sample of the DataFrame (shape, column names,
    first five rows) together with a task prompt and renders the model's
    reply in the Streamlit UI.
    """

    # Model served by the aimlapi.com OpenAI-compatible endpoint.
    DEFAULT_MODEL = "mistralai/Mistral-7B-Instruct-v0.2"

    def __init__(self, api_key=None, base_url="https://api.aimlapi.com/v1"):
        """Create the chat-completions client.

        Args:
            api_key: API key for the endpoint. Defaults to the
                ``AIMLAPI_KEY`` environment variable.
            base_url: Base URL of the OpenAI-compatible API.

        SECURITY: the previous revision hard-coded an API key in source;
        that key is exposed in version-control history and must be rotated.
        Secrets belong in the environment (or st.secrets), never in code.
        """
        self.base_url = base_url
        self.api_key = api_key if api_key is not None else os.getenv("AIMLAPI_KEY")
        self.api = OpenAI(api_key=self.api_key, base_url=self.base_url)
        self.system_prompt = "You are an AI assistant specialized in data analysis, visualization, and insights generation. Provide clear and concise responses."

    def get_insights(self, df, prompt):
        """Send a prompt plus a data summary to the model and return its reply.

        Args:
            df: pandas DataFrame to summarize (shape, columns, first 5 rows).
            prompt: Task description appended after the data summary.

        Returns:
            The model's response text, or a human-readable error string if
            the API call fails (best-effort: errors are surfaced in the UI
            rather than raised, since this feeds directly into st.write).
        """
        try:
            # Only a small sample is sent — the full DataFrame may be large.
            data_sample = df.head(5).to_string()
            data_info = f"Dataset shape: {df.shape}\nColumns: {', '.join(df.columns)}\n"
            full_prompt = f"{data_info}\n{data_sample}\n\n{prompt}"

            completion = self.api.chat.completions.create(
                model=self.DEFAULT_MODEL,
                messages=[
                    {"role": "system", "content": self.system_prompt},
                    {"role": "user", "content": full_prompt},
                ],
                temperature=0.7,
                max_tokens=500,
            )

            return completion.choices[0].message.content
        except Exception as e:
            # Deliberate broad catch: any failure (network, auth, parsing)
            # becomes a message shown to the user instead of a crash.
            return f"An error occurred while getting insights: {str(e)}"

    def generate_insights(self, df):
        """Render a Streamlit widget that generates a chosen type of insight.

        Args:
            df: pandas DataFrame to analyze.

        Side effects: draws a selectbox and a button; on click, writes the
        model's response to the page.
        """
        insight_type = st.selectbox("Select insight type",
            ["General Insights", "Trend Analysis", "Anomaly Detection", "Predictive Analysis"])

        if st.button("Generate Insights"):
            with st.spinner("Generating insights..."):
                prompt = f"Analyze the following dataset and provide {insight_type}."
                ai_response = self.get_insights(df, prompt)
                st.write(ai_response)

    def custom_ai_task(self, df):
        """Render a Streamlit widget that runs a free-form task on the data.

        Args:
            df: pandas DataFrame the task should operate on.

        Side effects: draws a text area and a button; on click, writes the
        model's response to the page.
        """
        custom_task = st.text_area("Describe your custom task:")

        if st.button("Execute Custom Task"):
            with st.spinner("Processing custom task..."):
                prompt = f"Perform the following task on the given dataset:\n{custom_task}"
                ai_response = self.get_insights(df, prompt)
                st.write(ai_response)
# NOTE(review): removed a commented-out legacy OpenAIAgent implementation
# that was parked here inside a dangling triple-quoted string literal. It
# had no runtime effect, referenced a nonexistent model name ("gpt 4o"),
# and embedded a hard-coded API key — secrets must never live in source.
# Recover it from version-control history if ever needed.