minte9
LearnRemember / MLEARNING



API Request

 
""" Install the library and create a new secret key
https://platform.openai.com/account/api-keys

pin install openai

.env
OPENAI_API_KEY=xxx
"""

import openai
import os
from dotenv import load_dotenv

# Loads variables from .env into environment
load_dotenv()  

# Setup OpenAI API key
openai.api_key = os.getenv("OPENAI_API_KEY")

question = "What is flask python?"

response = openai.ChatCompletion.create(
    model="gpt-3.5-turbo",
    messages=[{"role": "user", "content": question}],
    max_tokens=256,
    n=1,
    stop=None,
    temperature=0.7
)

answer = response['choices'][0]['message']['content']

print(question)
print(answer) 

"""
    What is flask python?
    Flask is a lightweight web framework written in Python. 
    It is used to build web applications quickly and easily. 
"""

Streaming Completion

 
""" Streaming with the OpenAI API allows you to get partial results 
and process them as they become available, which is more efficient and responsive. 
"""

import openai
import os
from dotenv import load_dotenv

# Loads variables from .env into environment
load_dotenv()  

# Setup OpenAI API key
openai.api_key = os.getenv("OPENAI_API_KEY")

# User question
print("Please enter your question (press Enter to submit):")
question = input()

response = openai.ChatCompletion.create(
    model="gpt-3.5-turbo",
    messages=[{"role": "user", "content": question}],
    max_tokens=256,
    n=1,
    stop=None,
    temperature=0.7,
    stream=True
)

for chunk in response:
    content = chunk["choices"][0]["delta"].get("content", "")
    print(content, end="", flush=True)
    
print("\n")

"""
    Please enter your question (press Enter to submit):
    What is Flask?
    Flask is a lightweight and flexible web application framework for Python. 
    It is designed to make it easy to build web applications ... 
"""

Conversation History

 
""" You can create a conversation where the model remembers the context.
You'll need to include that conversation history in subsequent requests.
"""

import openai
import os, sys

from dotenv import load_dotenv
load_dotenv()  

# Setup OpenAI API key
openai.api_key = os.getenv("OPENAI_API_KEY")

# Initialize conversation history
conversation_history = []

while True:
    # User question
    print("\nPlease enter your question (or 'exit' to end):")
    question = input()

    if question.lower() == 'exit':
        break

    # Add user question to conversation history
    conversation_history.append({"role": "user", "content": question})

    response = openai.ChatCompletion.create(
        model="gpt-3.5-turbo",
        messages=conversation_history,
        max_tokens=256,
        n=1,
        stop=None,
        temperature=0.7,
        stream=True
    )

    for chunk in response:
        content = chunk["choices"][0]["delta"].get("content", "")
        print(content, end="", flush=True)

    # Add API response to conversation history
    conversation_history.append({"role": "system", "content": content})

    
print("Conversation ended.")

Context

 
""" You can add a context message at the start of the conversation 
in order to instruct the model. 
"""

import openai
import os, sys

# Setup OpenAI API key
openai.api_key = os.environ.get("OPENAI_API_KEY")

# Initialize conversation history
conversation_history = []

# Context for keeping answers short
context_message = {
    "role": "system",
    "content": "System: Please keep the answers short"
}
conversation_history.append(context_message)

# Question stream
questions = [
    "What is Flask? Keep the answers short.",
    "What's the current version?"
]

for question in questions:
    print("\nQuestion:", question)

    # Add user question to conversation history
    conversation_history.append({"role": "user", "content": question})

    response = openai.ChatCompletion.create(
        model="gpt-3.5-turbo",
        messages=conversation_history,
        max_tokens=64,
        n=1,
        stop=None,
        temperature=0.7,
        stream=True
    )

    for chunk in response:
        content = chunk["choices"][0]["delta"].get("content", "")
        print(content, end="", flush=True)

    # Add API response to converstion history
    conversation_history.append({"role": "system", "content": content})

"""
    Question: What is Flask? Keep the answers short.
    Flask is a lightweight web framework for Python.
    Question: What's the current version?
    The current version of Flask is 1.1.2
"""

🚀 AI Agent

 
""" Unlike traditional scripts that follow fixed instructions, 
AI agents can analyze input dynamically, decide what to do next 
based on that prior context and adapt to new situations. 
"""

import openai
import os, sys

from dotenv import load_dotenv
load_dotenv()  

# Setup OpenAI API key
openai.api_key = os.getenv("OPENAI_API_KEY")

# Query openai
def query_chatgpt(prompt):
    print("\n" + prompt)

    response = openai.ChatCompletion.create(
        model="gpt-4.1",
        messages=[{"role": "user", "content": prompt}],
    )

    return response['choices'][0]['message']['content'].strip()

# Input
topic = "function"

languages = {
    "Python": f"Explain the '{topic}' topic in Python. ",
    "PHP": f"Explain the concept similar to {topic} in PHP. ",
    "Java": f"Explain the equivalent concept of {topic} in Java. ",
}

context = "Keep the answer short, without examples."

for lang, prompt in languages.items():

    summary = query_chatgpt("--- " + prompt + context)
    
    # AI Agent part 
    # The agent decides what to do next based on results
    prompt_2 = f"How difficult is the topic '{topic}' in {lang} for beginners? Just reply with a number, 1 to 5."
    difficulty = query_chatgpt(prompt_2)

    # Output
    print("Difficulty: " + difficulty + "\n")
    print(summary)

    if int(difficulty) >= 3:
        code = query_chatgpt(f"Give a code example of {topic} in {lang}")
        qna = query_chatgpt(f"Create 3 beginner questions and answers about {topic} in {lang}.")

        # Output
        print(code)
        print(qna)

"""
    --- Explain the 'function' topic in Python. 
    Keep the answer short, without examples.

    How difficult is the topic 'function' in Python for beginners? 
    Just reply with a number, 1 to 5.
    Difficulty: 2

    In Python, a function is a reusable block of code that performs a specific task. 
    Functions help organize code, make it more readable, and allow code reuse ...

    --- Explain the concept similar to function in PHP. 
    Keep the answer short, without examples.

    How difficult is the topic 'function' in PHP for beginners? 
    Just reply with a number, 1 to 5.
    Difficulty: 2

    In PHP, the concept similar to a function is called a **function** itself. 
    A function is a reusable block of code designed to perform a specific task, ...

    --- Explain the equivalent concept of function in Java. 
    Keep the answer short, without examples.

    How difficult is the topic 'function' in Java for beginners? 
    reply with a number, 1 to 5.
    Difficulty: 2

    In Java, the equivalent concept of a function is called a **method**. 
    Methods are blocks of code defined within a class that perform specific tasks ...
"""

🚀 Deploy AI Agent

 
""" AI Agent that orchestrates Git and FTP operations based on natural language input. 
The agent that can interpret your commands like: 

 - "Export all differences to FTP and GitHub"
 - "Push only python repo updates"
 - "Sync algorithms and PHP pages"
"""

from openai import OpenAI
import os
import subprocess
import json
import datetime
import sys
import sqlite3

from dotenv import load_dotenv
load_dotenv()  

# OpenAI Client (new-style SDK; the key is read from the environment)
client = OpenAI(api_key=os.getenv("OPENAI_API_KEY"))

# Define valid repositories and their paths
REPOS = {
    "python":       "/var/www/refresh.local/refresh.ro/Application/github/python-pages/",
    "algorithms":   "/var/www/refresh.local/refresh.ro/Application/github/algorithms-pages/",
    "php":          "/var/www/refresh.local/refresh.ro/Application/github/php-pages/",
    "mlearning":    "/var/www/refresh.local/refresh.ro/Application/github/mlearning-pages/",
    "java":         "/var/www/refresh.local/refresh.ro/Application/github/java-pages/"
}

# Define base FTP path and credentials (preferably load from environment)
FTP_BASE = os.getenv("FTP_BASE")
FTP_USER = os.getenv("FTP_USER")
FTP_PASS = os.getenv("FTP_PASS")

# SQLite persistence.
# Default CURR_DIR to "" so a missing variable yields a relative path
# instead of crashing at import time (None + str raises TypeError).
DB_PATH = os.getenv("CURR_DIR", "") + "mlearning-pages/main/packages/openai/deploy_ai_agent/prompt_cache.db"

def init_db():
    """Initialize the SQLite DB and create the cache table if it does not exist."""
    conn = sqlite3.connect(DB_PATH)
    try:
        # Connection.execute creates the cursor implicitly
        conn.execute("""
            CREATE TABLE IF NOT EXISTS prompt_cache (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                prompt TEXT UNIQUE,
                response TEXT
            )
        """)
        conn.commit()
    finally:
        # Close even when the DDL fails, so no connection is leaked
        conn.close()

def get_cached_response(prompt):
    """Return the cached response text for *prompt*, or None when not cached."""
    conn = sqlite3.connect(DB_PATH)
    try:
        cursor = conn.execute(
            "SELECT response FROM prompt_cache WHERE prompt = ?", (prompt,)
        )
        row = cursor.fetchone()
        return row[0] if row else None
    finally:
        # Close even when the query fails, so no connection is leaked
        conn.close()

def store_response(prompt, response):
    """Persist one prompt/response pair; a duplicate prompt is silently skipped."""
    conn = sqlite3.connect(DB_PATH)
    try:
        conn.execute(
            "INSERT INTO prompt_cache (prompt, response) VALUES (?,?)",
            (prompt, response),
        )
        conn.commit()
    except sqlite3.IntegrityError:
        # The prompt column is UNIQUE - keep the existing entry as-is
        pass
    finally:
        conn.close()

def get_action_plan(natural_language_cmd):
    """Use OpenAI to interpret the user command and return repo and ftp instructions.

    Returns the parsed plan dict ({"git": [...], "ftp": [...]}) or None
    when the model's reply is not valid JSON. Replies are cached in SQLite
    keyed by the raw command text.
    """

    system_prompt = f"""
        You are an AI agent that converts deployment commands into structured JSON instructions.
        
        Valid repositories:
        {','.join(REPOS.keys())}

        Return a JSON object with:
        - "git": list of repositories to update (subset of the valid ones)
        - "ftp": list of directories to upload via FTP (same names)

        Examples:
        User: Export only python repo differences to GitHub
        Response: {{ "git": ["python"], "ftp": [] }}

        User: Upload java and php to FTP only
        Response: {{ "git": [], "ftp": ["java", "php"] }}

        User: Export all differences to FTP and GitHub
        Response: {{ 
            "git": ["python", "algorithms", "php", "mlearning", "java"], 
            "ftp": ["python", "algorithms", "php", "mlearning", "java"] 
        }}
    """

    cached = get_cached_response(natural_language_cmd)
    if cached:
        print("? Using cached response from SQLite")
        response_text = cached
    else:
        print("? Sending to OpenAI...")
        response = client.chat.completions.create(
            model="gpt-4.1",
            messages=[
                {"role": "system", "content": system_prompt.strip()},
                {"role": "user", "content": f"Command: {natural_language_cmd}"}
            ]
        )
        response_text = response.choices[0].message.content.strip()
        store_response(natural_language_cmd, response_text)

    # Models sometimes wrap JSON in markdown code fences - strip them
    # before parsing (e.g. ```json ... ```).
    if response_text.startswith("```"):
        response_text = response_text.strip("`").strip()
        if response_text.startswith("json"):
            response_text = response_text[4:].strip()

    # Parse json
    try:
        return json.loads(response_text)
    except json.JSONDecodeError as e:
        print("Error parsing OpenAI response:", e)
        print("Raw response:", response_text)
        return None

def perform_git(repo_name):
    """Pull, commit, and push changes in the specific repo."""
    repo_path = REPOS[repo_name]
    print(f"? Updating Github repo: {repo_name}")
    # Scope each command with cwd= instead of os.chdir, so the
    # process-wide working directory is not mutated as a side effect.
    subprocess.run(["git", "pull", "origin", "main", "--force"], cwd=repo_path)
    subprocess.run(["git", "add", "."], cwd=repo_path)
    subprocess.run(["git", "commit", "-am", f"{repo_name}-pages update"], cwd=repo_path)
    subprocess.run(["git", "push", "origin", "main"], cwd=repo_path)


def get_today_date():
    """Return today's date as 'YYYY-MM-DD' (used in git's @{date} ref syntax)."""
    # Use the module-level `import datetime` instead of re-importing
    # the datetime class on every call.
    return datetime.date.today().strftime('%Y-%m-%d')

def get_changed_files(repo_path):
    """Return the list of files changed today (per git) inside *repo_path*.

    Returns [] when git fails (e.g. no reflog entry for today's @{date}).
    """
    try:
        result = subprocess.run(
            ["git", "diff", "--stat", f"@{{{get_today_date()}}}", "--diff-filter=ACRMRT", "--name-only"],
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            text=True,
            check=True,
            # cwd= scopes the command to the repo without mutating the
            # process-wide working directory (os.chdir side effect removed)
            cwd=repo_path,
        )
        # splitlines() avoids the [''] artifact split('\n') yields on empty output
        return [f for f in result.stdout.splitlines() if f.strip()]
    except subprocess.CalledProcessError as e:
        print("Git diff failed:", e.stderr)
        return []

def perform_ftp(repo_name):
    """Upload the files changed today in *repo_name* to the matching FTP path."""
    local_repo_path = REPOS[repo_name]
    remote_path = f"{FTP_BASE}{repo_name}-pages"

    # Fixed typo in the user-facing message ("Uploding" -> "Uploading")
    print(f"? Uploading {repo_name} files to FTP ...")

    changed_files = get_changed_files(local_repo_path)

    if not changed_files:
        print("No changed files to upload")
        return

    for rel_path in changed_files:
        local_file = os.path.join(local_repo_path, rel_path)
        remote_file = f"{remote_path}/{rel_path}"

        # Skip if file doesn't exist (deleted, moved, etc.)
        if not os.path.isfile(local_file):
            continue

        print(f"- {rel_path}")
        # NOTE(security): passing credentials on the command line exposes
        # them to other local users via the process list; prefer curl's
        # --netrc / .netrc file for the FTP login.
        subprocess.run(["curl",  "-T", local_file, remote_file, "--user", f"{FTP_USER}:{FTP_PASS}"])


def main():
    """Entry point: read a command, build an action plan, run FTP then git tasks."""
    init_db()

    # Take the command from argv when given, otherwise prompt interactively
    if len(sys.argv) > 1:
        # e.g. deployai "Sync my php repo"
        user_command = sys.argv[1].strip()
    else:
        user_command = input("What should I do? \n> ").strip()

    action_plan = get_action_plan(user_command)

    if not action_plan:
        print("No valid action plan. Aborting.")
        return

    print(f"Action plan: {action_plan}")

    # Only act on repository names the agent is allowed to touch
    for target in action_plan.get("ftp", []):
        if target in REPOS:
            perform_ftp(target)

    for target in action_plan.get("git", []):
        if target in REPOS:
            perform_git(target)

    print("✅ All tasks completed.")


if __name__ == '__main__':
    main()





References