import os
from fastapi import Depends, FastAPI,Form, Request, HTTPException
import re
from datetime import datetime
from pydantic import BaseModel
from typing import Optional
from dotenv import load_dotenv
from openai import OpenAI
from fastapi import HTTPException, status
from fastapi.encoders import jsonable_encoder
from fastapi.exceptions import RequestValidationError
from fastapi.responses import JSONResponse
from fastapi.middleware.cors import CORSMiddleware
from sqlalchemy.orm import Session
import db_config.database as db
from db_config.models import Courses
import db_config.schemas as schemas


from knowledge_storm import (
    STORMWikiRunnerArguments,
    STORMWikiRunner,
    STORMWikiLMConfigs,
    truncate_filename
)
from storm.knowledge_storm.lm import OpenAIModel
import zipfile
import uuid
import markdown
import json
from xml.etree.ElementTree import Element, SubElement, ElementTree
from fastapi.responses import FileResponse
from dependencies.helper import (
    send_email,
    read_txt_file,
    read_json_file,
    parse,
    errorResponse,
    successResponse,
    construct_citation_dict_from_article,
    create_manifest,
    generate_resources_from_folder
)
from knowledge_storm.rm import (
    SerperRM
)
# Pull configuration from the environment (.env file supported via python-dotenv).
load_dotenv()

OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY")
APP_URL = os.environ.get("APP_URL")
OUTPUT_FOLDER = os.environ.get("OUTPUT_FOLDER", "./result")

# Working directories for package assembly (confirm exact usage further down the file).
working_dir = "imscc_temp"
content_dir = "wiki_content"

# Lightweight e-mail shape validation pattern.
EMAIL_REGEX = r'^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+$'

# Model identifiers: a cheap conversational model and a higher-quality one.
gpt_35_model_name = "gpt-3.5-turbo"
gpt_4_model_name = "gpt-4o"
# --- STORM language-model and retrieval wiring -------------------------------
# The cheap GPT-3.5 model drives the simulated research conversation; the
# GPT-4o model handles outline generation, article generation, and polishing
# (polishing gets the largest token budget).
openai_kwargs = {
    # Reuse the key already loaded above instead of re-reading the environment,
    # so every consumer in this module sees the same value.
    "api_key": OPENAI_API_KEY,
    "temperature": 1.0,
    "top_p": 0.9,
}
lm_configs = STORMWikiLMConfigs()
ModelClass = OpenAIModel

# Conversation-simulation roles: short outputs, cheaper model.
conv_simulator_lm = ModelClass(model=gpt_35_model_name, max_tokens=500, **openai_kwargs)
question_asker_lm = ModelClass(model=gpt_35_model_name, max_tokens=500, **openai_kwargs)

# Content-generation roles: higher-quality model.
outline_gen_lm = ModelClass(model=gpt_4_model_name, max_tokens=400, **openai_kwargs)
article_gen_lm = ModelClass(model=gpt_4_model_name, max_tokens=700, **openai_kwargs)
article_polish_lm = ModelClass(model=gpt_4_model_name, max_tokens=4000, **openai_kwargs)

lm_configs.set_conv_simulator_lm(conv_simulator_lm)
lm_configs.set_question_asker_lm(question_asker_lm)
lm_configs.set_outline_gen_lm(outline_gen_lm)
lm_configs.set_article_gen_lm(article_gen_lm)
lm_configs.set_article_polish_lm(article_polish_lm)

# Standalone GPT-3.5 handle, kept module-level for use elsewhere in the file.
model = OpenAIModel(api_key=OPENAI_API_KEY, model=gpt_35_model_name)

engine_args = STORMWikiRunnerArguments(output_dir=OUTPUT_FOLDER)

# Serper-backed web search used as STORM's retrieval module.
rm = SerperRM(
    serper_search_api_key=os.getenv("SERPER_API_KEY"),
    query_params={"autocorrect": True, "num": 10, "page": 1},
)
runner = STORMWikiRunner(engine_args, lm_configs, rm)


# Initialize FastAPI app
app = FastAPI(title="Sandstorm: AI Assisted Content Generator")

allow_origins=[os.getenv("FRONT_APP_URL")]
# Add CORS middleware
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],  # Replace with your frontend URL
    allow_credentials=True,
    allow_methods=["*"],  # Allows all methods like GET, POST, PUT, etc.
    allow_headers=["*"],  # Allows all headers
)