from django.contrib import messages
from django.contrib.auth.decorators import login_required
from django.core.paginator import Paginator
from django.db import transaction
from django.db.models import Exists, OuterRef
from django.forms.models import model_to_dict
from django.http import JsonResponse
from django.shortcuts import get_object_or_404, redirect, render
from django.urls import reverse

from .forms import ScrapyForm
from .models import Record, WebsiteXPath
from .services import ScrapingService, SpiderFileService
from .utils import store_records_in_db

@login_required
def home(request):
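    """Paginated, searchable listing of records that have a website.

    The current page and search query are restored from the session (kept
    up to date by ``update_session`` below); the 'scraped' and 'flagged'
    toggles arrive as query-string parameters. Responds with JSON for AJAX
    requests and renders the full template otherwise.
    """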
    # Retrieve session data for pagination and search
    current_page = request.session.get('current_page', 1)
    search_query = request.session.get('search_query', '')
    
    # Read the 'scraped' and 'flagged' toggles from the query string
    scraped = request.GET.get('scraped', 'false') == 'true'
    flagged = request.GET.get('flagged', 'false') == 'true'

    # Exclude records whose 'website' is null or an empty string
    records_list = Record.objects.exclude(website__isnull=True).exclude(website__exact='')

    # Apply search filter if provided
    if search_query:
        records_list = records_list.filter(website__icontains=search_query)

    # Annotate each record with 'is_scraped': whether a WebsiteXPath row
    # exists for it
    records_list = records_list.annotate(
        is_scraped=Exists(WebsiteXPath.objects.filter(record_id=OuterRef('pk')))
    )

    # Apply the 'scraped' toggle
    records_list = records_list.filter(is_scraped=scraped)

    # Apply the 'flagged' toggle: is_scrappable=False marks a record as flagged
    records_list = records_list.filter(is_scrappable=not flagged)

    # Ensure distinct records are retrieved
    records_list = records_list.distinct()

    # Paginate records (10 per page)
    paginator = Paginator(records_list, 10)

    # An explicit ?page= parameter wins; otherwise restore the page number
    # saved in the session
    page_number = request.GET.get('page') or current_page

    # Update session with the current page number
    request.session['current_page'] = page_number

    # Get the page object (get_page clamps invalid page values)
    page_obj = paginator.get_page(page_number)

    # Return JSON response for AJAX requests
    if request.headers.get('x-requested-with') == 'XMLHttpRequest':
        return JsonResponse({
            'records': list(page_obj.object_list.values('id', 'website', 'is_scraped')),
            'has_next': page_obj.has_next(),
            'has_previous': page_obj.has_previous(),
            'current_page': page_obj.number,
            'total_pages': page_obj.paginator.num_pages
        })

    # Render template for non-AJAX requests
    return render(request, 'home.html', {
        'page_obj': page_obj,
        'search_query': search_query,  # Pass the search query to the template
    })


@login_required
def scrape_view(request, id):
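    """Render the scrape form for one record, pre-filling any XPaths that
    were saved for it previously."""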
    if request.method != 'GET':
        return redirect('home')

    try:
        id = int(id)
    except ValueError:
        return render(request, 'error.html', {'message': 'Invalid ID format'})

    # Reset any session data left over from a previous scrape
    request.session.pop('form_data', None)
    request.session.pop('scraped_records', None)

    record = get_object_or_404(Record, id=id)

    # Initial values common to both branches below
    initial = {
        'city': record.city,
        'state_id': record.state_id,
        'website': record.website,
        'id': id,
    }

    # If XPaths were saved for this record previously, pre-fill them too
    website_xpath = WebsiteXPath.objects.filter(record_id=id).first()
    if website_xpath:
        xpath_values = model_to_dict(website_xpath)
        initial.update({
            'municipality_main_tel_xpath': xpath_values['municipality_main_tel_xpath'],
            'building_department_main_email_xpath': xpath_values['building_department_main_email_xpath'],
            'building_department_main_phone_xpath': xpath_values['building_department_main_phone_xpath'],
            'chief_building_official_name_xpath': xpath_values['chief_building_official_name_xpath'],
        })

    form = ScrapyForm(initial=initial)

    # Render the scrape form with relevant data
    return render(request, 'spidersweb/scrape_form.html', {
        'form': form,
        'id': id,
    })


@login_required
def scrapy_submit(request):
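    """Persist the scraped records stashed in the session by view_records,
    then redirect home with a status message."""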
    if request.method == 'POST':
        form_data = request.session.get('form_data')
        scraped_records = request.session.get('scraped_records')

        if form_data and scraped_records:
            # Store the scraped records in the database
            if store_records_in_db(request):
                # Optionally also generate the spider file:
                # file_created = generate_spider_file(request)
                # messages.success(request, f'Form submitted and data stored successfully! Spider file was {"" if file_created else "not "}created.')
                messages.success(request, 'Form submitted and data stored successfully!')
            else:
                messages.error(request, 'Failed to store data in the database.')
        else:
            messages.error(request, 'No scraped data found in the session.')

    
    return redirect('home')


@login_required
def view_records(request):
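    """AJAX endpoint: run the scraper with the submitted XPaths, stash the
    results in the session, and return them alongside the values currently
    stored in the database."""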
    if request.method == 'POST' and request.headers.get('x-requested-with') == 'XMLHttpRequest':
        form = ScrapyForm(request.POST)
        if form.is_valid():
            data = {
                'city': form.cleaned_data['city'],
                'state_id': form.cleaned_data['state_id'],
                'website': form.cleaned_data['website'],
                'chief_building_official_name_xpath': form.cleaned_data['chief_building_official_name_xpath'],
                'building_department_main_email_xpath': form.cleaned_data['building_department_main_email_xpath'],
                'building_department_main_phone_xpath': form.cleaned_data['building_department_main_phone_xpath'],
                'municipality_main_tel_xpath': form.cleaned_data['municipality_main_tel_xpath'],
            }
            
            xpaths = [
                {"chief_building_official_name": form.cleaned_data['chief_building_official_name_xpath']},
                {"building_department_main_email": form.cleaned_data['building_department_main_email_xpath']},
                {"building_department_main_phone": form.cleaned_data['building_department_main_phone_xpath']},
                {"municipality_main_tel": form.cleaned_data['municipality_main_tel_xpath']},
            ]

            service = ScrapingService(
                city=form.cleaned_data['city'],
                state_id=form.cleaned_data['state_id'],
                website=form.cleaned_data['website'],
                xpaths=xpaths
            )

            records = service.getData()

            # Values currently stored in the database, for side-by-side
            # comparison on the client
            old_db_record = Record.objects.filter(
                city=form.cleaned_data['city'],
                state_id=form.cleaned_data['state_id']
            ).values(
                'building_department_main_phone',
                'building_department_main_email',
                'municipality_main_tel',
                'chief_building_official_name'
            ).first()

            # If every extracted field came back empty, the supplied XPaths
            # did not match anything on the page
            if all(value is None or value == "" for value in records.values()):
                return JsonResponse({
                    'message': 'No valid data was extracted. Please try again with the correct XPaths.'
                }, status=200)

            # Stash the form data and scraped values in the session so
            # scrapy_submit can persist them after the user confirms
            request.session['form_data'] = data
            request.session['scraped_records'] = records

            result = {
                'message': f"Processed data for {form.cleaned_data['city']}, {form.cleaned_data['state_id']}",
                'records': records,
                'old_db_record': old_db_record
            }
            return JsonResponse(result)
        else:
            return JsonResponse({'errors': form.errors}, status=400)
    else:
        form = ScrapyForm()
        return render(request, 'spidersweb/scrape_form.html', {'form': form})

@login_required
def check_website(request):
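    """AJAX endpoint: report whether the submitted website URL differs from
    the one stored for the matching record."""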
    if request.method == 'POST' and request.headers.get('x-requested-with') == 'XMLHttpRequest':
        form = ScrapyForm(request.POST)
        if form.is_valid():
            website = form.cleaned_data['website']
            city = form.cleaned_data['city']
            state_id = form.cleaned_data['state_id']
            
            record = Record.objects.filter(city=city, state_id=state_id).first()
            if record and record.website != website:
                return JsonResponse({
                    'needs_update': True,
                    'old_website_url': record.website,
                    'new_website_url': website
                })
            else:
                return JsonResponse({'needs_update': False})
        else:
            return JsonResponse({'errors': form.errors}, status=400)
    return JsonResponse({'error': 'Invalid request'}, status=400)

@login_required
def skip_website(request):
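    """Mark a website as skipped by upserting its WebsiteXPath row, then
    tell the client where to go next."""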
    if request.method == 'POST':
        action = request.POST.get('action')
        if action == 'skip_website':
            start_id = request.POST.get('start_id')
            end_id = request.POST.get('end_id')

            # Extract form data
            city = request.POST.get('city')
            state_id = request.POST.get('state_id')
            website = request.POST.get('website')

            try:
                with transaction.atomic():
                    record = Record.objects.filter(city=city, state_id=state_id).first()

                    # Upsert the WebsiteXPath row, marking the website as skipped
                    WebsiteXPath.objects.update_or_create(
                        city=city,
                        state_id=state_id,
                        website=website,
                        defaults={
                            'skip_website': True,
                            'record_id': record.id
                        }
                    )

                # Default to the home page; when part of a ranged run,
                # advance to the next record instead
                target_url = reverse('home')
                if start_id and end_id:
                    start_id = int(start_id) + 1
                    end_id = int(end_id)
                    target_url = reverse('scrape_view') + f'?start_id={start_id}&end_id={end_id}'

                return JsonResponse({'success': True, 'redirect_url': target_url})
            except Exception as e:
                return JsonResponse({'success': False, 'error': str(e)}, status=400)

    return JsonResponse({'success': False, 'message': 'Invalid request.'}, status=400)


@login_required
def flag_website(request):
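    """Flag a record as not scrappable so it drops out of the default
    listing on the home page."""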
    if request.method == 'POST':
        action = request.POST.get('action')

        if action == 'flag_website':
            city = request.POST.get('city')
            state_id = request.POST.get('state_id')

            try:
                record = Record.objects.filter(city=city, state_id=state_id).first()

                # Mark the record as not scrappable, i.e. flagged
                record.is_scrappable = False
                record.save()

                # Send the client back to the home page
                target_url = reverse('home')
                return JsonResponse({'success': True, 'redirect_url': target_url})

            except Exception as e:
                return JsonResponse({'success': False, 'error': str(e)}, status=400)

    return JsonResponse({'success': False, 'message': 'Invalid request.'}, status=400)

def generate_spider_file(request):
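    """Helper (not a view): create or update the spider file from the form
    data stored in the session. Returns True if spider creation ran."""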

    form_data = request.session.get('form_data')

    if form_data:
        city = form_data['city'] 
        state_id = form_data['state_id']
        website_url = form_data['website']
        xpaths = {
            'building_department_main_phone': form_data['building_department_main_phone_xpath'],
            'building_department_main_email': form_data['building_department_main_email_xpath'],
            'municipality_main_tel': form_data['municipality_main_tel_xpath'],
            'chief_building_official_name': form_data['chief_building_official_name_xpath'],
        }

        # Initialize the service
        spider_service = SpiderFileService(city=city, state_id=state_id, website_url=website_url, xpaths=xpaths)

        # Create or update the spider
        spider_service.create_or_update_spider()

        return True

    return False


@login_required
def update_session(request):
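    """AJAX endpoint: save pagination, search, and toggle state in the
    session so the home view can restore it on the next request."""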
    if request.method == 'POST':
        # Get data from the AJAX request
        page = request.POST.get('page')
        search_query = request.POST.get('search')
        scraped = request.POST.get('scraped', '')
        flagged = request.POST.get('flagged', '')

        # The toggles arrive as strings; map them to real booleans and fall
        # back to None for missing or unexpected values
        scraped_bool = {'true': True, 'false': False}.get(scraped.lower())
        flagged_bool = {'true': True, 'false': False}.get(flagged.lower())

        # Update session variables
        request.session['current_page'] = page
        request.session['search_query'] = search_query
        request.session['is_scraped'] = scraped_bool
        request.session['is_flagged'] = flagged_bool

        # Return success response with current session values for debugging
        return JsonResponse({'status': 'success', 'sessionValues': {
            'current_page': request.session.get('current_page'),
            'search_query': request.session.get('search_query'),
            'is_scraped': request.session.get('is_scraped'),
            'is_flagged': request.session.get('is_flagged')
        }})

    # If the request method is not POST, return a failure response
    return JsonResponse({'status': 'failed'}, status=400)
