
import ast
from django.views import View
from django.db.models import Q
from django.utils import timezone
from django.utils.dateparse import parse_datetime

from apps.core.utils import calculate_priority_score, FTPConnector
from apps.channels.models import Adbreak, ChannelZone, EPGProgram
from apps.campaigns.models import Campaigns, Campaignairtimelog, CampaignPriorityScores, Adspots, CampaignTimeIntervals

from apps.core.utils import check_user
from rest_framework.views import APIView

class VerifsStaticInsert(View):
    """Upload view that parses an SCTE 118-3 ``.ver`` verification file and
    stores each spot's air information as a ``Verifs`` row.
    """

    template_name = "verifs/index.html"

    def get(self, request):
        """Render the upload form."""
        return render(request, self.template_name, {})

    def post(self, request):
        """Parse an uploaded ``.ver`` XML file and persist its spots.

        Returns a JSON payload with the parsed rows on success, or a
        ``Failed`` status with an explanatory message otherwise.
        """
        if "verifs_file" not in request.FILES:
            return JsonResponse({"status": "Failed", "message": "No file uploaded."})

        verifs_file = request.FILES["verifs_file"]
        if not verifs_file.name.endswith(".ver"):
            return JsonResponse({"status": "Failed", "message": "Please upload a valid Verifs file."})

        try:
            import xml.etree.ElementTree as ET

            # The trailing "-<number>" of the file name identifies the
            # verifs number (e.g. "playlist-3.ver" -> "3").
            last_part = os.path.splitext(verifs_file.name)[0].split('-')[-1]

            tree = ET.parse(verifs_file)
            root = tree.getroot()

            results = []
            namespace = {"ns": "http://www.scte.org/schemas/118-3/201X"}

            # These attributes live on the document root, so read them once
            # instead of once per CueMessage as before.
            network_name = root.attrib.get("networkName")
            broadcast_date = root.attrib.get("broadcastDate")

            # BUG FIX: the model is imported as ``ChannelZone``; the previous
            # code referenced the undefined name ``ChannelsZone``.
            channel_zone = ChannelZone.objects.filter(
                networkname=network_name,
                verifs_number=last_part
            ).first()
            # BUG FIX: a missing zone used to surface as an AttributeError.
            if channel_zone is None:
                return JsonResponse({
                    "status": "Failed",
                    "message": f"No channel zone found for network '{network_name}' and verifs number '{last_part}'."
                })

            for cue_message in root.findall("ns:CueMessage", namespace):
                for spot in cue_message.findall("ns:Spot", namespace):
                    air_time = spot.attrib.get("airTime")
                    # Normalise "2024-04-17T10:00:00+01:00" -> "2024-04-17 10:00:00".
                    normalized_air_time = air_time.replace('T', ' ').split('+')[0]

                    row = {
                        "networkname": channel_zone.id_channel.channel_name,
                        "zonename": channel_zone.id_zone_channel,
                        "broadcastDate": broadcast_date,
                        "trafficId": spot.attrib.get("trafficId"),
                        "spotId": spot.attrib.get("spotId"),
                        "airTime": normalized_air_time,
                        "airLength": spot.attrib.get("airLength"),
                        "airStatusCode": spot.attrib.get("airStatusCode"),
                        "revision": spot.attrib.get("revision"),
                    }
                    results.append(row)

                    # NOTE(review): every field is passed as a lookup (no
                    # ``defaults=``), so this behaves like get_or_create —
                    # confirm that is intended before adding ``defaults``.
                    Verifs.objects.update_or_create(
                        networkname=row["networkname"],
                        zonename=row["zonename"],
                        broadcastDate=broadcast_date,
                        trafficId=row["trafficId"],
                        spotId=row["spotId"],
                        airTime=normalized_air_time,
                        airLength=row["airLength"],
                        airStatuscode=row["airStatusCode"],
                        revision=row["revision"],
                        vercomplete="false"
                    )

            return JsonResponse({"status": "Success", "message": "Verifs Collected successfully.", "result": results})
        except Exception as e:
            # Surface the parsing/DB error to the client instead of a 500.
            return JsonResponse({"status": "Failed", "message": str(e)})


def send_vast(request):
    """Fire a single hard-coded test VAST call on a background thread and
    block until it finishes."""
    from threading import Thread

    worker = Thread(
        target=call_vast_api,
        args=("Leila", "Feuilleton", 62, 47399),
    )
    worker.start()
    # Thread.join() always returns None; this print just marks completion.
    print(worker.join())

def get_last_week(date):
    """Return the datetime exactly one week before *date*.

    Args:
        date: timestamp string in ``"%Y-%m-%d %H:%M:%S"`` format.

    Returns:
        datetime.datetime: the parsed timestamp minus seven days.

    Raises:
        ValueError: if *date* does not match the expected format.
        TypeError: if *date* is None (e.g. a missing POST field).
    """
    # Local imports make the helper self-contained regardless of the
    # module header (the original relied on module-level names that were
    # not imported).
    import datetime
    from datetime import timedelta

    timestamp = datetime.datetime.strptime(date, "%Y-%m-%d %H:%M:%S")
    return timestamp - timedelta(weeks=1)

class PlaylistVast(APIView):
    """Look up last week's SFR audience volume and the EPG programme that
    aired at the same moment, for the posted schedule timestamp."""

    def post(self, request):
        """Compute last week's audience volume and programme.

        Expects POST field 'ads_schedule' ("%Y-%m-%d %H:%M:%S").
        Returns the shifted timestamp as JSON.
        """
        last_week = get_last_week(request.POST.get('ads_schedule'))
        last_week_day = last_week.strftime('%Y-%m-%d')
        last_week_minut = last_week.strftime('%H:%M:00')

        # Audience volume for the same minute one week ago.
        try:
            analytics_data = Sfr_analytics.objects.get(
                day=last_week_day,
                minute=last_week_minut,
                sfr_channel_name="2M Maroc"
            )
            # NOTE(review): the 4_500_000 / 17 scaling factor is
            # undocumented — confirm its origin before changing it.
            total_volume = round(float(analytics_data.purcent) * 4500000 / 17)
        except Sfr_analytics.DoesNotExist:
            total_volume = 0
            print({"message": "SFR Not Found"})

        # Programme airing at that moment one week ago.
        last_week_epg = f"{last_week_day} {last_week_minut}"
        # BUG FIX: ``.first()`` returns None instead of raising
        # ``Epg.DoesNotExist``, so the old ``except`` clause could never
        # fire and an AttributeError escaped instead.
        epg_data = Epg.objects.filter(
            start_time__lte=last_week_epg,
            end_time__gte=last_week_epg
        ).first()
        if epg_data is None:
            print({"message": "EPG Not Found"})
        else:
            print("EPG: ", epg_data.emission_name)

        return JsonResponse({"data": last_week})

def alma_callback(url, headers, reply):
    """GET a tracking URL and store the response body on *reply*.

    Returns the ``requests`` response on HTTP 200; otherwise ``None``
    and *reply* is left untouched.
    """
    response = requests.get(url, headers=headers)
    if response.status_code != 200:
        return None
    # Persist the callback body as the reply's status.
    reply.status = response.text
    reply.save()
    return response

def answer_Alma(request):
    """Fire all pending tracking-complete callbacks for 2024-04-17.

    NOTE(review): the date is hard-coded — presumably a one-off backfill;
    confirm before wiring this view to a schedule.
    """
    pending = VastResponse.objects.filter(
        datetime_timestamp__icontains='2024-04-17', status=None)

    headers = {
        'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/103.0.0.0 Safari/537.36'
    }

    results = []
    if pending:
        # Fan the HTTP callbacks out over a thread pool (I/O bound).
        with ThreadPoolExecutor(max_workers=20) as executor:
            futures = [
                executor.submit(alma_callback, reply.tracking_complete,
                                headers=headers, reply=reply)
                for reply in pending
            ]
            results = [future.result() for future in futures]

    # BUG FIX: a Django view must return an HttpResponse; the original
    # returned None, which raises a ValueError in the framework.
    return JsonResponse({"processed": len(results)})

def update_vast_response_v2(request):
    """Kick off the Celery ``update_vast`` task in the background."""
    from .tasks import update_vast

    # BUG FIX: the original built an unused ``headers`` dict, printed a
    # debug string, and returned None — a Django view must return an
    # HttpResponse.
    result = update_vast.delay()
    return JsonResponse({"status": "scheduled", "task_id": str(result.id)})

def update_vast_response(request):
    """Replay the Lego campaign impression links and log each request.

    For every tracking URL, ``verify_replies`` HTTP GETs are issued through
    a thread pool; each response is recorded both as a ``Trackinglink`` row
    and as a line in a CSV report.
    """
    import csv

    headers = {
        'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/103.0.0.0 Safari/537.36'
    }

    def make_request_lego(url):
        # Issue a single GET against the tracking URL and return the response.
        response = requests.get(url=url, headers=headers)
        print(response.status_code)
        return response

    # Number of requests to replay per link.
    verify_replies = 1

    # Campaign spot -> DoubleClick impression-tracking URLs.
    tracking_links = {
        "City": [
            "https://ad.doubleclick.net/ddm/trackimp/N2359742.5267928STAMP0/B32770836.406082389;dc_trk_aid=598249260;dc_trk_cid=223908355;ord=[timestamp];dc_lat=;dc_rdid=;tag_for_child_directed_treatment=;tfua=;gdpr=$%7BGDPR%7D;gdpr_consent=$%7BGDPR_CONSENT_755%7D;ltd=;dc_tdv=1",
        ],
        "Ninja-GO": [
            "https://ad.doubleclick.net/ddm/trackimp/N2359742.5267928STAMP0/B32609683.406115909;dc_trk_aid=598248921;dc_trk_cid=223962908;ord=[timestamp];dc_lat=;dc_rdid=;tag_for_child_directed_treatment=;tfua=;gdpr=$%7BGDPR%7D;gdpr_consent=$%7BGDPR_CONSENT_755%7D;ltd=;dc_tdv=1",
        ],
        "Technic": [
            "https://ad.doubleclick.net/ddm/trackimp/N2359742.5267928STAMP0/B32623870.406579509;dc_trk_aid=598397817;dc_trk_cid=224003872;ord=[timestamp];dc_lat=;dc_rdid=;tag_for_child_directed_treatment=;tfua=;ltd=;dc_tdv=1",
        ],
    }

    csv_filename = "/var/www/html/DAI27/media/Vast-3.csv"
    # BUG FIX: the original nested two ThreadPoolExecutors and opened the
    # CSV twice ('a' then 'w'); only the inner executor/file were ever
    # used, so the redundant outer pair is removed.
    with ThreadPoolExecutor(max_workers=100) as executor, \
            open(csv_filename, 'w', newline='') as csvfile:
        writer = csv.writer(csvfile)
        writer.writerow(['Link', 'Status Code', 'Number of Requests'])

        for spot, urls in tracking_links.items():
            for url in urls:
                # ``verify_replies`` futures per link.
                futures = [executor.submit(make_request_lego, url)
                           for _ in range(verify_replies)]

                request_count = 0
                for future in futures:
                    try:
                        res = future.result()
                        request_count += 1

                        # Persist the result for reporting.
                        Trackinglink.objects.create(
                            spot=spot,
                            url=url,
                            status_code=res.status_code,
                            request_count=request_count
                        )

                        writer.writerow([url, res.status_code, request_count])
                    except Exception as e:
                        # BUG FIX: re-raising here aborted the whole replay
                        # on the first failure; log and continue instead.
                        print(f"Error processing request: {e}")

    # BUG FIX: a Django view must return an HttpResponse; the original
    # returned None.
    return JsonResponse({"status": "done"})

def run_task_view(request):
    """Queue the ``sample_task`` Celery job and acknowledge immediately."""
    from .tasks import sample_task

    sample_task.delay()  # queued; executes asynchronously in a worker
    return HttpResponse("Task is running in the background!")

def update_vast_response_v3(request):
    """Replay today's pending VAST impression callbacks and store the
    response bodies on each ``VastResponse`` row."""
    from datetime import datetime

    # Today's date string, used to select rows created today.
    report_date = datetime.now().strftime('%Y-%m-%d')

    # Rows from today whose tracking/impression callbacks were never answered.
    verify_replies = VastResponse.objects.filter(
        datetime_timestamp__icontains=report_date,
        tracking_start_status=None,
        tracking_firstquartile_status=None,
        tracking_midpoint_status=None,
        tracking_thirdquartile_status=None,
        impression_sprintserve_status=None,
        impression_double_click_status=None)

    headers = {
        'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/103.0.0.0 Safari/537.36'
    }

    def make_requests(url, reply):
        # GET the callback URL and store the body in the matching status
        # field of *reply*, dispatching on a marker substring of the URL.
        response = requests.get(url=url, headers=headers)
        # BUG FIX: ``!= None`` replaced with ``is not None`` (PEP 8); note
        # ``response.text`` is always a str, so this branch always runs.
        if response.text is not None:
            if "js_start" in url:
                reply.tracking_start_status = response.text
            elif "js_first_quartile" in url:
                reply.tracking_firstquartile_status = response.text
            elif "js_midpoint" in url:
                reply.tracking_midpoint_status = response.text
            elif "js_third_quartile" in url:
                reply.tracking_thirdquartile_status = response.text
            elif "vast_impression" in url:
                reply.impression_sprintserve_status = response.text
            elif "doubleclick" in url:
                reply.impression_double_click_status = response.text
            elif "js_complete" in url:
                print("Check: ", response.text)
                reply.tracking_completed_status = response.text
            else:
                # Fallback: treat any other URL as a DoubleClick impression.
                reply.impression_double_click_status = response.text
            reply.save()

    # Fan out over a thread pool; only the two impression URLs are replayed
    # (the quartile-tracking URLs were deliberately disabled).
    with ThreadPoolExecutor() as executor:
        futures = {
            executor.submit(make_requests, url, reply): url
            for reply in verify_replies
            for url in [
                reply.impression_sprintserve,
                reply.impression_double_click,
            ]
        }

    # BUG FIX: a Django view must return an HttpResponse; the original
    # returned None.
    return JsonResponse({"replayed": len(futures)})

class VastReport(APIView):
    """Dashboard endpoints for VAST impression reporting."""

    def get(self, request):
        """Render the report page with today's and yesterday's impression
        counts for the 'Too Good To Go' campaigns."""
        from datetime import datetime
        from .models import VastResponseGo

        today = datetime.now().strftime("%Y-%m-%d")

        # NOTE(review): Campaigns.objects.get raises DoesNotExist when a
        # name is missing — these names are assumed to exist in the DB.
        campaigns_list = ["Too Good To Go 30s", "Too Good To Go 15s"]
        campaigns = [Campaigns.objects.get(name=name) for name in campaigns_list]

        vast_total_impressions = VastResponseGo.objects.filter(
            datetime_timestamp__contains=today,
            impression_sprintserve_status__isnull=False,
            ).count()

        # Yesterday's impressions, for day-over-day comparison.
        yesterday = (datetime.now() - timedelta(days=1)).strftime("%Y-%m-%d")
        prev_vast_total_impression = VastResponseGo.objects.filter(
            datetime_timestamp__contains=yesterday,
            impression_sprintserve_status__isnull=False,
            ).count()

        return render(request, "core/vast_report.html", {
            "campaigns": campaigns,
            "vast_impression": vast_total_impressions,
            "prev_impression": prev_vast_total_impression})

    def post(self, request, format=None):
        """Return per-day campaign impression data for a posted date range.

        Falls back to the last 7 days when 'start'/'end' are posted empty.
        """
        from datetime import datetime

        start = request.POST.get("start", datetime.now().strftime("%Y-%m-%d"))
        end = request.POST.get("end", datetime.now().strftime("%Y-%m-%d"))
        if start and end:
            result = self.get_campaigns(start=start, end=end)
            return JsonResponse(result, safe=False)

        # Reached only when the client posts empty 'start'/'end' values.
        # BUG FIX: removed an unreachable print() after the return and the
        # unused Count/TruncDate imports.
        today = datetime.now().strftime("%Y-%m-%d")
        last_week = (datetime.now() - timedelta(days=7)).strftime("%Y-%m-%d")
        results = self.get_campaigns(start=last_week, end=today)
        return JsonResponse(results, safe=False)

    def get_campaigns(self, start, end):
        """Group VastResponse rows by day and attach spot impressions.

        Args:
            start, end: inclusive 'YYYY-MM-DD' date bounds.

        Returns:
            list[dict]: one entry per day with 'date', 'count', 'campaigns'.
        """
        from django.db.models import Count
        from django.db.models.functions import TruncDate

        vast_queryset = VastResponse.objects.filter(
                datetime_timestamp__date__gte=start,
                datetime_timestamp__date__lte=end
            )

        # One row per calendar day with its response count.
        vast_data_grouped = vast_queryset.annotate(date=TruncDate('datetime_timestamp')) \
                                        .values('date') \
                                        .annotate(date_count=Count('date')) \
                                        .order_by('date')

        return [
            {
                'date': group['date'],
                'count': group['date_count'],
                'campaigns': Adspots().get_spots_impressions(start_date=group['date']),
            }
            for group in vast_data_grouped
        ]

class VastChart(APIView):
    """Render the VAST call page for the 'Too Good To Go' campaigns."""

    def get(self, request):
        wanted_names = ["Too Good To Go 30s", "Too Good To Go 15s"]
        # One DB lookup per fixed campaign name.
        selected = [Campaigns.objects.get(name=n) for n in wanted_names]
        print(selected)
        return render(request, "core/vast_call.html", {"campaigns": selected})

class LoadCampaign(APIView):
    """Return the ad-spot filenames belonging to a campaign as JSON."""

    def get(self, request):
        name = request.GET.get("campaign_name")

        try:
            campaign = Campaigns.objects.get(name=name)
        except Campaigns.DoesNotExist:
            return JsonResponse({"error": "Campaign not found"}, status=404)

        # Only the filename field is exposed; extend values(...) as needed.
        spot_rows = Adspots.objects.filter(id_campaign=campaign).values(
            'filename'
        )
        return JsonResponse(list(spot_rows), safe=False)

# def vastresponse(request):
#     # query = Adspots().get_impressions(start_date="2024-10-29")
#     query = Adspots().get_spots_impressions(start_date="2024-11-01")
#     return JsonResponse(query,safe=False)

class CallVast(APIView):
    """Trigger background VAST calls for a selected campaign/spot."""

    def get(self, request):
        """Render the VAST call form."""
        return render(request, "core/vast_call.html", {"campaigns": self.get_campaigns()})

    def post(self, request):
        """Schedule ``num_request`` VAST calls for the posted campaign.

        Expects POST fields: 'campaign_id' (campaign name), 'adspots'
        (spot filename) and 'num_request' (positive integer). Invalid or
        missing input re-renders the form without scheduling anything.
        """
        from .tasks import call_vast

        campaign_name = request.POST.get("campaign_id")
        adspot = request.POST.get("adspots")
        # BUG FIX: ``int(None)`` / ``int('')`` used to raise before any
        # validation ran; treat unparsable input as zero requests.
        try:
            num_request = int(request.POST.get("num_request") or 0)
        except (TypeError, ValueError):
            num_request = 0

        # Campaign name -> SpringServe VAST endpoint.
        link_mapping = {
            "Too Good To Go 30s": "https://tv.springserve.com/vast/881543?w=1920&h=1080&cb={{CACHEBUSTER}}&ip={{IP}}&ua={{USER_AGENT}}&pod_max_dur={{POD_MAX_DUR}}&pod_ad_slots={{POD_AD_SLOTS}}&app_bundle={{APP_BUNDLE}}&app_name={{APP_NAME}}&app_store_url={{APP_STORE_URL}}&did={{DEVICE_ID}}&us_privacy={{US_PRIVACY}}",
            "Too Good To Go 15s": "https://tv.springserve.com/vast/881550?w=1920&h=1080&cb={{CACHEBUSTER}}&ip={{IP}}&ua={{USER_AGENT}}&pod_max_dur={{POD_MAX_DUR}}&pod_ad_slots={{POD_AD_SLOTS}}&app_bundle={{APP_BUNDLE}}&app_name={{APP_NAME}}&app_store_url={{APP_STORE_URL}}&did={{DEVICE_ID}}&us_privacy={{US_PRIVACY}}"
        }

        # BUG FIX: the DB lookup used to run before this guard, raising
        # DoesNotExist for a missing/blank campaign name, and an unmapped
        # name raised KeyError on ``link_mapping[campaign_name]``.
        if campaign_name and adspot and num_request > 0 and campaign_name in link_mapping:
            campaign_obj = Campaigns.objects.get(name=campaign_name)
            print("Campaign name: ", campaign_name)
            print("Campaign END DATE: ", campaign_obj.end_day)

            # Static VAST request parameters for the 2M TV context.
            params = {
                'w': '720',
                'h': '567',
                'content_genre': "Dessin Anime",
                'content_title': "Abtal albihar",
                'language': 'ar-MA',
                'pod_max_dur': 20,
                'channel_name': '2M_TV',
                'country': 'France'
            }
            print("Link: ", link_mapping[campaign_name])
            # Fire-and-forget: the Celery worker performs the HTTP calls.
            call_vast.delay(url=link_mapping[campaign_name],
                            num_requests=num_request,
                            params=params,
                            spot=adspot)

        return render(request, "core/vast_call.html", {'campaigns': self.get_campaigns()})

    def get_campaigns(self):
        """Fetch the fixed list of demo campaigns shown on the form."""
        names = ["City", "Harry_Potter", "Technic", "StarWars"]
        return [Campaigns.objects.get(name=name) for name in names]

def update_report(request):
    """Report how many VAST responses reference the LegoPorsche spot."""
    spot = Adspots.objects.get(filename="LegoPorsche.ts")
    vasts = VastResponse.objects.filter(ad_spot=spot)
    count = vasts.count()
    print(count)
    # BUG FIX: a Django view must return an HttpResponse; the original
    # returned None after printing the count.
    return JsonResponse({"spot": spot.filename, "vast_count": count})


def vast_handling_old(url,headers,params,call_num,spot):
    """Legacy single-request VAST pipeline (superseded by ``vast_handling``).

    Intended flow: GET the VAST document at ``url``, parse the XML, extract
    the ad id / quartile tracking URLs / impression URLs, persist them as a
    ``VastResponse`` row linked to the ``Adspots`` row whose filename is
    ``spot``, then fire the impression pixels.

    NOTE(review): ``vast_response`` is hard-coded to ``None`` below (marked
    "need an edit"), so the entire parsing/persistence branch is currently
    dead code — only the initial HTTP GET executes. Kept for reference.

    Args:
        url: VAST endpoint to request.
        headers: HTTP headers sent with every request (VAST + pixels).
        params: query-string parameters for the VAST request.
        call_num: sequence number of this call within a fan-out.
        spot: ``Adspots.filename`` identifying the ad spot to link.

    Returns:
        ``call_num``, echoed back so callers can correlate results.
    """
    import requests
    from requests.auth import HTTPProxyAuth
    from concurrent.futures import ThreadPoolExecutor
    from urllib3.util import parse_url
    import random
    import datetime
    import os
    import xml.etree.ElementTree as ET

    from bs4 import BeautifulSoup

    # Create a folder with current datetime
    # NOTE(review): folder_path is computed but never created or used.
    current_datetime = datetime.datetime.now().strftime('%Y-%m-%d_%H%M%S')
    folder_path = os.path.join(os.getcwd(), str(current_datetime)+"_alma")
    # get adpots
    # Ninja GO 2401843p2gl.ts"
    # City  LegoMack.ts
    # Technic LegoMack.ts
    # Star Wars 2403484p2gl.ts

    adspot = Adspots.objects.get(filename=spot)
    # print("Adspot: ",adspot.filename)s
    with transaction.atomic():
        response = requests.get(url, headers=headers, params=params)
        if response.status_code == 200:
            vast_response = None #! need an edit

            if vast_response:
                # print("the vast response is :", vast_response)
                root = ET.fromstring(vast_response)
                print(vast_response)

                print("the ROOT is :", root)
                print("the ROOT find is :", root.text)# .attrib.get("Ad"))
                current_time = datetime.datetime.now()
                datetime_timestamp = current_time
                datetime_string = current_time.strftime("%Y-%m-%d %H:%M:%S")
                # print(tracking_start)
                # Extract data from the XML
                # NOTE(review): an ElementTree Element with no children is
                # falsy, so this test should be ``... is not None``. Harmless
                # only while the branch above is dead.
                if root.find(".//Ad"):

                    ad_id = root.find(".//Ad").attrib["id"]
                    tracking_start = root.find(".//Tracking[@event='start']").text
                    tracking_first_quartile = root.find(".//Tracking[@event='firstQuartile']").text
                    tracking_midpoint = root.find(".//Tracking[@event='midpoint']").text
                    tracking_third_quartile = root.find(".//Tracking[@event='thirdQuartile']").text
                    tracking_complete = root.find(".//Tracking[@event='complete']").text
                    # start edit
                    # NOTE(review): unfinished section — impression_double_click
                    # is assigned None twice, ``double_click`` is looked up but
                    # its .text is never extracted, and the assignment below is
                    # a no-op, so both impression fields are saved as None.
                    impression_double_click = None
                    impression_double_click = None
                    impression_sprintserve = None
                    # if root.find(".//Impression[@id='SpringServe_Impression_1']") == None:

                        # Try to get the full element content including CDATA
                        # soup = BeautifulSoup(vast_response)
                    double_click = root.find("Impression")
                    print("DOUBLE CLICK: ",double_click)
                    impression_double_click = impression_double_click
                    print("Empression: ",impression_double_click)

                    # else:
                    #     impression_sprintserve = root.find(".//Impression[@id='SpringServe_Impression_1']")
                    #     impression_double_click = root.find(".//Impression[@id='SpringServe_Impression_3P_1']").text


                    # Create and save a new VASTResponse object

                    vast_obj = VastResponse.objects.create(
                        ad_id=ad_id,
                        tracking_start = tracking_start,
                        tracking_firstquartile=tracking_first_quartile,
                        tracking_midpoint=tracking_midpoint,
                        tracking_thirdquartile=tracking_third_quartile,
                        tracking_complete=tracking_complete,
                        datetime_timestamp=datetime_timestamp,
                        datetime_string=datetime_string,
                        vast_url=url,
                        impression_double_click=impression_double_click,
                        impression_sprintserve=impression_sprintserve,
                        ad_spot=adspot
                    )
                    # save traking links: fire each impression pixel and store
                    # the raw response body on the row.
                    if vast_obj.impression_sprintserve:
                        sprintserve_response = requests.get(impression_sprintserve, headers=headers)
                        vast_obj.impression_sprintserve_status = sprintserve_response.text or None

                    if impression_double_click:
                        double_click_response = requests.get(impression_double_click, headers=headers)
                        vast_obj.impression_double_click_status = double_click_response.text or None

                    vast_obj.save()

                else:
                    # No <Ad> in the document: record an empty-response row
                    # (timestamp + URL only) so the call is still auditable.
                    VastResponse.objects.create(
                            datetime_timestamp=datetime_timestamp,
                            datetime_string=datetime_string,
                            vast_url=url,
                            # ad_spot=adspot
                        )





                print(f'sent request num {call_num}')
            else:
                # NOTE(review): misleading message — this branch is reached
                # when vast_response is falsy even though status_code was 200.
                print(f'Error: Failed to make the VAST request. Status code: {response.status_code}')

    # str_volume = str(total_volume)
    # telegram_message="""
    # ============== ALMA API ==============
    # URL: """+response.url+"""
    #
    # Method: """+str(response.request.method)+"""
    #
    # Headers: """+str(response.request.headers)+"""
    #
    # Number of requests sent: """+str_volume+"""
    # ====================================
    # """
    # send_msg_telegram3(telegram_message)
    #
    # print(f"URL: {response.url}")
    # print(f"Method: {response.request.method}")
    # print(f"Headers: {response.request.headers}")
    # if response.request.body:
    #     print(f"Body: {response.request.body}")

    ## after 10 minutes call to the new function
        # Initialize data as an empty dictionary
    # =============================== SAVING JSON ================
    # json_file_path = '/var/www/html/DAI27/Adtlas_DAI/sync_validation_dates.json'
    # data = {}

    # # Check if the JSON file exists
    # if os.path.exists(json_file_path):
    #     with open(json_file_path, 'r') as json_file:
    #         # Load existing data from the JSON file
    #         data = json.load(json_file)

    #     # Rename 'last_date' to 'previous_date' if it exists
    #     data['previous_date'] = data.get('last_date', None)

    # # Get the current date and time in the specified format and
    # # Update 'last_date' with the current date and time
    # # data['last_date'] = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    # # Add 10 minutes to the current date and time and format it
    # data['last_date'] = (datetime.datetime.now() + timedelta(minutes=10)).strftime('%Y-%m-%d %H:%M')

    # # Write the updated data back to the JSON file
    # with open(json_file_path, 'w') as json_file:
    #     json.dump(data, json_file, indent=4)
    # ================ END SAVING JSON
    return call_num

def vast_handling(url, headers, params, call_num, spot):
    """Fetch a VAST document, persist one VastResponseGo row per <Ad>, and
    fire the impression/tracking pixels.

    Args:
        url: VAST endpoint to fetch (via ``fetch_xml_from_url``).
        headers: HTTP headers forwarded to every pixel request.
        params: accepted for signature compatibility; not used here
            (``fetch_xml_from_url`` takes only the URL).
        call_num: accepted for signature compatibility; not used here.
        spot: ``Adspots.filename`` identifying the ad spot to link.

    Returns:
        None. Returns early (without writing rows) if the fetch or the
        XML parse fails.

    Raises:
        Adspots.DoesNotExist: if ``spot`` does not match an Adspots row.
    """
    from .models import VastResponseGo

    adspot = Adspots.objects.get(filename=spot)

    def _first_tracking(ad_element, event):
        # Text of the first <Tracking event=...> under ad_element, or None.
        _, found = extract_data_by_attributes(ad_element, "Tracking", {"event": event})
        return found[0]["text"] if found else None

    def _impression_by_id(items, impression_id):
        # Text of the <Impression> whose id attribute matches, or None.
        return next(
            (item["text"] for item in items
             if "attributes" in item
             and "id" in item["attributes"]
             and item["attributes"]["id"] == impression_id),
            None,
        )

    with transaction.atomic():
        xml_content = fetch_xml_from_url(url)
        if xml_content is None:
            print("Failed to get content, stopping process")
            return

        root = parse_xml_content(xml_content)
        if root is None:
            print("Failed to parse XML, stopping process")
            return

        datetime_timestamp = datetime.datetime.now()
        datetime_string = datetime_timestamp.strftime("%Y-%m-%d %H:%M:%S")

        ad_components, ad_component_list = extract_data_by_attributes(root, "Ad")

        # Walk elements and their extracted dicts in lockstep (was an
        # index-based range(len(...)) loop).
        for ad_component, ad_component_data in zip(ad_components, ad_component_list):
            attributes = ad_component_data.get("attributes", {})
            if "id" not in attributes:
                continue  # skip <Ad> elements without an id attribute
            ad_id = attributes["id"]

            tracking_start = _first_tracking(ad_component, "start")
            tracking_first_quartile = _first_tracking(ad_component, "firstQuartile")
            tracking_midpoint = _first_tracking(ad_component, "midpoint")
            tracking_third_quartile = _first_tracking(ad_component, "thirdQuartile")
            tracking_complete = _first_tracking(ad_component, "complete")
            # NOTE(review): the next four are extracted but never persisted or
            # pinged — kept to match the original behavior. TODO: store or drop.
            tracking_mute = _first_tracking(ad_component, "mute")
            tracking_unmute = _first_tracking(ad_component, "unmute")
            tracking_pause = _first_tracking(ad_component, "pause")
            tracking_fullscreen = _first_tracking(ad_component, "fullscreen")

            _, impression_list = extract_data_by_attributes(ad_component, "Impression")
            impression = None
            impression_3p = None
            if len(impression_list) == 1:
                impression = impression_list[0]["text"]
            elif len(impression_list) == 2:
                impression = _impression_by_id(impression_list, "SpringServe_Impression_1")
                impression_3p = _impression_by_id(impression_list, "SpringServe_Impression_3P_1")
            else:
                # 0 or 3+ impressions: both stay None and the row is still saved.
                print("Need To Be Handled")

            # Create and save a new VASTResponse object.
            # NOTE(review): field naming is historical — ``impression`` lands on
            # impression_double_click and ``impression_3p`` on
            # impression_sprintserve, exactly as in the original code.
            vast_obj = VastResponseGo.objects.create(
                ad_id=ad_id,
                tracking_start=tracking_start,
                tracking_firstquartile=tracking_first_quartile,
                tracking_midpoint=tracking_midpoint,
                tracking_thirdquartile=tracking_third_quartile,
                tracking_complete=tracking_complete,
                datetime_timestamp=datetime_timestamp,
                datetime_string=datetime_string,
                vast_url=url,
                impression_double_click=impression,
                impression_sprintserve=impression_3p,
                ad_spot=adspot,
            )

            # Fire each available pixel and record the raw response body.
            # (Only the sprintserve field coerces an empty body to None; the
            # others store "" as-is — preserved from the original.)
            if impression_3p:
                vast_obj.impression_sprintserve_status = (
                    requests.get(impression_3p, headers=headers).text or None
                )
            if impression:
                vast_obj.impression_double_click_status = requests.get(
                    impression, headers=headers).text
            if tracking_complete:
                vast_obj.tracking_completed_status = requests.get(
                    tracking_complete, headers=headers).text
            if tracking_first_quartile:
                vast_obj.tracking_firstquartile_status = requests.get(
                    tracking_first_quartile, headers=headers).text
            if tracking_midpoint:
                vast_obj.tracking_midpoint_status = requests.get(
                    tracking_midpoint, headers=headers).text
            if tracking_third_quartile:
                vast_obj.tracking_thirdquartile_status = requests.get(
                    tracking_third_quartile, headers=headers).text

            vast_obj.save()

# VAST CALL to ALMA
def call_vast_api(request):
    """Django view: enqueue a Celery task that fires VAST calls to SpringServe.

    Selects one of the hard-coded SpringServe campaign URLs (toggle by
    commenting/uncommenting exactly one ``url`` line below), builds the
    content-targeting params, and dispatches ``call_vast`` asynchronously.

    Fixes: the view previously returned ``None``, which Django rejects with
    ``ValueError: The view ... didn't return an HttpResponse``; it now returns
    a JSON acknowledgement. Dead locals were removed: ``headers`` (never
    forwarded to the task), the unused proxy lists (which embedded plaintext
    credentials), and unused function-local imports.

    Args:
        request: incoming HttpRequest (unused beyond routing).

    Returns:
        JsonResponse with the queued task id, target URL, and request count.
    """
    from django.http import JsonResponse

    from .tasks import call_vast

    # Mapping of 2M programme genres to IAB content-category ids.
    # NOTE(review): currently unused (content_genre is hard-coded below);
    # kept as reference data. Trailing-space keys mirror raw EPG values.
    iab_cat = {
        'Feuilleton': 500001,
        'Magazine': 500005,
        'Mini-Série': 500001,
        'Magazine ': 500005,
        'Mini-Serie': 500001,
        'Serie': 500001,
        'Information': 500011,
        'Magazine Sportif': 500013,
        'Dessin Anime': 500004,
        'Magazine Sportif ': 500013,
        'Telefilm': 500001,
        'Documentaire': 500003,
        'Religieux': 500008,
        'Sport': 500013,
        'Long Metrage': 500002,
        'News': 500011,
        'Long-Metrage': 500002,
        'Des Histoires Et Des Hommes': 500001,
        'Série': 500001,
        'Musique': 500006,
        'Theatre': 500007,
        'Spectacle': 500012,
        'Jeux': 500014,
    }

    # Content-targeting parameters forwarded to the VAST endpoint.
    params = {
        'w': '720',
        'h': '567',
        'content_genre': "Dessin Anime",
        'content_title': "Abtal albihar",
        'language': 'ar-MA',
        'pod_max_dur': 20,
        'channel_name': '2M_TV',
        'country': 'France',
    }

    num_requests = 20

    # *Links to send: uncomment exactly one campaign URL.
    #! Weaber
    # url = "https://tv.springserve.com/vast/769609"

    #! City
    # url = 'https://tv.springserve.com/vast/850575?w=1920&h=1080&cb={{CACHEBUSTER}}&ip={{IP}}&ua={{USER_AGENT}}&pod_max_dur={{POD_MAX_DUR}}&pod_ad_slots={{POD_AD_SLOTS}}&app_bundle={{APP_BUNDLE}}&app_name={{APP_NAME}}&app_store_url={{APP_STORE_URL}}&did={{DEVICE_ID}}&us_privacy={{US_PRIVACY}}'

    #! Ninjago
    url = "https://tv.springserve.com/vast/850576?w=1920&h=1080&cb={{CACHEBUSTER}}&ip={{IP}}&ua={{USER_AGENT}}&pod_max_dur={{POD_MAX_DUR}}&pod_ad_slots={{POD_AD_SLOTS}}&app_bundle={{APP_BUNDLE}}&app_name={{APP_NAME}}&app_store_url={{APP_STORE_URL}}&did={{DEVICE_ID}}&us_privacy={{US_PRIVACY}}"

    #! Technic
    # url = "https://tv.springserve.com/vast/850577?w=1920&h=1080&cb={{CACHEBUSTER}}&ip={{IP}}&ua={{USER_AGENT}}&pod_max_dur={{POD_MAX_DUR}}&pod_ad_slots={{POD_AD_SLOTS}}&app_bundle={{APP_BUNDLE}}&app_name={{APP_NAME}}&app_store_url={{APP_STORE_URL}}&did={{DEVICE_ID}}&us_privacy={{US_PRIVACY}}"

    #! Star Wars
    # url = "https://tv.springserve.com/vast/852369?w=1920&h=1080&cb={{CACHEBUSTER}}&ip={{IP}}&ua={{USER_AGENT}}&pod_max_dur={{POD_MAX_DUR}}&pod_ad_slots={{POD_AD_SLOTS}}&app_bundle={{APP_BUNDLE}}&app_name={{APP_NAME}}&app_store_url={{APP_STORE_URL}}&did={{DEVICE_ID}}&us_privacy={{US_PRIVACY}}"

    # Queue the fan-out asynchronously; .delay returns an AsyncResult.
    task = call_vast.delay(url=url, num_requests=num_requests, params=params)

    return JsonResponse({
        "task_id": getattr(task, "id", None),
        "url": url,
        "num_requests": num_requests,
    })
