
import os
import json
import time
import pytz
import random
import logging
import argparse
import datetime
import schedule
import threading
from math import exp
from numpy import nan
from pathlib import Path
from pprint import pprint
from slugify import slugify
from io import StringIO, BytesIO
from datetime import timedelta  # importing the datetime class here would shadow the datetime module imported above (used as datetime.datetime below)
try:
    from numpy.lib.stride_tricks import broadcast_arrays
except ImportError:
    from numpy import broadcast_arrays

from lxml import etree
import xml.dom.minidom as xml2
import xml.etree.ElementTree as xml

import ftplib
from ftplib import FTP_TLS, error_perm

from rest_framework.views import APIView

from threading import Thread
from concurrent.futures import ThreadPoolExecutor

from django import template
from django.db.models import Max
from django.contrib import messages
from django.db import models, IntegrityError
from django.shortcuts import render, redirect
from django.contrib.sessions.models import Session
from django.core.exceptions import ValidationError
from django.template.loader import render_to_string
from django.contrib.sessions.base_session import AbstractBaseSession
from django.contrib.sessions.backends.db import SessionStore as DBStore
from django.http import HttpResponse, JsonResponse, HttpResponseRedirect
from django.db import transaction

from .models import *

# Our in-house helpers
from .utils import check_user, FTPClient, fetch_xml_from_url, parse_xml_content, extract_data_by_attributes

logger = logging.getLogger(__name__)
# colors = ['#9DBFF9','#FCCC75','#FF5C5C']
colors = ['#C152B6','#EA792D','#499E59','#23A8E2','#974BEF']

def login(request):
    if request.session.get('id_user') and Users.objects.filter(id_user=request.session['id_user']).exists():
        return redirect("")
    return render(request, "DAIManagementApp/login.html")

def dologin(request):
    username = request.POST.get('username')
    password = request.POST.get('password')
    try:
        data = Users.objects.filter(userlogin=username).get(userpass=password)
    except Users.DoesNotExist:
        messages.error(request, 'Something went wrong.')
        return render(request, 'DAIManagementApp/login.html')
    if data:
        request.session['islogged'] = 'yes'
        request.session['id_user'] = str(data.id_user)
        return redirect('/DAIManagement/')
    else:
        messages.error(request, 'Error detected.')
        return render(request, 'DAIManagementApp/login.html')

def logout(request):
    # request.session['islogged'] or ['id_user'] raise KeyError if the given key isn't already in the session.
    try:
        del request.session['islogged']
        del request.session['id_user']
    except KeyError:
        pass
    return redirect("login")

@check_user
def index(request):

    channel = Channels.objects.get(pk = 1)

    dayformat = SfrAnalytics.objects.all().order_by("-id_impression")[0].day

    channels_sfr = list(Channels.objects.filter(id_user=request.session['id_user']).values_list("sfr_channel_name",flat=True))

    val = Sfr_analytics.objects.filter(sfr_channel_name__in=channels_sfr, region='France' ,day=str(dayformat))

    result = []

    channel_sfr = []

    # initialised so the render context below never raises NameError when no channel has data
    labels = []

    purcent = Impressions.objects.get(pk='1')

    for channel in channels_sfr:
        res = val.filter(sfr_channel_name=channel).values_list('minute', 'purcent')
        print("res === ", len(res))
        if len(res) > 0:
            nb = float(purcent.total_users) / float(purcent.market_share_purcent)
            labels = [x[0] for x in res]
            data = [float(x[1]) * nb for x in res]
            result.append(data)
            channel_sfr.append(channel)
        # else :
        #     labels = [x[0] for x in res ]
        #     data = []
        #     channel = []
        #     result.append( data)
        #     channel_sfr.append(channel)

    print("labels === " ,labels)

    color = colors

    channels = Channels.objects.filter(id_user = request.session['id_user']).values_list('channel_name')
    channels = [x[0] for x in channels]

    # #channels_id =Channels.objects.filter(id_user = request.session['id_user'])

    lines  = Verifs.objects.filter(airStatuscode="0001").order_by("-id_verif")[:80]

    # # lines  = Verifs.objects.all()
    # data2 = []

    # for line in lines:
    #     print(line.networkname )
    #     if line.networkname in channels :
    #         p ={
    #             'channel':line.networkname,
    #             'name' : line.spotId,
    #             'day' : str(line.airTime).split(' ')[0],
    #         }

    #         id = Channels.objects.get(channel_name= line.networkname , id_user = request.session['id_user'])
    #         print(id)
    #         region = ChannelsZone.objects.get(id_channel=id,zonename=line.zonename )
    #         p['region'] = region.region
    #         min = str(line.airTime).split(' ')[1].split(':')
    #         minute = min[0]+':'+min[1]+':00'
    #         p['minute'] = minute
    #         p['color'] = '#00800000'
    #         if  str(line.airStatuscode) == '0001':
    #             p['status'] = 'Aired Successfully'
    #             p['color'] = '#2c2c8cb3'
    #             try :
    #                 sfr = Sfr_analytics.objects.filter(day=p['day'],minute=p['minute']).order_by('-id_verif')
    #                 for i in sfr  :
    #                     if p['channel'] in i.sfr_channel_name :
    #                         purcent = Impressions.objects.get(pk='1')
    #                         nb  = float(i.purcent) * float(purcent.total_users) / float(purcent.market_share_purcent)
    #                         p['nb_wach'] = nb
    #             except :
    #                     p['nb_wach'] = 0

    #         elif str(line.airStatuscode) == '0008':
    #                 p['status'] = 'Failed, Adspot cut'
    #                 p['nb_wach'] = '-'

    #         else :
    #                 p['status'] = 'Failed, Other Reason'
    #                 p['nb_wach'] = '-'

    #         data2.append(p)


    campaigns = len(Campaigns.objects.filter(id_user=request.session['id_user']))
    campaigns_active = len(Campaigns.objects.filter(id_user=request.session['id_user'],pacing=True))
    advertiser = most_advertisers(request)
    bookeds = booked_adbreaks(request)
    agences = active_agency(request)
    playlist = playlists(request)
    activites = Activity.objects.all().order_by('-id_activity')[0:5]

    channels = Channels.objects.filter(id_user=request.session['id_user'])

    # Added 18-12-2023
    no_vast_query_old = """
        SELECT
            Campaigns.id_campaign,
            Count(Verifs.airStatusCode) as total_ads,
            SUM(SFR_analytics.purcent*1.25*4500000/17) as Total_impressions
        FROM
            Campaigns
        LEFT JOIN Adspots ON Campaigns.id_campaign = Adspots.id_campaign
        LEFT JOIN Verifs ON Verifs.spotId = Adspots.filename
        LEFT JOIN SFR_analytics ON SUBSTRING(Verifs.airTime, 12, 5) = SUBSTRING(SFR_analytics.`minute`, 1, 5)
        AND SUBSTRING(Verifs.airTime, 1, 10) = SUBSTRING(SFR_analytics.`day`, 1, 10)
        AND SFR_analytics.sfr_channel_name = '2M Maroc'
        WHERE Adspots.id_campaign is not null AND Verifs.airStatusCode= 0001
        AND SUBSTRING(Verifs.airTime, 1, 10) > Campaigns.start_day AND SUBSTRING(Verifs.airTime, 1, 10) < Campaigns.end_day
        GROUP BY Campaigns.id_campaign
    """

    no_vast_query = """
        SELECT
            Campaigns.id_campaign,
            Count(Verifs.airStatusCode) as total_ads,
            SUM(SFR_analytics.purcent*1.25*4500000/17) as Total_impressions
        FROM
            Campaigns
        INNER JOIN Adspots ON Campaigns.id_campaign = Adspots.id_campaign
        INNER JOIN Verifs ON Verifs.spotId = Adspots.filename
        INNER JOIN SFR_analytics ON SUBSTRING(Verifs.airTime, 12, 5) = SUBSTRING(SFR_analytics.`minute`, 1, 5)
        AND SUBSTRING(Verifs.airTime, 1, 10) = SUBSTRING(SFR_analytics.`day`, 1, 10)
        AND SFR_analytics.sfr_channel_name = '2M Maroc'
        WHERE Adspots.id_campaign is not null AND Verifs.airStatusCode= 0001
        AND SUBSTRING(Verifs.airTime, 1, 10) > Campaigns.start_day AND SUBSTRING(Verifs.airTime, 1, 10) < Campaigns.end_day
        AND Adspots.creative_id is null
        GROUP BY Campaigns.id_campaign
    """

    vast_query = """
        SELECT
            Campaigns.id_campaign,
            COALESCE(Ads.total_ads, 0) as total_ads,
            COALESCE(Impressions.total_impression, 0) as total_impression
        FROM
            Campaigns
        LEFT JOIN (
            SELECT
                Campaigns.id_campaign,
                COUNT(Verifs.airStatusCode) as total_ads
            FROM
                Campaigns
            LEFT JOIN
                Adspots ON Campaigns.id_campaign = Adspots.id_campaign
            LEFT JOIN
                Verifs ON Verifs.spotId = Adspots.filename
            WHERE
                Adspots.id_campaign is not null and Verifs.airStatusCode= 0001
                AND SUBSTRING(Verifs.airTime, 1, 10) > Campaigns.start_day
                AND SUBSTRING(Verifs.airTime, 1, 10) < Campaigns.end_day
            GROUP BY
                Campaigns.id_campaign
        ) Ads ON Campaigns.id_campaign = Ads.id_campaign
        LEFT JOIN (
            SELECT
                Campaigns.id_campaign,
                COUNT(VAST_response.AD_id) as total_impression
            FROM
                Campaigns
            LEFT JOIN
                VAST_response ON Campaigns.vast_data = VAST_response.vast_url
                AND SUBSTRING(VAST_response.datetime_timestamp, 1, 10) > Campaigns.start_day
                AND SUBSTRING(VAST_response.datetime_timestamp, 1, 10) < Campaigns.end_day
            WHERE
                Campaigns.vast_data is not null
            GROUP BY
                Campaigns.id_campaign
        ) Impressions ON Campaigns.id_campaign = Impressions.id_campaign
        WHERE Campaigns.vast_data is not null
    """

    from django.db import connection
    from .utils import NoVastResult, VastResult

    # Execute the no_vast_query
    with connection.cursor() as cursor:
        cursor.execute(no_vast_query)
        no_vast_results = [NoVastResult(*row) for row in cursor.fetchall()]

    # Execute the vast_query
    with connection.cursor() as cursor:
        cursor.execute(vast_query)
        vast_results = [VastResult(*row) for row in cursor.fetchall()]
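    # NoVastResult / VastResult are provided by .utils (not shown here); a plausible
    # sketch matching the three selected columns, purely as an illustration and not
    # the project's actual definitions, would be:
    #
    #     from collections import namedtuple
    #     NoVastResult = namedtuple('NoVastResult', ['id_campaign', 'total_ads', 'total_impressions'])
    #     VastResult = namedtuple('VastResult', ['id_campaign', 'total_ads', 'total_impression'])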


    #activites = []
    return render(
        request,
        "DAIManagementApp/index.html",
        {
            'labels': labels,
            # 'data': data,
            # 'data2': data2,
            'day': dayformat,
            'nb_channels': len(channels),
            'campaigns': campaigns,
            'campaigns_active': campaigns_active,
            'advertisers': advertiser,
            'bookeds': bookeds,
            'agences': agences,
            'playlists': playlist,
            'activites': activites,
            'channels': channels,
            'result': result,
            'channels_sfr': channel_sfr,
            'color': color,

            'campaigns_data': Campaigns.objects.filter(id_user=request.session['id_user']),
            'vast_results': vast_results,
            'no_vast_results': no_vast_results
        }
    )

# Count the number of delivered and cancelled playlists
@check_user
def playlists(request):
    from datetime import datetime, timedelta
    result = {
        'delivered' :0,
        'cancelled' :0,
        'purcent' : 0
    }
    channels =Channels.objects.filter(id_user = request.session['id_user'])
    yesterday = datetime.now() - timedelta(days=1)
    date = yesterday.strftime('%Y-%m-%d')
    try:
        playlist = list(Playlists.objects.filter(id_channel__in=channels, broadcastdate=date))[-1]
        channel = playlist.id_channel.channel_name
    except IndexError:
        # no playlist was broadcast yesterday for this user's channels
        return result
    day = date.replace('-', '')
    verifs = Verifs.objects.filter(networkname=channel, broadcastDate=date).values_list('airStatuscode')
    verifs = [i[0] for i in verifs]
    if len(verifs) == 0:
        return result
    for i in verifs :
        if i =='0001':
            result['delivered'] +=1
        else:
            result['cancelled'] +=1
    purcent  = result['delivered']*100/len(verifs)
    result['purcent'] = round(purcent,2)
    return result

@check_user
def booked_adbreaks(request):
    channels =Channels.objects.filter(id_user = request.session['id_user']).values_list('id_channel','channel_name')
    result = []
    for channel in channels :
        r = {}

        adspot = Adspots.objects.filter(id_channel = channel[0])
        booked = Campaigns.objects.filter(id_adpost__in=adspot , booked=True)
        avail = AdspotsInAvail.objects.filter(id_adspot__in = adspot)
        r['channel'] = channel[1]
        r['booked'] = len(booked)
        r['avail'] = len(avail)
        if len(booked) > 0 :
            result.append(r)
    return result

# Campaigns that are finished or in progress
@check_user
def active_agency(request):
    agences = Agency.objects.filter(id_user=request.session['id_user'],is_deleted=0).values_list('id_agency','name','datetime')
    result = []
    for agence in agences:
        r={}
        r['name'] = agence[1]
        brands = Brands.objects.filter(id_agency=agence[0])
        campaigns = Campaigns.objects.filter(id_brand__in=brands,pacing=True)
        if len(campaigns)!=0:
            r['date'] = agence[2]
            r['campaigns'] = len(campaigns)
            result.append(r)
    return result

@check_user
def most_advertisers(request) :
    brands_id = UsersAdvertisers.objects.filter(id_user=request.session['id_user']).values_list('id_brand')
    brands_id = [x[0] for x in brands_id]
    brands = Brands.objects.filter(pk__in=brands_id).order_by('-id_brand')
    totals = []
    dic = []
    result = []
    for i in brands:
        adspots = Adspots.objects.filter(id_brand=i)
        total = len(adspots)
        totals.append(total)
        brand = {'id_brand': i.id_brand,
                 'name_brand': i.brand_name,
                 'total_adspots': total,
                 'logo': i.logo}
        print(brand)
        dic.append(brand)
    print(totals)
    # Take the four brands with the most adspots. Popping the max out of `totals`
    # in a loop desynchronised the indices against `dic` and crashed when fewer
    # than four brands existed, so sort instead.
    result = sorted(dic, key=lambda b: b['total_adspots'], reverse=True)[:4]
    print(result)
    return result

@check_user
def test_options(request):
    if request.method =='POST':
        options = request.POST.get('optionsname')
        print(options)
    return render(request, "DAIManagementApp/test_options.html")

def handle_uploaded_file(file, path, filename):
    with open(os.path.join(path, filename), 'wb+') as destination:
        for chunk in file.chunks():
            destination.write(chunk)

# Generating XML file from the Database
def GenerateXMLfromDatabase(day, channel_id, id_zonename, version,draft_version='0'):
    #day is 2021-10-25
    # getting the channel selected in form
    channel = Channels.objects.get(id_channel=channel_id)
    channel_zone = ChannelsZone.objects.get(id_zone_channel=id_zonename)

    # day format will be YYYY-MM-DD, so to get the other format YYYYMMDD we'll transform it
    datetimeobject = datetime.datetime.strptime(day, '%Y-%m-%d')
    dayformat = datetimeobject.strftime('%Y%m%d')
    dayformat_string = str(dayformat)

    # finding the playlist based on Channel and Daytime and Version
    # max_version_draft = Playlists.objects.filter(broadcastdate=str(day)).aggregate(Max('draft_version')).get('draft_version__max')
    # new_version_draft = max_version_draft
    playlist = Playlists.objects.get(id_channel_id=int(channel_id), version=version, broadcastdate=str(day), id_zone_channel=channel_zone.id_zone_channel,is_draft='0',draft_version=draft_version)

    # insertion of Schedule tag data in the xml file
    root = xml.Element('Schedule')
    root.set('xmlns', 'http://www.scte.org/schemas/118-3/201X')
    root.set('broadcastDate', dayformat_string)
    root.set('begDateTime', day + 'T00:01:00+00:00')
    root.set('endDateTime', day + 'T23:59:59+00:00')
    root.set('networkName', channel.networkname)
    root.set('zoneName', channel_zone.zonename)
    root.set('revision', version)
    root.set('level', '0')
    root.set('schemaVersion', 'http://www.w3.org/2001/XMLSchema')

    # finding windows linked to this playlist
    windows = Windows.objects.filter(id_playlist=playlist.id_playlist).order_by('window_start')

    windows_array = {}
    win_i = 0
    trafficid = 0
    for window in windows:
        # transforming window start from datetime to string
        windowStartxml = str(window.window_start).replace(' ', 'T')
        windowStartxml = windowStartxml + '+00:00'
        # using windows_array to insert as many windows as possible without having issues with the names of windows to append to xml
        windows_array[win_i] = xml.Element('Window')  # we can have more than one window
        windows_array[win_i].set('windowStart', windowStartxml)
        windows_array[win_i].set('windowDuration', window.window_duration)
        # root.append(windows_array[win_i])
        # finding avails linked to this window
        avails = Avails.objects.filter(id_window=window.id_window)
        if len(avails) > 0:
            root.append(windows_array[win_i])

        avails_array = {}
        av_i = 0
        for avail in avails:
            # transforming window start from datetime to string
            availStartxml = str(avail.avail_start).replace(' ', 'T')
            availStartxml =  availStartxml + '+00:00'
            # using avails_array to insert as many avails as possible without having issues with the names of avails to append to xml
            avails_array[av_i] = xml.SubElement(windows_array[win_i], 'Avail')
            avails_array[av_i].set('availStart', availStartxml)
            avails_array[av_i].set('availInWindow', avail.availinwindow)
            avails_array[av_i].set('availNum', '0')
            # finding adspots linked to this avail
            adspotsinAV = AdspotsInAvail.objects.filter(id_avail=avail.id_avail).select_related('id_adspot')
            adspots_array = {}
            ads_i = 0
            for adspot in adspotsinAV:
                trafficid+=1
                adspot_duration = time.strftime('%H%M%S00', time.gmtime(int(adspot.id_adspot.duration)))
                adspots_array[ads_i] = xml.SubElement(avails_array[av_i], 'Spot')
                adspots_array[ads_i].set('eventType', 'LOI')
                # adspots_array[ads_i].set('trafficId', str(adspot.trafficid))
                adspots_array[ads_i].set('trafficId', str(trafficid))
                adspots_array[ads_i].set('positionInAvail', str(adspot.positioninavail))
                # adspot_duration above already converts the duration in seconds to the HHMMSSFF length format via time.strftime/time.gmtime, so spots longer than 60 seconds are handled as well
                adspots_array[ads_i].set('length', adspot_duration)
                adspots_array[ads_i].set('spotId', adspot.id_adspot.filename)
                adspots_array[ads_i].set('adId', str(adspot.id_adspot.duration) + 'sec')
                adspots_array[ads_i].set('schedSource', 'Local')
                ads_i += 1
            av_i += 1
        win_i += 1
    tree = xml.ElementTree(root)
    print(dayformat_string)
    print(channel_zone.zonename)
    print(channel.channel_name)
    print(playlist.version)
    fileName = dayformat_string + "-" + channel_zone.zonename + "-" + channel.channel_name + "-" + playlist.version + ".sch"
    print(fileName)
    path = "files/DAI-Management"
    if not os.path.exists(path):
        os.makedirs(path)
    with open(fileName, "wb") as files:
        tree.write(files, encoding='utf-8', xml_declaration=True)
    return fileName
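

# A minimal sketch (our naming, not an existing project helper) of the
# seconds-to-HHMMSSFF conversion referred to in GenerateXMLfromDatabase; the inline
# time.strftime/time.gmtime call there does the same job for durations under 24 hours.
def seconds_to_hhmmssff(seconds, frames='00'):
    """Format a duration in seconds as HHMMSSFF, e.g. 75 -> '00011500'."""
    hours, remainder = divmod(int(seconds), 3600)
    minutes, secs = divmod(remainder, 60)
    return f"{hours:02d}{minutes:02d}{secs:02d}{frames}"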


def testGenerateFromDatabase(request):
    # NOTE: GenerateXMLfromDatabase now takes (day, channel_id, id_zonename, version);
    # this legacy test call only passes three arguments and will raise a TypeError.
    return HttpResponse(GenerateXMLfromDatabase("2021-08-16", "2M", "1"))


def getfile(filename , mode ):
    for path, subdirs, files in os.walk('/'):
        for name in files:
            if filename == name :
                file = os.path.join(path, name)
                f = open(file , mode)
                return f


def uploadFTP2(host, user, password, filename, path_inftp):

    ftp = ftplib.FTP(host, user, password)
    ftp.encoding = "utf-8"
    ftp.cwd(path_inftp)

    for path, subdirs, files in os.walk(os.getcwd()):
        for name in files:
            if filename == name :
                file = os.path.join(path, name)
                print(file)
                with open(file, "rb") as f:
                    ftp.storbinary(f"STOR {filename}", f)
    ftp.quit()

def downloadFTP(host, user, password, filepath_inftp, file_inftp,  localpath):

    ftp = ftplib.FTP(host, user, password)
    ftp.encoding = "utf-8"
    ftp.cwd(filepath_inftp)
    filename = file_inftp
    # localfile = localpath+'/'+filename
    localfile = localpath+'/'+filename
    print(localfile)

    try:
        with open(localfile, "wb") as file:
            ftp.retrbinary(f"RETR {filename}", file.write)
    except error_perm:
        print('ERR', localfile)
        os.unlink(localfile)

    #
    # with open(localfile, "wb") as file:
    #     ftp.retrbinary(f"RETR {filename}", file.write)

    ftp.quit()



def test_download(request):
    return HttpResponse(downloadFTP("uk06.tmd.cloud", "testftp@epgsano.com", "I?#=s3FfnSu_", "/2M/schedules/",  "test.txt" , "/var/www/html/DAI-Management/DAIManagement/FTP_files/"))


def uploadFTP(host, port, user, password, filename, path_inftp):

    # ftplib.FTP() does not accept a port in its constructor, so connect explicitly when one is given
    if port:
        ftp = ftplib.FTP()
        ftp.connect(host, port)
        ftp.login(user, password)
    else:
        ftp = ftplib.FTP(host, user, password)
    # ftp.encoding = "utf-8"
    # Enter File Name with Extension
    ftp.cwd(path_inftp)
    module_dir = os.path.dirname(__file__)  # get current directory
    path = "files/DAI-Management"


    # Read file in binary mode
    with open(filename, "rb") as file:
        # Command for Uploading the file "STOR filename"
        ftp.storbinary(f"STOR {filename}", file)
    ftp.quit()

def AlmaFtp(host, username, password, local_file_path, remote_file_path):
    from ftplib import FTP
    try:
        # Connect to the FTP server
        with FTP(host) as ftp:
            # Login with credentials
            ftp.login(username, password)

            # Set the FTP directory (optional)
            # ftp.cwd('/your/remote/directory')

            # Open the local file in binary mode
            with open(local_file_path, 'rb') as file:
                # Upload the file to the FTP server
                ftp.storbinary(f'STOR {remote_file_path}', file)

            print(f"File '{local_file_path}' uploaded to '{remote_file_path}' on the FTP server.")
    except Exception as e:
        print(f"Error: {e}")



def uploadFTP4(host, port, user, password, filename, path_inftp):


    try:
        ftp = ftplib.FTP(host, user, password)
        ftp.cwd(path_inftp)
        # ftp = ftplib.FTP(host, user, password)
        module_dir = os.path.dirname(__file__)  # get current directory
        path = "files/DAI-Management"

        with open(filename, "rb") as file:
            ftp.storbinary(f"STOR {filename}", file)
        ftp.quit()

        return True

    except Exception as e:
        print('%s' % type(e))
        send_msg_telegram2("⚠️ Connectivity issue with Enensys server, trying again ... ⚠️")
        try:
            ftp = ftplib.FTP(host, user, password)
            ftp.cwd(path_inftp)
            # ftp = ftplib.FTP(host, user, password)
            module_dir = os.path.dirname(__file__)  # get current directory
            path = "files/DAI-Management"

            with open(filename, "rb") as file:
                ftp.storbinary(f"STOR {filename}", file)
            ftp.quit()

            return True

        except Exception as e:
            print('%s' % type(e))
            send_msg_telegram2("⛔️ update was not pushed, please check OpenVPN or Enensys")
            return False


    #
    # try:
    #     ftp = ftplib.FTP(host, user, password)
    # # ftp.encoding = "utf-8"
    # # Enter File Name with Extension
    #     ftp.cwd(path_inftp)
    #     module_dir = os.path.dirname(__file__)  # get current directory
    #     path = "files/DAI-Management"
    #
    #
    # # Read file in binary mode
    #     with open(filename, "rb") as file:
    #         # Command for Uploading the file "STOR filename"
    #         ftp.storbinary(f"STOR {filename}", file)
    #     ftp.quit()
    #     return True
    #
    # except (error_temp, BrokenPipeError, socket.timeout) as e:
    #     print(e)
    #     return False

@check_user
def saveXML2db(request):
    path = "files/results/20210906-2005-00001.xml"
    doc = xml2.parse(path)
    networkname = doc.firstChild.getAttribute("networkName")
    zonename = doc.firstChild.getAttribute("zoneName")
    broadcastdate = doc.firstChild.getAttribute("broadcastDate")
    verComplete = doc.firstChild.getAttribute("verComplete")


    Spots = doc.getElementsByTagName("Spot")
    results = []
    for spot in Spots:
        trafficId = spot.getAttribute("trafficId")
        spotId  = spot.getAttribute("spotId")

        airTime = spot.getAttribute("airTime")
        newAirTime = airTime.replace("T", " ")
        newAirTime2 = newAirTime.replace("+02:00", "")

        airLength = spot.getAttribute("airLength")
        airStatusCode = spot.getAttribute("airStatusCode")
        version = spot.getAttribute("revision")
        try:
            Verifs.objects.update_or_create(networkname=networkname, zonename=zonename, broadcastDate=broadcastdate, trafficId=trafficId, spotId=spotId, airTime=newAirTime2, airLength=airLength, airStatuscode=airStatusCode, revision=version,  vercomplete = verComplete)
        except IntegrityError as e:
            insertion = Verifs.objects.filter(networkname=networkname, zonename=zonename, broadcastDate=broadcastdate, trafficId=trafficId, spotId=spotId, airTime=newAirTime2, airLength=airLength, airStatuscode=airStatusCode, revision=version).update(vercomplete = verComplete)

        # Verifs.objects.update_or_create(networkname=networkname, zonename=zonename, broadcastDate=broadcastdate, trafficId=trafficId, spotId=spotId, airTime=newAirTime2, airLength=airLength, airStatuscode=airStatusCode, revision=version,  vercomplete = verComplete)
        #verifs.save()
    return HttpResponse("data has been inserted")




@check_user
def test_upload(request):
    return HttpResponse(uploadFTP2("uk06.tmd.cloud", "testftp@epgsano.com", "I?#=s3FfnSu_", "/files/DAI-Management/20210815-2005-2M-1.sch" , "2M/schedules/France"))


# def my_custom_sql(query, params):
#     from django.db import connection
#     with connection.cursor() as cursor:
#         cursor.execute(query, params)
#         results = cursor.fetchall()
#     return results

@check_user
def campaign_report(request, data=""):

    paris_tz = pytz.timezone('Europe/Paris')
    paris_time = datetime.datetime.now(paris_tz)
    utc_offset = paris_time.utcoffset().total_seconds() / 3600

    # if utc_offset == 1:
    if False:  # the GMT+1 branch is currently disabled; the GMT+2 query below is always used

        # *************************************
        # * for when France timezone is GMT+1 *
        # *************************************

        query = """
        SELECT networkname, spotId, airTime, airStatusCode, purcent AS sfr_percentage, purcent*1.25*4500000/17 AS total_volume, Epg.emission_name
        FROM Verifs
        inner JOIN SFR_analytics ON SUBSTRING(Verifs.airTime, 12, 5) = SUBSTRING(SFR_analytics.`minute`, 1, 5)
        AND SUBSTRING(Verifs.airTime, 1, 10) = SUBSTRING(SFR_analytics.`day`, 1, 10)
        AND SFR_analytics.sfr_channel_name = '2M Maroc'
        inner JOIN Epg ON (Verifs.airTime < Epg.End_time AND Verifs.airTime > Epg.Start_time)
        WHERE Verifs.spotId LIKE %s AND Verifs.airStatusCode = '0001' AND Verifs.broadcastDate > %s AND Verifs.broadcastDate < %s
        GROUP BY networkname, spotId, airTime, airStatusCode, purcent, Epg.emission_name ORDER BY airTime
        """

    # if utc_offset == 2:
    if True:  # force the GMT+2 query regardless of the computed utc_offset

        # *************************************
        # * for when France timezone is GMT+2 *
        # *************************************

        query = """
        SELECT networkname, spotId, airTime, airStatusCode, purcent AS sfr_percentage, purcent*1.25*4500000/17 AS total_volume, Epg.emission_name
        FROM Verifs
        inner JOIN SFR_analytics ON SUBSTRING(Verifs.airTime, 12, 5) = SUBSTRING(SFR_analytics.`minute`, 1, 5)
        AND SUBSTRING(Verifs.airTime, 1, 10) = SUBSTRING(SFR_analytics.`day`, 1, 10)
        AND SFR_analytics.sfr_channel_name = '2M Maroc'
        inner JOIN Epg ON (Verifs.airTime < DATE_ADD(Epg.End_time, INTERVAL 1 HOUR) AND Verifs.airTime > DATE_ADD(Epg.Start_time, INTERVAL 1 HOUR))
        WHERE Verifs.spotId LIKE %s AND Verifs.airStatusCode = '0001' AND Verifs.broadcastDate > %s AND Verifs.broadcastDate < %s
        GROUP BY networkname, spotId, airTime, airStatusCode, purcent, Epg.emission_name ORDER BY airTime
        """

    def dictfetchall(cursor):
        "Return all rows from a cursor as a dict"
        columns = [col[0] for col in cursor.description]
        return [
            dict(zip(columns, row))
            for row in cursor.fetchall()
        ]

    if data != "":
        campaign = Campaigns.objects.get(pk=data)

        # Your logic to process the request and prepare data
        # data = {
        #     'start_date': campaign.start_day,
        #     'end_date': campaign.end_day,
        #     'id_brand': campaign.id_brand.id_brand
        # }

        from django.db import connection

        cursor = connection.cursor()

        data_tuple=(
            f"%{campaign.id_brand.brand_name}%",
            (campaign.start_day).replace("-", ""),
            (campaign.end_day).replace("-", "")
        )

        cursor.execute(query,data_tuple)
        # data = cursor.fetchall()
        total = 0

        data = dictfetchall(cursor)

        for row in data:
            if row['total_volume'] is None:
                print(row)
            else:
                total += int(row['total_volume'])

        # Return a JsonResponse with the data
        return render(
            request,
            "DAIManagementApp/report_campaign.html",
            {
                'data': data,
                'total':total
            }
        )
    else:
        return redirect("edit_campaign")

def dictfetchall(cursor):
    "Return all rows from a cursor as a dict"
    columns = [col[0] for col in cursor.description]
    return [dict(zip(columns, row)) for row in cursor.fetchall()]
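
# Example use of dictfetchall (a sketch; the query text below is illustrative only):
#
#     with connection.cursor() as cursor:
#         cursor.execute("SELECT id_campaign FROM Campaigns")
#         rows = dictfetchall(cursor)   # -> [{'id_campaign': 1}, ...]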


@check_user
def report_advertiser_backup(request ,id_brand=""):
    from django.db.models import F, ExpressionWrapper, FloatField
    from django.db.models.functions import Substr
    from datetime import timedelta

    # def my_custom_sql(query, params):
    #     from django.db import connection
    #     with connection.cursor() as cursor:
    #         cursor.execute(query, params)
    #         columns = [col[0] for col in cursor.description]
    #     return [
    #         dict(zip(columns, row))
    #         for row in cursor.fetchall()
    #     ]
    def dictfetchall(cursor):
        "Return all rows from a cursor as a dict"
        columns = [col[0] for col in cursor.description]
        return [
            dict(zip(columns, row))
            for row in cursor.fetchall()
        ]

    def my_custom_sql(query, params):

        try:
            with connection.cursor() as cursor:
                cursor.execute(query, params)
                rows = cursor.fetchall()
                return rows
        except Exception as e:
            print(f"An error occurred: {e}")
            return []

    if request.method =='POST':

        from datetime import datetime
        import pytz

        # Get the current date and time
        current_datetime = datetime.now()

        # Format the date in 'YYYY-MM-DD' format
        report_date = current_datetime.strftime('%Y-%m-%d')


        paris_tz = pytz.timezone('Europe/Paris')
        paris_time = datetime.now(paris_tz)
        utc_offset = paris_time.utcoffset().total_seconds() / 3600

        print(f"Current UTC offset for France (Paris) is GMT+{int(utc_offset)}.")

        start_day = request.POST.get('start_day').replace("-", "")
        end_day = request.POST.get('end_day').replace("-", "")
        selected_brand = request.POST.get('selected_brand')

        # if utc_offset == 1:
        #
        #     # *************************************
        #     # * for when France timezone is GMT+1 *
        #     # *************************************
        #
        #     query = """
        #     SELECT networkname, spotId, airTime, airStatusCode, purcent AS sfr_percentage, purcent*1.25*4500000/17 AS total_volume, Epg.emission_name
        #     FROM Verifs
        #     LEFT JOIN SFR_analytics ON SUBSTRING(Verifs.airTime, 12, 5) = SUBSTRING(SFR_analytics.`minute`, 1, 5)
        #     AND SUBSTRING(Verifs.airTime, 1, 10) = SUBSTRING(SFR_analytics.`day`, 1, 10)
        #     AND SFR_analytics.sfr_channel_name = '2M Maroc'
        #     LEFT JOIN Epg ON (Verifs.airTime < Epg.End_time AND Verifs.airTime > Epg.Start_time)
        #     WHERE Verifs.spotId LIKE %s AND Verifs.airStatusCode = '0001' AND Verifs.broadcastDate > %s AND Verifs.broadcastDate < %s
        #     GROUP BY networkname, spotId, airTime, airStatusCode, purcent, Epg.emission_name ORDER BY airTime
        #     """

        # if utc_offset == 2:
        if utc_offset == 1:


            # *************************************
            # * for when France timezone is GMT+2 *
            # *************************************

            query_old = """
            SELECT networkname, spotId, DATE_SUB(Verifs.airTime, INTERVAL 1 HOUR) as airTime, airStatusCode, purcent AS sfr_percentage, purcent*1.25*4500000/17 AS total_volume, Epg.emission_name
            FROM Verifs
            LEFT JOIN SFR_analytics ON SUBSTRING(Verifs.airTime, 12, 5) = SUBSTRING(SFR_analytics.`minute`, 1, 5)
            AND SUBSTRING(Verifs.airTime, 1, 10) = SUBSTRING(SFR_analytics.`day`, 1, 10)
            AND SFR_analytics.sfr_channel_name = '2M Maroc'
            LEFT JOIN Epg ON (Verifs.airTime < DATE_ADD(Epg.End_time, INTERVAL 1 HOUR) AND Verifs.airTime > DATE_ADD(Epg.Start_time, INTERVAL 1 HOUR))
            WHERE Verifs.spotId LIKE %s AND Verifs.airStatusCode = '0001' AND Verifs.broadcastDate > %s AND Verifs.broadcastDate < %s
            GROUP BY networkname, spotId, airTime, airStatusCode, purcent, Epg.emission_name ORDER BY airTime
            """

            query_ol3 = """
            SELECT networkname, spotId, DATE_SUB(Verifs.airTime, INTERVAL 1 HOUR) as airTime, airStatusCode, purcent AS sfr_percentage, CAST(purcent * 1.25 * 4500000 / 17 AS UNSIGNED) AS total_volume, Epg.emission_name
            FROM Verifs
            LEFT JOIN SFR_analytics ON SUBSTRING(Verifs.airTime, 12, 5) = SUBSTRING(SFR_analytics.`minute`, 1, 5)
            AND SUBSTRING(Verifs.airTime, 1, 10) = SUBSTRING(SFR_analytics.`day`, 1, 10)
            AND SFR_analytics.sfr_channel_name = '2M Maroc'
            LEFT JOIN Epg ON (Verifs.airTime < DATE_ADD(Epg.End_time, INTERVAL 1 HOUR) AND Verifs.airTime > DATE_ADD(Epg.Start_time, INTERVAL 1 HOUR))
            WHERE Verifs.spotId LIKE %s AND Verifs.airStatusCode = '0001' AND Verifs.broadcastDate > %s AND Verifs.broadcastDate < %s
            GROUP BY networkname, spotId, airTime, airStatusCode, purcent, Epg.emission_name
            ORDER BY airTime
            """

            query_old4 = """
            SELECT networkname, spotId, DATE_SUB(Verifs.airTime, INTERVAL 1 HOUR) as airTime, airStatusCode, purcent AS sfr_percentage, CAST(purcent * 1.25 * 4500000 / 17 AS UNSIGNED) AS total_volume, Epg.emission_name
            FROM Verifs
            LEFT JOIN SFR_analytics ON SUBSTRING(Verifs.airTime, 12, 5) = SUBSTRING(SFR_analytics.`minute`, 1, 5)
            AND SUBSTRING(Verifs.airTime, 1, 10) = SUBSTRING(SFR_analytics.`day`, 1, 10)
            AND SFR_analytics.sfr_channel_name = '2M Maroc'
            Inner JOIN Epg ON (Verifs.airTime < DATE_ADD(Epg.End_time, INTERVAL 2 HOUR) AND Verifs.airTime > DATE_ADD(Epg.Start_time, INTERVAL 2 HOUR))
            WHERE Verifs.spotId LIKE %s AND Verifs.airStatusCode = '0001' AND Verifs.broadcastDate > %s AND Verifs.broadcastDate < %s
            GROUP BY networkname, spotId, airTime, airStatusCode, purcent, Epg.emission_name
            ORDER BY airTime
            """


            query = """
            SELECT networkname,
            spotId,
            DATE_SUB(Verifs.airTime, INTERVAL 1 HOUR) as airTime,
            airStatusCode,
            purcent AS sfr_percentage,
            CAST(MAX(purcent) * 1.25 * 4500000 / 17 AS UNSIGNED) AS total_volume,
            Epg.emission_name
            FROM Verifs
            LEFT JOIN (
            SELECT sfr_channel_name,
            `minute`,
            `day`,
            MAX(purcent) as purcent
            FROM SFR_analytics
            WHERE sfr_channel_name = '2M Maroc'
            GROUP BY sfr_channel_name, `minute`, `day`
            ) AS SFR_analytics ON SUBSTRING(Verifs.airTime, 12, 5) = SUBSTRING(SFR_analytics.`minute`, 1, 5)
            AND SUBSTRING(Verifs.airTime, 1, 10) = SUBSTRING(SFR_analytics.`day`, 1, 10)
            Inner JOIN Epg ON (Verifs.airTime < DATE_ADD(Epg.End_time, INTERVAL 2 HOUR) AND Verifs.airTime > DATE_ADD(Epg.Start_time, INTERVAL 2 HOUR))
            WHERE Verifs.spotId LIKE %s
            AND Verifs.airStatusCode = '0001'
            AND Verifs.broadcastDate > %s
            AND Verifs.broadcastDate < %s
            GROUP BY networkname, spotId, airTime, airStatusCode, purcent, Epg.emission_name
            ORDER BY airTime
            """

            # query = """
            # SELECT networkname, spotId, airTime, airStatusCode, purcent AS sfr_percentage, purcent*1.25*4500000/17 AS total_volume, Epg.emission_name
            # FROM Verifs
            # LEFT JOIN SFR_analytics ON SUBSTRING(Verifs.airTime, 12, 5) = SUBSTRING(SFR_analytics.`minute`, 1, 5)
            # AND SUBSTRING(Verifs.airTime, 1, 10) = SUBSTRING(SFR_analytics.`day`, 1, 10)
            # AND SFR_analytics.sfr_channel_name = '2M Maroc'
            # LEFT JOIN Epg ON (Epg.End_time > DATE_SUB(Verifs.airTime, INTERVAL 1 HOUR) AND Epg.End_time < DATE_SUB(Verifs.airTime, INTERVAL 1 HOUR))
            # WHERE Verifs.spotId LIKE %s AND Verifs.airStatusCode = '0001' AND Verifs.broadcastDate > %s AND Verifs.broadcastDate < %s
            # GROUP BY networkname, spotId, airTime, airStatusCode, purcent, Epg.emission_name ORDER BY airTime
            # """


        # The % signs are part of the parameter, not the SQL string
        # params = [f"%{selected_brand}%", '0001%', start_day, end_day]


        from django.db import connection
        cursor = connection.cursor()
        data_tuple=(f"%{selected_brand}%", start_day, end_day)
        cursor.execute(query,data_tuple)
        # data = cursor.fetchall()
        total = 0
        data = dictfetchall(cursor)


        processed_data = []
        last_date = None

        for row in data:
            # Convert airTime string to datetime object to extract the date part only
            row_date = datetime.strptime(row['airTime'], '%Y-%m-%d %H:%M:%S').date()
            if last_date and row_date != last_date:
                # Insert an empty or placeholder row
                processed_data.append({'networkname': '--', 'spotId': '--', 'airTime': '--', 'total_volume': '--', 'emission_name': '--'})
            processed_data.append(row)
            last_date = row_date


        for row in data:
            if row['total_volume'] is None:
                print(row)
            else:
                total += int(row['total_volume'])

        return render(request,'DAIManagementApp/view_report.html', {'data': processed_data, 'total':total, 'selected_brand':selected_brand, 'report_date':report_date })


        # result = Verifs.objects.filter(spotId__contains='kia', airStatuscode__startswith='0001', broadcastDate__gt=start_date)\
        # .annotate(sfr_percentage=F('purcent'), total_volume=ExpressionWrapper(F('purcent') * 1.25 * 4500000 / 17, output_field=DateTimeField()))\
        # .values('networkname', 'spotId', 'airTime', 'airStatusCode', 'sfr_percentage', 'total_volume', 'epg__emission_name')\
        # .prefetch_related('sfranalytics_set', 'epg')
        #
        # # Apply the JOIN conditions
        # result = result.filter(sfranalytics__minute__startswith=Substr(F('airTime'), 12, 5),
        #                sfranalytics__day=Substr(F('airTime'), 1, 10),
        #                sfranalytics__sfr_channel_name='2M Maroc',
        #                epg__start_time__lt=ExpressionWrapper(F('airTime') + timedelta(hours=1), output_field=DateTimeField()),
        #                epg__end_time__gt=ExpressionWrapper(F('airTime') + timedelta(hours=1), output_field=DateTimeField()))
        # # Retrieve the required fields
        # result = result.values('networkname', 'spotId', 'airTime', 'airStatusCode', 'sfr_percentage',
        #                'total_volume', 'epg__emission_name')
        #
        # # Apply the ordering
        # result = result.order_by('airTime')
        # ptint(result)

        # return render(request,'DAIManagementApp/views_advertiser_adspots.html', {'data':{'adspots':adspots, 'brand' : brand , 'channels' : list(channels)}})
    else :

        brands_id = UsersAdvertisers.objects.filter(id_user=request.session['id_user']).values_list('id_brand')
        brands_id = [x[0] for x in brands_id]
        brands = Brands.objects.filter(pk__in=brands_id, status=1).order_by('-id_brand')

        dic = []
        for i in brands:

            adspots = Adspots.objects.filter(id_brand=i)
            toutal = len(adspots)
            brand = {'id_brand' : i.id_brand,
                    'name_brand' : i.brand_name,
                    'description' : i.description,
                    'total_adspots' : toutal,
                    'logo' : i.logo }
            print(brand)
            dic.append(brand)


        return render(request , 'DAIManagementApp/report_advertiser.html' , {'brands':dic } )

@check_user
def report_advertiser(request, id_brand=""):
    from django.db.models import F, ExpressionWrapper, FloatField
    from django.db.models.functions import Substr
    from datetime import timedelta, datetime
    import pytz
    from django.db import connection

    def dictfetchall(cursor):
        "Return all rows from a cursor as a dict"
        columns = [col[0] for col in cursor.description]
        return [
            dict(zip(columns, row))
            for row in cursor.fetchall()
        ]

    def my_custom_sql(query, params):
        try:
            with connection.cursor() as cursor:
                cursor.execute(query, params)
                rows = cursor.fetchall()
                return rows
        except Exception as e:
            print(f"An error occurred: {e}")
            return []

    if request.method == 'POST':
        current_datetime = datetime.now()
        report_date = current_datetime.strftime('%Y-%m-%d')

        paris_tz = pytz.timezone('Europe/Paris')
        paris_time = datetime.now(paris_tz)
        utc_offset = paris_time.utcoffset().total_seconds() / 3600
        print(f"Current UTC offset for France (Paris) is GMT+{int(utc_offset)}.")

        start_day = request.POST.get('start_day').replace("-", "")
        end_day = request.POST.get('end_day').replace("-", "")
        selected_brand = request.POST.get('selected_brand')


        # NOTE: `query` is only defined inside this branch (GMT+2); calling the view
        # while France is on GMT+1 leaves it undefined at cursor.execute() below.
        if utc_offset == 2:
            # normal one hardcoded for 2M:
            query_old = """
            SELECT networkname,
            spotId,
            DATE_SUB(Verifs.airTime, INTERVAL 1 HOUR) as airTime,
            airStatusCode,
            purcent AS sfr_percentage,
            CAST(MAX(purcent) * 1.25 * 4500000 / 17 AS UNSIGNED) AS total_volume,
            Epg.emission_name
            FROM Verifs
            LEFT JOIN (
            SELECT sfr_channel_name,
            `minute`,
            `day`,
            MAX(purcent) as purcent
            FROM SFR_analytics
            WHERE sfr_channel_name = '2M Maroc'
            GROUP BY sfr_channel_name, `minute`, `day`
            ) AS SFR_analytics ON SUBSTRING(Verifs.airTime, 12, 5) = SUBSTRING(SFR_analytics.`minute`, 1, 5)
            AND SUBSTRING(Verifs.airTime, 1, 10) = SUBSTRING(SFR_analytics.`day`, 1, 10)
            Inner JOIN Epg ON (Verifs.airTime < DATE_ADD(Epg.End_time, INTERVAL 2 HOUR) AND Verifs.airTime > DATE_ADD(Epg.Start_time, INTERVAL 2 HOUR))
            WHERE Verifs.spotId LIKE %s
            AND Verifs.airStatusCode = '0001'
            AND Verifs.broadcastDate > %s
            AND Verifs.broadcastDate < %s
            GROUP BY networkname, spotId, airTime, airStatusCode, purcent, Epg.emission_name
            ORDER BY airTime
            """


            query_11 = """
            SELECT networkname,
            spotId,
            DATE_SUB(Verifs.airTime, INTERVAL 1 HOUR) as airTime,
            airStatusCode,
            purcent AS sfr_percentage,
            CAST(MAX(purcent) * 1.25 * 4500000 /
            CASE
            WHEN DATE(Verifs.airTime) < '2024-03-18' THEN 17
            ELSE 14
            END AS UNSIGNED) AS total_volume,
            Epg.emission_name
            FROM Verifs
            LEFT JOIN (
            SELECT sfr_channel_name,
            `minute`,
            `day`,
            MAX(purcent) as purcent
            FROM SFR_analytics
            WHERE sfr_channel_name = '2M Maroc'
            GROUP BY sfr_channel_name, `minute`, `day`
            ) AS SFR_analytics ON SUBSTRING(Verifs.airTime, 12, 5) = SUBSTRING(SFR_analytics.`minute`, 1, 5)
            AND SUBSTRING(Verifs.airTime, 1, 10) = SUBSTRING(SFR_analytics.`day`, 1, 10)
            INNER JOIN Epg ON (Verifs.airTime < DATE_ADD(Epg.End_time, INTERVAL 2 HOUR) AND Verifs.airTime > DATE_ADD(Epg.Start_time, INTERVAL 2 HOUR) AND Epg.verif_channel_id = Verifs.zonename)
            WHERE Verifs.spotId LIKE %s
            AND Verifs.airStatusCode = '0001'
            AND Verifs.broadcastDate > %s
            AND Verifs.broadcastDate < %s
            GROUP BY networkname, spotId, airTime, airStatusCode, purcent, Epg.emission_name
            ORDER BY airTime
            """



            query11 = """
            Select

            MIN(networkname) AS networkname,
            MIN(spotId) AS spotId,

            MIN(Epg.emission_name) AS emission_name,
            SUBSTRING(DATE_SUB(Verifs.airTime, INTERVAL 2 HOUR), 1, 19) as airTime,

            CAST(MAX(purcent) * 1.25 * 4500000 /
            CASE
                WHEN DATE(Verifs.broadcastDate) < '20240317' THEN 17
            ELSE 14 END AS UNSIGNED) AS total_volume

            FROM Verifs
            INNER JOIN SFR_analytics ON SUBSTRING(Verifs.airTime, 12, 5) = SUBSTRING(SFR_analytics.`minute`, 1, 5)
                         AND SUBSTRING(Verifs.airTime, 1, 10) = SUBSTRING(SFR_analytics.`day`, 1, 10)
                         AND SFR_analytics.sfr_channel_name = '2M Maroc'
            INNER JOIN Epg ON (Verifs.airTime < DATE_ADD(Epg.End_time, INTERVAL 1 HOUR) AND Verifs.airTime > DATE_ADD(Epg.Start_time, INTERVAL 1 HOUR))
            WHERE Verifs.spotId LIKE %s
            AND Verifs.airStatusCode = '0001'
            AND Verifs.broadcastDate > %s
            AND Verifs.broadcastDate < %s
            GROUP BY SUBSTRING(Verifs.airTime, 1, 19), Verifs.broadcastDate
            ORDER BY airTime;
            """

            #made for Ramadan 2024
            query = """

            SELECT
            MIN(Verifs.networkname) AS networkname,
            MIN(spotId) AS spotId,
            CASE
            WHEN DATE(Verifs.airTime) > '2024-03-30' THEN SUBSTRING(Verifs.airTime, 1, 19)
            ELSE SUBSTRING(DATE_SUB(Verifs.airTime, INTERVAL 1 HOUR), 1, 19)
            END AS airTime,
            MIN(Epg.emission_name) AS emission_name,
            CAST(MAX(purcent) * 1.25 * 4500000 / Channels_zone.market_share AS UNSIGNED) AS total_volume
            FROM Verifs
            INNER JOIN Channels_zone ON Channels_zone.id_zone_channel = Verifs.zonename
            INNER JOIN (
            SELECT sfr_channel_name, `minute`, `day`, MAX(purcent) AS purcent
            FROM SFR_analytics
            INNER JOIN Channels_zone ON Channels_zone.sfr_name = SFR_analytics.sfr_channel_name
            WHERE sfr_channel_name = Channels_zone.sfr_name
            GROUP BY sfr_channel_name, `minute`, `day`
            ) AS SFR_analytics ON (
            SUBSTRING(Verifs.airTime, 12, 5) = SUBSTRING(SFR_analytics.`minute`, 1, 5)
            OR SUBSTRING(Verifs.airTime, 12, 5) = SUBSTRING(ADDTIME(SFR_analytics.`minute`, '00:01:00'), 1, 5)
            )
            AND SUBSTRING(Verifs.airTime, 1, 10) = SUBSTRING(SFR_analytics.`day`, 1, 10)
            AND Channels_zone.sfr_name = SFR_analytics.sfr_channel_name
            INNER JOIN Epg ON (
            Epg.verif_channel_id = Verifs.networkname
            AND Verifs.airTime < DATE_ADD(Epg.End_time, INTERVAL 2 HOUR)
            AND Verifs.airTime > DATE_ADD(Epg.Start_time, INTERVAL 2 HOUR)
            )
            WHERE Verifs.spotId LIKE %s
            AND Verifs.airStatusCode = '0001'
            AND Verifs.broadcastDate > %s
            AND Verifs.broadcastDate < %s
            GROUP BY
            CASE
            WHEN DATE(Verifs.airTime) > '2024-03-30' THEN SUBSTRING(Verifs.airTime, 1, 19)
            ELSE SUBSTRING(DATE_SUB(Verifs.airTime, INTERVAL 1 HOUR), 1, 19)
            END,
            Verifs.broadcastDate,
            Channels_zone.market_share,
            SFR_analytics.sfr_channel_name
            ORDER BY airTime;



            """

        cursor = connection.cursor()
        selected_brand = selected_brand.capitalize()
        data_tuple = (f"%{selected_brand}%", start_day, end_day)
        cursor.execute(query, data_tuple)
        data = dictfetchall(cursor)

        processed_data = []
        last_date = None
        total = 0

        for row in data:
            row_date = datetime.strptime(row['airTime'], '%Y-%m-%d %H:%M:%S').date()
            if last_date and row_date != last_date:
                processed_data.append({'networkname': '--', 'spotId': '--', 'airTime': '--', 'total_volume': '--', 'emission_name': '--'})
            processed_data.append(row)
            last_date = row_date
            if row['total_volume'] is not None:
                total += int(row['total_volume'])

        return render(request, 'DAIManagementApp/view_report.html', {'data': processed_data, 'total': total, 'selected_brand': selected_brand, 'report_date': report_date})

    else:
        brands_id = UsersAdvertisers.objects.filter(id_user=request.session['id_user']).values_list('id_brand', flat=True)
        brands = Brands.objects.filter(pk__in=brands_id, status=1).order_by('-id_brand')

        dic = []
        for brand in brands:
            adspots = Adspots.objects.filter(id_brand=brand)
            total_adspots = len(adspots)
            brand_info = {
                'id_brand': brand.id_brand,
                'name_brand': brand.brand_name,
                'description': brand.description,
                'total_adspots': total_adspots,
                'logo': brand.logo
            }
            dic.append(brand_info)

        return render(request, 'DAIManagementApp/report_advertiser.html', {'brands': dic})
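
# The report queries above estimate delivered volume as
# CAST(MAX(purcent) * 1.25 * 4500000 / market_share AS UNSIGNED).
# A minimal Python sketch of the same arithmetic, kept here only as a
# reference for the SQL expression (it is not called by the view above):
def estimate_total_volume(purcent, market_share):
    """Mirror of the SQL volume estimate: purcent * 1.25 * 4,500,000 / market share."""
    if not market_share:
        return None
    return int(purcent * 1.25 * 4500000 / market_share)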

@check_user
def views_advertiser(request, id_brand=""):
    if id_brand != "":
        brand = Brands.objects.get(pk=id_brand)
        channel_ids = Adspots.objects.filter(id_brand=id_brand).values_list('id_channel', flat=True).distinct()
        channels = Channels.objects.filter(pk__in=channel_ids)
        adspots = Adspots.objects.filter(id_brand=id_brand).order_by('id_channel')

        return render(request, 'DAIManagementApp/views_advertiser_adspots.html', {'data': {'adspots': adspots, 'brand': brand, 'channels': list(channels)}})
    else :
        brands_id = UsersAdvertisers.objects.filter(id_user=request.session['id_user']).values_list('id_brand')
        brands_id = [x[0] for x in brands_id]
        brands = Brands.objects.filter(pk__in=brands_id).order_by('-id_brand')

        dic = []
        for i in brands:

            adspots = Adspots.objects.filter(id_brand=i)
            total = len(adspots)
            brand = {
                'id_brand': i.id_brand,
                'name_brand': i.brand_name,
                'total_adspots': total,
                'logo': i.logo,
            }
            logger.debug("brand summary: %s", brand)
            dic.append(brand)

        return render(request , 'DAIManagementApp/views_advertiser.html' , {'brands':dic } )
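
# views_advertiser counts creatives with one Adspots query per brand. A hedged
# alternative sketch that lets the database do the counting in a single query;
# it assumes the default reverse accessor name "adspots" from Adspots.id_brand
# back to Brands (adjust if the model declares a related_name):
def brands_with_adspot_counts(brand_ids):
    from django.db.models import Count  # Max is imported at module level; Count is not
    return (Brands.objects.filter(pk__in=brand_ids)
            .annotate(total_adspots=Count('adspots'))
            .order_by('-id_brand'))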

@check_user
def add_advertiser(request):
    if request.method =='POST':
        advertiser_name = request.POST.get('advertiser_name')
        category = request.POST.get('category')
        advertiser_desc = request.POST.get('advertiser_desc')
        active_0_1 = request.POST.get('active_0_1')
        now = datetime.datetime.now()
        path = "static/Advertisers_imgs"
        now_infile = str(now).replace(" ", "_")
        now_infile = now_infile.replace(":", "-")
        filename = advertiser_name+"__"+now_infile+".png"
        try :
            handle_uploaded_file(request.FILES['advertiser_logo'], path, filename)
        except:
            import shutil
            shutil.move('logo.png',path+'/'+filename)
        brand = Brands(brand_name=advertiser_name, description=advertiser_desc, category=category, status=active_0_1, logo=path+'/'+filename)
        brand.save()  # save first so the new id_brand is available below
        activite = 'Add Advertiser'
        desc = 'Admin Add advertiser id: ' + str(brand.id_brand)
        activity = Activity(activity=activite, date=now, description=desc)
        activity.save()
        advertiser_user = UsersAdvertisers(id_user_id=request.session['id_user'], id_brand_id=brand.id_brand, status=active_0_1, datetime=now)
        advertiser_user.save()
        return redirect('/DAIManagement/views_advertiser/')
    else:
        return render(request, "DAIManagementApp/add_advertiser.html")

@check_user
def edit_advertiser(request,id_brands="1"):
    print(request.method)
    if request.method == 'POST':
        print(id_brands)
        brand = Brands.objects.get(pk=id_brands)
        return render(request , 'DAIManagementApp/edit_advertiser_id.html', {'brand' : brand})
    else:
        # brands_id = UsersAdvertisers.objects.filter(id_user=request.session['id_user']).select_related('id_brand').all()
        # brands = Brands.objects.filter(pk__in=brands_id)
        brands_id = UsersAdvertisers.objects.filter(id_user=request.session['id_user']).values_list('id_brand')
        brands_id = [x[0] for x in brands_id]
        brands = Brands.objects.filter(pk__in=brands_id).order_by('-id_brand')
        return render(request,'DAIManagementApp/edit_advertiser.html',{ 'brands' : brands})

@check_user
def update_advertiser(request , id_brand =""):
    brand = Brands.objects.get(pk=id_brand)
    advertiser_name = request.POST.get('advertiser_name')
    category = request.POST.get('category')
    advertiser_desc = request.POST.get('advertiser_desc')
    active_0_1 = request.POST.get('active_0_1')
    now = datetime.datetime.now()
    try :
        path = "Advertisers_imgs"
        now_infile = str(now).replace(" ", "_")
        now_infile = now_infile.replace(":", "-")
        filename = advertiser_name+"__"+now_infile+".png"
        handle_uploaded_file(request.FILES['advertiser_logo'], 'static/'+path, filename)
        brand.logo = 'static/'+path+'/'+filename
    except :
        pass
    if advertiser_name != '' :
        brand.brand_name = advertiser_name
    if category != '':
        brand.category = category
    if advertiser_desc != '':
        brand.description = advertiser_desc
    if active_0_1 != '':
        brand.status = active_0_1

    activite = 'Edit Advertiser'
    desc = 'Admin edit advertiser  id: ' + str(brand.id_brand)
    activity = Activity(activity=activite , date=now ,description=desc )
    activity.save()
    brand.save()
    brands = UsersAdvertisers.objects.filter(id_user=request.session['id_user']).filter(status="1").select_related('id_brand').all()
    return redirect('/DAIManagement/views_advertiser/')


@check_user
def disable(request , id_brand=""):

    if request.method == 'POST':
        now = datetime.datetime.now()
        activite = 'Disable Advertiser'
        desc = 'Admin disable advertiser  id: ' + str(id_brand)
        activity = Activity(activity=activite , date=now ,description=desc )
        activity.save()
        brand = Brands.objects.get(pk=id_brand)
        status = (1+ int(brand.status) ) % 2
        brand.status = str(status)
        brand.save()

    # brands_id = UsersAdvertisers.objects.filter(id_user=request.session['id_user']).select_related('id_brand').all()
    # brands = Brands.objects.filter(pk__in=brands_id).order_by('-id_brand')

    brands_id = UsersAdvertisers.objects.filter(id_user=request.session['id_user']).values_list('id_brand')
    brands_id = [x[0] for x in brands_id]
    brands = Brands.objects.filter(pk__in=brands_id).order_by('-id_brand')

    return render(request,'DAIManagementApp/disable_advertiser.html',{ 'brands' : brands})


@check_user
def add_creative(request):
    if request.method =='POST':
        # channel_id = request.POST.get('channel_id')
        # advertiser_id = request.POST.get('advertiser_id')
        # channel_list = request.POST.getlist('channel_id')

        campaign_id = request.POST.get('campaign_id')              # campaign id
        adspot_name = request.POST.get('adspot_name')              # creative name
        adspot_duration = request.POST.get('adspot_duration')      # creative duration
        active_0_1 = request.POST.get('active_0_1')                # creative status
        vast_info = request.POST.get('creative_id')                # creative vast_id
        now = datetime.datetime.now()
        now_infile = str(now).replace(" ", "_").replace(":", "-").replace(".","-")

        if campaign_id != None:

            path = "adspots/user_"+str(request.session['id_user'])+"/campaign_"+str(campaign_id)
            os.makedirs("static/" + path, exist_ok=True)

            if 'adspot_file' in request.FILES:
                filename = request.FILES['adspot_file'].name
                # filename = "ch_"+str(channel)+"__"+slugify(adspot_name)+"__"+str(adspot_duration)+"sec__"+new_now_infile+".ts"
                handle_uploaded_file(request.FILES['adspot_file'], "static/"+path, filename)
            else:
                # Handle the case when 'adspot_file' is not present in the request
                filename = ""  # or set it to some default value or raise an error
            #
            campaign = Campaigns.objects.get(pk=campaign_id)
            #
            creative = Adspots(
                id_campaign = campaign,
                adspot_name=adspot_name,
                duration=adspot_duration,
                status=active_0_1,
                original_filepath=path+'/'+filename ,
                datetime=now,
                filename=filename,
                creative_id=vast_info
            )
            creative.save()

            #! Need to be re-coded
            # channel_name = Channels.objects.get(pk=channel)
            # DST_FOLDER = channel_name.channel_name +'/'+'spots'
            # SRC_FILEPATH = "static/"+path+'/'+ filename
            # send_adspot(DST_FOLDER,SRC_FILEPATH)

            activite = 'Add Creative'
            desc = 'Admin Add creative  id: ' + str(creative.id_adpost)
            activity = Activity(activity=activite , date=now ,description=desc )
            activity.save()

        return redirect("/DAIManagement/edit_creative/")
    else:
        # channels=Channels.objects.filter(id_user=request.session['id_user'])
        # advertisers = UsersAdvertisers.objects.filter(id_user=request.session['id_user']).filter(status="1").select_related('id_brand').all()
        campaigns = Campaigns.objects.filter(id_user=request.session['id_user'])
        return render(request, "DAIManagementApp/add_creative.html" ,{'data':{'campaigns':campaigns  }})

@check_user
def edit_creative(request , id_adpost="1"):
    if request.method =='POST':
        adpost = Adspots.objects.get(pk=id_adpost)

        # channels=Channels.objects.filter(id_user=request.session['id_user']).order_by('-id_channel')
        # advertisers = UsersAdvertisers.objects.filter(id_user=request.session['id_user']).filter(status="1").select_related('id_brand').all().order_by('-id_user_advertiser')
        campaigns = Campaigns.objects.filter(id_user=request.session['id_user'])
        return render(
            request,
            "DAIManagementApp/edit_creative_id.html",
            {
                'adspost' : adpost,
                'campaigns': campaigns
            }
        )
    else :
        id_session = str(request.session['id_user'])

        # channels = Channels.objects.filter(id_user=id_session).order_by('-id_channel').values_list('id_channel',flat=True)
        # brands = UsersAdvertisers.objects.filter(id_user=id_session).order_by('-id_user_advertiser').values_list('id_brand',flat=True)
        # adspots = Adspots.objects.filter(id_channel__in=channels).filter(id_brand__in=brands).order_by('-id_adpost')
        # =======================================================
        campaigns = Campaigns.objects.filter(id_user=request.session['id_user']).values_list('id_campaign', flat=True)
        adspots_nn = Adspots.objects.filter(id_campaign__in=campaigns).order_by('-id_adpost')
        # =======================================================
        return render(
            request ,
            "DAIManagementApp/edit_creative.html",
            {'adspots': adspots_nn}
        )

@check_user
def update(request, id_adpost):
    if request.method == "POST":
        creative = Adspots.objects.get(pk=id_adpost)

        campaign_id = request.POST.get('campaign_id')              # campaign id
        adspot_name = request.POST.get('adspot_name')              # creative name
        adspot_duration = request.POST.get('adspot_duration')      # creative duration
        active_0_1 = request.POST.get('active_0_1')                # creative status
        vast_info = request.POST.get('creative_id')                # creative vast_id

        # channel_id = request.POST.get('channel_id')
        # advertiser_id = request.POST.get('advertiser_id')
        # adspot_name = request.POST.get('adspot_name')
        # adspot_duration = request.POST.get('adspot_duration')
        # active_0_1 = request.POST.get('active_0_1')

        #
        campaign = Campaigns.objects.get(pk=campaign_id)
        #
        now = datetime.datetime.now()
        now_infile = str(now).replace(" ", "_").replace(":", "-")
        #
        path = "adspots/user_"+str(request.session['id_user'])+"/campaign_"+str(campaign_id)
        filename = "campaign_"+str(campaign_id)+"__"+slugify(adspot_name)+"__"+str(adspot_duration)+"sec__"+now_infile+".ts"
        filename_m3u8 = "campaign_"+str(campaign_id)+"__"+adspot_name+"__"+str(adspot_duration)+"sec__"+now_infile+".m3u8"
        #
        os.makedirs("static/" + path, exist_ok=True)
        #
        if 'adspot_file' in request.FILES:
            try :
                #
                handle_uploaded_file(request.FILES['adspot_file'], "static/"+path, filename)
                #
                f = open("static/"+path+"/"+filename_m3u8, "a")
                f.write("#EXTM3U \n")
                f.write("#EXT-X-VERSION:3 \n")
                f.write("#EXT-X-MEDIA-SEQUENCE:0 \n")
                f.write("#EXTINF:"+adspot_duration+", \n")
                f.write(filename)
                f.close()
            except :
                import shutil
                if os.path.exists("static/"+ creative.original_filepath):
                    shutil.move("static/"+ creative.original_filepath, "static/"+path+"/"+filename)
        else:
            # Handle the case when 'adspot_file' is not present in the request
            filename = creative.filename  # or set it to some default value or raise an error

        creative.original_filepath = path+"/"+filename

        #! Need to be re-coded
        # channel_name = Channels.objects.get(pk=channel_id)
        # DST_FOLDER = channel_name.channel_name +'/'+'spots'
        # SRC_FILEPATH = "static/"+path+'/'+ filename
        # send_adspot(DST_FOLDER,SRC_FILEPATH)

        if creative.id_campaign.id_campaign != campaign_id:
            creative.id_campaign = campaign
        if creative.adspot_name != adspot_name:
            creative.adspot_name = adspot_name
        if creative.duration != adspot_duration:
            creative.duration = adspot_duration
        if creative.status != active_0_1:
            creative.status = active_0_1
        if creative.creative_id != vast_info:
            creative.creative_id = vast_info
        creative.datetime=now

        activite = 'Edit Creative'
        desc = 'Admin Edit creative id: ' + str(creative.id_adpost)
        activity = Activity(activity=activite , date=now ,description=desc )
        activity.save()

        creative.save()
        return redirect("/DAIManagement/edit_creative/")

    # Non-POST requests fall back to the creative list so the view always returns a response.
    return redirect("/DAIManagement/edit_creative/")

@check_user
def views_creative(request):
    adspots = Adspots.objects.all().order_by('-id_adpost')
    return render(request , 'DAIManagementApp/views_creative.html',{'adspots':adspots})
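
# update() above writes a one-segment m3u8 manifest inline with repeated
# f.write calls. A hedged sketch of the same output using a context manager
# (the tags mirror those used in update; "w" mode is assumed here rather than
# the original append mode):
def write_single_segment_m3u8(manifest_path, segment_filename, duration_seconds):
    with open(manifest_path, "w") as f:
        f.write("#EXTM3U \n")
        f.write("#EXT-X-VERSION:3 \n")
        f.write("#EXT-X-MEDIA-SEQUENCE:0 \n")
        f.write("#EXTINF:" + str(duration_seconds) + ", \n")
        f.write(segment_filename)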


@check_user
def new_playlist_old(request):
    if request.method == 'POST':

        useraccess = Useraccess.objects.get(id_user=request.session['id_user'])
        channel_id = request.POST.get('channel_id')
        channeldata = Channels.objects.get(id_channel=channel_id)
        zonename = request.POST.get('zonename')
        daydate = request.POST.get('day')
        number_of_windows = request.POST.get('numofwin')
        channel_zone = ChannelsZone.objects.get(id_channel=channel_id, zonename=zonename)
        logger.debug("channel_zone resolved to %s (%s)", channel_zone, type(channel_zone))
        daydate = datetime.datetime.strptime(str(daydate), '%m/%d/%Y')
        daydate = daydate.strftime('%Y-%m-%d')
        start_date = str(daydate) + "T00:01:00+00:00"
        end_date = str(daydate) + "T23:59:00+00:00"
        now = datetime.datetime.now()
        Playlist = Playlists(id_channel_id=channel_id ,version="1", broadcastdate=str(daydate), start_date=start_date, end_date=end_date, creation_datetime=now, id_zone_channel_id=channel_zone.id_zone_channel)
        Playlist.save()
        traffic = 0
        # starts =[]
        for i in range(int(number_of_windows)):
            number_of_ads = request.POST.get('numofads['+str(i)+']')
            if request.POST.get('numofads['+str(i)+']'):
                number_of_ads = request.POST.get('numofads['+str(i)+']')
                window_start = request.POST.get('windowstart['+str(i)+']')
                window_start = daydate+' '+window_start+':00'
                window_end = request.POST.get('windowend['+str(i)+']')
                window_end = daydate+' '+window_end+':00'
                FMT = '%Y-%m-%d %H:%M:%S'
                window_duration = datetime.datetime.strptime(window_end, FMT) - datetime.datetime.strptime(window_start, FMT)
                window_duration = datetime.datetime.strptime(str(window_duration), '%H:%M:%S')
                window_duration = window_duration.strftime('%H%M%S00')

                Window = Windows(id_playlist_id=Playlist.id_playlist, window_start=window_start, window_end=window_end, window_duration=window_duration )
                Window.save()
                Avail = Avails(id_window_id=Window.id_window, avail_start=window_start, availinwindow='1', datetime=now )
                Avail.save()
                for j in range(int(number_of_ads)):
                    if request.POST.get('ad['+str(i)+']['+str(j)+']'):
                        adspot = request.POST.get('ad['+str(i)+']['+str(j)+']')
                        traffic +=1
                        AdspotsInAv = AdspotsInAvail(id_avail_id=Avail.id_avail, id_adspot_id=adspot, positioninavail=str(j+1), trafficid=traffic)
                        AdspotsInAv.save()
        xmlfilename = GenerateXMLfromDatabase(daydate, channel_id, channel_zone.id_zone_channel, '1' )
        path_inftp = channeldata.ftp_channel_name+'/schedules/'+channel_zone.region
        # uploadFTP(useraccess.ftp_server, useraccess.ftp_port, useraccess.ftp_user, useraccess.ftp_password, xmlfilename , path_inftp)
        # channels = Channels.objects.filter(id_user=request.session['id_user'])
        return render(request, "DAIManagementApp/new_playlist.html")
    else:
        channels = Channels.objects.filter(id_user=request.session['id_user'])
        return render(request, "DAIManagementApp/new_playlist.html",{'data': {'channels': channels}})

@check_user
def load_adspots(request):
    channel_id = request.GET.get('channel')

    adspots = Adspots.objects.filter(id_channel=channel_id, status="1").select_related('id_brand').all()

    return render(request, 'DAIManagementApp/dropdown_adspots.html', {'adspots': adspots})

@check_user
def load_regions(request):
    channel_id = request.GET.get('channel')
    regions = ChannelsZone.objects.filter(id_channel=channel_id)
    return render(request,'DAIManagementApp/dropdown_region.html',{'regions': regions})

@check_user
def edit_playlist_old(request,id_playlist="1"):
    if request.method == 'POST':
        playlist = Playlists.objects.get(pk=id_playlist)
        channels = Channels.objects.filter(id_user=request.session['id_user'])
        region = ChannelsZone.objects.get(id_zone_channel=playlist.id_zone_channel.id_zone_channel)
        d = playlist.broadcastdate

        windows = Windows.objects.filter(id_playlist=playlist.id_playlist)

        win = Windows.objects.filter(id_playlist=playlist.id_playlist).values_list('id_window', flat=True)
        avails = Avails.objects.filter(id_window__in=win).values_list('id_avail', flat=True)
        av = Avails.objects.filter(id_window__in=win)
        adspots = AdspotsInAvail.objects.filter(id_avail__in=avails)

        data = {'playlist': playlist, 'channels': channels,'region':region,'d':d,'windows':windows,'adspots':adspots, 'av':av }
        return render(request, "DAIManagementApp/edit_playlist_id.html", data)
    else:
        id_session = str(request.session['id_user'])
        channels = Channels.objects.filter(id_user=id_session).values_list('id_channel', flat=True)
        playlists = Playlists.objects.filter(id_channel__in=channels).order_by('-id_playlist')
        data = {'playlists': playlists}
        return render(request, "DAIManagementApp/edit_playlist.html",data)

# Updated With Static FTP Connection
@check_user
def new_playlist(request):
    if request.method == 'POST':
        apply_btn = request.POST.get("apply_btn")
        draft_btn = request.POST.get("draft_btn")

        if(apply_btn):
            #
            useraccess = Useraccess.objects.get(id_user=request.session['id_user'])
            #
            channel_id = request.POST.get('channel_id')
            channeldata = Channels.objects.get(id_channel=channel_id)
            zonename = request.POST.get('zonename', '').strip()
            region, zone, network = zonename.split('-')

            channel_zone = ChannelsZone.objects.get(
                id_channel=channel_id,
                region=region,
                zonename=zone,
                networkname=network
            )
            #
            number_of_windows = request.POST.get('numofwin')
            #
            draft_version = request.POST.get('draft_version')
            #
            daydate = request.POST.get('day')
            daydate = datetime.datetime.strptime(str(daydate), '%m/%d/%Y')
            daydate = daydate.strftime('%Y-%m-%d')
            #
            Playlist = Playlists(
                id_channel_id      = channel_id ,
                version            = "1",
                broadcastdate      = f"{daydate}",
                start_date         = f"{daydate}T00:01:00+00:00",
                end_date           = f"{daydate}T23:59:00+00:00",
                creation_datetime  = datetime.datetime.now(),
                id_zone_channel_id = channel_zone.id_zone_channel,
                is_draft           = '0',
                draft_version      = '0'
            )
            Playlist.save()
            #
            traffic = 0
            record_inserted = 0
            #
            if number_of_windows and (int(number_of_windows) > 0):
                #
                for i in range(int(number_of_windows)):
                    if request.POST.get('numofavails['+str(i)+']'):
                        #
                        numofavails = request.POST.get('numofavails['+str(i)+']')
                        #
                        window_start = request.POST.get('windowstart['+str(i)+']')
                        window_start = daydate+' '+window_start+':00'
                        window_end = request.POST.get('windowend['+str(i)+']')
                        window_end = daydate+' '+window_end+':00'
                        #
                        FMT = '%Y-%m-%d %H:%M:%S'
                        #
                        window_duration = datetime.datetime.strptime(window_end, FMT) - datetime.datetime.strptime(window_start, FMT)
                        window_duration = datetime.datetime.strptime(str(window_duration), '%H:%M:%S')
                        window_duration = window_duration.strftime('%H%M%S00')
                        #
                        Window = Windows(
                            id_playlist_id=Playlist.id_playlist,
                            window_start=window_start,
                            window_end=window_end,
                            window_duration=window_duration
                        )
                        Window.save()

                        for j in range(int(numofavails)):
                            if request.POST.get('availstart['+str(i)+']['+str(j)+']'):
                                #
                                av_start = request.POST.get('availstart['+str(i)+']['+str(j)+']')
                                av_start = daydate+' '+av_start+':00'
                                #
                                number_of_ads = request.POST.get('numofads['+str(i)+']['+str(j)+']')
                                #
                                Avail = Avails(
                                    id_window_id  = Window.id_window,
                                    avail_start   = av_start,
                                    availinwindow = str(j+1),
                                    datetime      = datetime.datetime.now(),
                                )
                                Avail.save()

                                for k in range(int(number_of_ads)):
                                    if request.POST.get('ad['+str(i)+']['+str(j)+']['+str(k)+']'):
                                        #
                                        adspot = request.POST.get('ad['+str(i)+']['+str(j)+']['+str(k)+']')
                                        #
                                        traffic +=1
                                        #
                                        AdspotsInAv = AdspotsInAvail(
                                            id_avail_id     = Avail.id_avail,
                                            id_adspot_id    = adspot,
                                            positioninavail = str(k+1),
                                            trafficid       = traffic
                                        )
                                        AdspotsInAv.save()

                #
                # xmlfilename = GenerateXMLfromDatabase(daydate, channel_id, channel_zone.id_zone_channel, '1',Playlist.draft_version)
                # path_inftp = channeldata.ftp_channel_name+'/schedules/'+channel_zone.region
                # uploadFTP(useraccess.ftp_server, useraccess.ftp_port, useraccess.ftp_user, useraccess.ftp_password, xmlfilename , path_inftp)

                xml_playlist_res = generateSchedule(Playlist)

                # Check the result
                if xml_playlist_res["status"]:
                    ftp_host = "172.16.205.1"
                    ftp_port =  60000
                    ftp_username = "SanoaMedia"
                    ftp_password = "SanoaMedia"
                    local_file_path = xml_playlist_res["file"]
                    remote_file_path = "2M/schedules/REG1_NL"

                    if Playlist.id_zone_channel.networkname == "REGNL":
                        remote_file_path = "2M/schedules/REG1_NL"
                    elif Playlist.id_zone_channel.networkname == "2M":
                        remote_file_path = "2M/schedules/REG1_FR"
                    # uploadFTP(ftp_server, ftp_port, ftp_user, ftp_password, xml_file_path , ftp_upload_path)
                    ftp = FTPConnector(ftp_host, ftp_port, ftp_username, ftp_password)
                    # Upload a file to the server
                    ftp.upload_file(local_file_path, remote_file_path)
                else:
                    print(f"Failed to generate playlist file. Error: {xml_playlist_res['message']}")

                activity = Activity(
                    activity    = "Add PlayList",
                    date        = datetime.datetime.now(),
                    description = f"Admin Add PlayList  id: { Playlist.id_playlist }"
                )
                activity.save()

                ############ Display data on view_playlist_id page ##################
                return redirect('view_playlist', id_playlist = Playlist.id_playlist)
            #
            return redirect('new_playlist')
        else:
            useraccess = Useraccess.objects.get(id_user=request.session['id_user'])
            channel_id = request.POST.get('channel_id')
            channeldata = Channels.objects.get(id_channel=channel_id)
            zonename = request.POST.get('zonename')
            daydate = request.POST.get('day')
            number_of_windows = request.POST.get('numofwin')
            channel_zone = ChannelsZone.objects.get(id_channel=channel_id, zonename=zonename)

            daydate = datetime.datetime.strptime(str(daydate), '%m/%d/%Y')
            daydate = daydate.strftime('%Y-%m-%d')
            start_date = str(daydate) + "T00:01:00+00:00"
            end_date = str(daydate) + "T23:59:00+00:00"
            now = datetime.datetime.now()
            Playlist = Playlists(id_channel_id=channel_id ,version="1", broadcastdate=str(daydate), start_date=start_date, end_date=end_date, creation_datetime=now, id_zone_channel_id=channel_zone.id_zone_channel,is_draft='1',draft_version='1')
            Playlist.save()
            traffic = 0
            record_inserted = 0
            for i in range(int(number_of_windows)):
                if request.POST.get('numofavails['+str(i)+']'):
                    numofavails = request.POST.get('numofavails['+str(i)+']')
                    window_start = request.POST.get('windowstart['+str(i)+']')
                    window_start = daydate+' '+window_start+':00'
                    window_end = request.POST.get('windowend['+str(i)+']')
                    window_end = daydate+' '+window_end+':00'
                    FMT = '%Y-%m-%d %H:%M:%S'
                    window_duration = datetime.datetime.strptime(window_end, FMT) - datetime.datetime.strptime(window_start, FMT)
                    window_duration = datetime.datetime.strptime(str(window_duration), '%H:%M:%S')
                    window_duration = window_duration.strftime('%H%M%S00')
                    Window = Windows(id_playlist_id=Playlist.id_playlist, window_start=window_start, window_end=window_end, window_duration=window_duration )
                    Window.save()

                    for j in range(int(numofavails)):
                        if request.POST.get('availstart['+str(i)+']['+str(j)+']'):
                            av_start = request.POST.get('availstart['+str(i)+']['+str(j)+']')
                            av_start = daydate+' '+av_start+':00'
                            number_of_ads = request.POST.get('numofads['+str(i)+']['+str(j)+']')
                            Avail = Avails(id_window_id=Window.id_window, avail_start=av_start, availinwindow=str(j+1), datetime=now )
                            Avail.save()

                            for k in range(int(number_of_ads)):
                                if request.POST.get('ad['+str(i)+']['+str(j)+']['+str(k)+']'):
                                    adspot = request.POST.get('ad['+str(i)+']['+str(j)+']['+str(k)+']')
                                    traffic +=1
                                    AdspotsInAv = AdspotsInAvail(id_avail_id=Avail.id_avail, id_adspot_id=adspot, positioninavail=str(k+1), trafficid=traffic)
                                    AdspotsInAv.save()
            # xmlfilename = GenerateXMLfromDatabase(daydate, channel_id, channel_zone.id_zone_channel, '1' )
            # path_inftp = channeldata.ftp_channel_name+'/schedules/'+channel_zone.region
            # uploadFTP(useraccess.ftp_server, useraccess.ftp_port, useraccess.ftp_user, useraccess.ftp_password, xmlfilename , path_inftp)
            last_id_playlist = Playlist.id_playlist

            ############ Display data on view_playlist_id page ##################
            return redirect('draft_playlist')
    else:
        channels = Channels.objects.filter(id_user=request.session['id_user'])
        return render(request, "DAIManagementApp/new_playlist.html",{'data': {'channels': channels}})


@check_user
def edit_playlist(request,id_playlist="1"):
    if request.method == 'POST':
        edit_btn = request.POST.get("edit_btn")
        view_btn = request.POST.get("view_btn")
        insertion_results_btn = request.POST.get("insertion_results_btn")
        duplicate_btn = request.POST.get("duplicate_btn")
        draft_btn = request.POST.get("draft_btn")
        if(edit_btn):
            playlist = Playlists.objects.get(pk=id_playlist)
            channels = Channels.objects.filter(id_user=request.session['id_user'])
            region = ChannelsZone.objects.get(
                id_zone_channel=playlist.id_zone_channel.id_zone_channel,
                region=playlist.id_zone_channel.region,
                zonename=playlist.id_zone_channel.zonename,
                networkname=playlist.id_zone_channel.networkname
            )
            # region = ChannelsZone.objects.get(id_zone_channel=playlist.id_zone_channel.id_zone_channel)
            d = playlist.broadcastdate

            win = Windows.objects.filter(id_playlist=playlist.id_playlist).values_list('id_window', flat=True)

            data = {}
            data["windows"] = []
            data["number_of_wins"] = 0
            i = 0

            windows = Windows.objects.filter(id_playlist=playlist.id_playlist).order_by('window_start')
            for window in windows:
                window_dic = {}
                window_dic['i'] = i
                i = i+1
                window_dic['id_window'] = window.id_window
                window_start_formated = datetime.datetime.strptime(window.window_start, '%Y-%m-%d %H:%M:%S')
                window_start_formated_2 = window_start_formated.strftime("%H:%M")
                window_end_formated = datetime.datetime.strptime(window.window_end, '%Y-%m-%d %H:%M:%S')
                window_end_formated_2 = window_end_formated.strftime("%H:%M")
                window_dic['window_start'] = window_start_formated_2
                window_dic['window_end'] = window_end_formated_2
                window_dic['avails_in_win'] = []
                window_dic['num_of_avails'] = 0
                data["number_of_wins"] = i
                avails = Avails.objects.filter(id_window=window.id_window)
                j = 0
                for avail in avails:
                    avail_dic = {}
                    avail_dic["j"] = j
                    j = j+1
                    avail_dic["id_avail"] = avail.id_avail
                    avail_start_formated = datetime.datetime.strptime(avail.avail_start, '%Y-%m-%d %H:%M:%S')
                    avail_start_formated2 = avail_start_formated.strftime("%H:%M")
                    avail_dic["avail_start"] = avail_start_formated2
                    avail_dic["adspots_in_avail"] = []
                    window_dic["avails_in_win"].append(avail_dic)
                    adspots = AdspotsInAvail.objects.filter(id_avail=avail.id_avail)
                    window_dic['num_of_avails'] = j
                    k = 0
                    avail_dic["num_of_adspots"] = 0
                    for adspot in adspots:
                        adspot_dic = {}
                        adspot_dic["k"] = k
                        k = k+1
                        avail_dic["num_of_adspots"] = k
                        adspot_dic["id_adsinavail"] = adspot.id_adsinavail
                        adspot_dic["id_adspot"] = adspot.id_adspot
                        avail_dic["adspots_in_avail"].append(adspot_dic)
                data["windows"].append(window_dic)


            now = datetime.datetime.now()
            activite = 'Edit PlayList'
            desc = 'Admin Edit PlayList  id: ' + str(playlist.id_playlist)
            activity = Activity(activity=activite , date=now ,description=desc )
            activity.save()

            data_playlist = {'data':data["windows"],'playlist': playlist, 'channels': channels,'region':region,'d':d,'number_of_wins':data["number_of_wins"]}
            return render(request, "DAIManagementApp/edit_playlist_id.html", data_playlist)
        elif(duplicate_btn):
            playlist = Playlists.objects.get(pk=id_playlist)
            channels = Channels.objects.filter(id_user=request.session['id_user'])
            region = ChannelsZone.objects.get(
                id_zone_channel=playlist.id_zone_channel.id_zone_channel,
                region=playlist.id_zone_channel.region,
                zonename=playlist.id_zone_channel.zonename,
                networkname=playlist.id_zone_channel.networkname
            )
            d = playlist.broadcastdate

            win = Windows.objects.filter(id_playlist=playlist.id_playlist).values_list('id_window', flat=True)

            data = {}
            data["windows"] = []
            data["number_of_wins"] = 0
            i = 0

            windows = Windows.objects.filter(id_playlist=playlist.id_playlist)
            for window in windows:
                window_dic = {}
                window_dic['i'] = i
                i = i+1
                window_dic['id_window'] = window.id_window
                window_start_formated = datetime.datetime.strptime(window.window_start, '%Y-%m-%d %H:%M:%S')
                window_start_formated_2 = window_start_formated.strftime("%H:%M")
                window_end_formated = datetime.datetime.strptime(window.window_end, '%Y-%m-%d %H:%M:%S')
                window_end_formated_2 = window_end_formated.strftime("%H:%M")
                window_dic['window_start'] = window_start_formated_2
                window_dic['window_end'] = window_end_formated_2
                window_dic['avails_in_win'] = []
                window_dic['num_of_avails'] = 0
                data["number_of_wins"] = i
                avails = Avails.objects.filter(id_window=window.id_window)
                j = 0
                for avail in avails:
                    avail_dic = {}
                    avail_dic["j"] = j
                    j = j+1
                    avail_dic["id_avail"] = avail.id_avail
                    avail_start_formated = datetime.datetime.strptime(avail.avail_start, '%Y-%m-%d %H:%M:%S')
                    avail_start_formated2 = avail_start_formated.strftime("%H:%M")
                    avail_dic["avail_start"] = avail_start_formated2
                    avail_dic["adspots_in_avail"] = []
                    window_dic["avails_in_win"].append(avail_dic)
                    adspots = AdspotsInAvail.objects.filter(id_avail=avail.id_avail)
                    window_dic['num_of_avails'] = j
                    k = 0
                    avail_dic["num_of_adspots"] = 0
                    for adspot in adspots:
                        adspot_dic = {}
                        adspot_dic["k"] = k
                        k = k+1
                        avail_dic["num_of_adspots"] = k
                        adspot_dic["id_adsinavail"] = adspot.id_adsinavail
                        adspot_dic["id_adspot"] = adspot.id_adspot
                        avail_dic["adspots_in_avail"].append(adspot_dic)
                data["windows"].append(window_dic)


            data_playlist = {'data':data["windows"],'playlist': playlist, 'channels': channels,'region':region,'d':d,'number_of_wins':data["number_of_wins"]}
            return render(request, "DAIManagementApp/duplicate_playlist_id.html", data_playlist)
        elif(view_btn):
            return redirect('view_playlist', id_playlist=id_playlist)
        else:
            return redirect('insertion_results', id_playlist=id_playlist)
    else:
        cutoff = datetime.datetime.now() - timedelta(days=30)
        id_session = str(request.session['id_user'])
        channels = Channels.objects.filter(id_user=id_session).values_list('id_channel', flat=True)
        playlists = Playlists.objects.filter(id_channel__in=channels, is_draft='0', start_date__gte=cutoff).order_by('-broadcastdate')
        windows= Windows.objects.filter(id_playlist__in=playlists)
        details = []
        for window in windows :
            detail = {}
            detail["id_playlist"]=window.id_playlist.id_playlist
            broadcastDate_verif = str(window.window_start).split(' ')[0]
            broadcastDate_verif = broadcastDate_verif.replace('-','')
            detail["window_start"] = str(window.window_start).split(' ')[1]
            detail["window_end"] = str(window.window_end).split(' ')[1]
            avails = Avails.objects.filter(id_window=window)
            detail["avails"] = []
            # for avail in avails:
            #     av = {}
            #     av['start']= str(avail.avail_start).split(' ')[1]
            #     AdspotsInAv = AdspotsInAvail.objects.filter(id_avail=avail)
            #     for adspot in AdspotsInAv:
            #         av["adspot"] = AdspotsInAvail.objects.filter(id_avail=avail)
            #         try:
            #             result = Verifs.objects.filter(broadcastDate = broadcastDate_verif, trafficId = adspot.trafficid).latest('id_verif')
            #             av["adspotname"] = adspot.id_adspot.adspot_name
            #             av["airStatusCode"] = result.airStatuscode
            #             av["airTime"] = result.airTime
            #         except Verifs.DoesNotExist:
            #             print('oups')
            #         else:
            #             print("file not ready yet")
            #     detail["avails"].append(av)
            # details.append(detail)


        data = {'playlists': playlists , 'details':details}
        return render(request, "DAIManagementApp/edit_playlist.html",data)


@check_user
def edit_older_playlist(request,id_playlist="1"):
    if request.method == 'POST':
        edit_btn = request.POST.get("edit_btn")
        view_btn = request.POST.get("view_btn")
        insertion_results_btn = request.POST.get("insertion_results_btn")
        duplicate_btn = request.POST.get("duplicate_btn")
        draft_btn = request.POST.get("draft_btn")
        if(edit_btn):
            playlist = Playlists.objects.get(pk=id_playlist)
            channels = Channels.objects.filter(id_user=request.session['id_user'])
            region = ChannelsZone.objects.get(id_zone_channel=playlist.id_zone_channel.id_zone_channel)
            d = playlist.broadcastdate

            win = Windows.objects.filter(id_playlist=playlist.id_playlist).values_list('id_window', flat=True)

            data = {}
            data["windows"] = []
            data["number_of_wins"] = 0
            i = 0

            windows = Windows.objects.filter(id_playlist=playlist.id_playlist).order_by('window_start')
            for window in windows:
                window_dic = {}
                window_dic['i'] = i
                i = i+1
                window_dic['id_window'] = window.id_window
                window_start_formated = datetime.datetime.strptime(window.window_start, '%Y-%m-%d %H:%M:%S')
                window_start_formated_2 = window_start_formated.strftime("%H:%M")
                window_end_formated = datetime.datetime.strptime(window.window_end, '%Y-%m-%d %H:%M:%S')
                window_end_formated_2 = window_end_formated.strftime("%H:%M")
                window_dic['window_start'] = window_start_formated_2
                window_dic['window_end'] = window_end_formated_2
                window_dic['avails_in_win'] = []
                window_dic['num_of_avails'] = 0
                data["number_of_wins"] = i
                avails = Avails.objects.filter(id_window=window.id_window)
                j = 0
                for avail in avails:
                    avail_dic = {}
                    avail_dic["j"] = j
                    j = j+1
                    avail_dic["id_avail"] = avail.id_avail
                    avail_start_formated = datetime.datetime.strptime(avail.avail_start, '%Y-%m-%d %H:%M:%S')
                    avail_start_formated2 = avail_start_formated.strftime("%H:%M")
                    avail_dic["avail_start"] = avail_start_formated2
                    avail_dic["adspots_in_avail"] = []
                    window_dic["avails_in_win"].append(avail_dic)
                    adspots = AdspotsInAvail.objects.filter(id_avail=avail.id_avail)
                    window_dic['num_of_avails'] = j
                    k = 0
                    avail_dic["num_of_adspots"] = 0
                    for adspot in adspots:
                        adspot_dic = {}
                        adspot_dic["k"] = k
                        k = k+1
                        avail_dic["num_of_adspots"] = k
                        adspot_dic["id_adsinavail"] = adspot.id_adsinavail
                        adspot_dic["id_adspot"] = adspot.id_adspot
                        avail_dic["adspots_in_avail"].append(adspot_dic)
                data["windows"].append(window_dic)


            now = datetime.datetime.now()
            activite = 'Edit PlayList'
            desc = 'Admin Edit PlayList  id: ' + str(playlist.id_playlist)
            activity = Activity(activity=activite , date=now ,description=desc )
            activity.save()

            data_playlist = {'data':data["windows"],'playlist': playlist, 'channels': channels,'region':region,'d':d,'number_of_wins':data["number_of_wins"]}
            return render(request, "DAIManagementApp/edit_playlist_id.html", data_playlist)
        elif(duplicate_btn):
            playlist = Playlists.objects.get(pk=id_playlist)
            channels = Channels.objects.filter(id_user=request.session['id_user'])
            region = ChannelsZone.objects.get(id_zone_channel=playlist.id_zone_channel.id_zone_channel)
            d = playlist.broadcastdate

            win = Windows.objects.filter(id_playlist=playlist.id_playlist).values_list('id_window', flat=True)

            data = {}
            data["windows"] = []
            data["number_of_wins"] = 0
            i = 0

            windows = Windows.objects.filter(id_playlist=playlist.id_playlist)
            for window in windows:
                window_dic = {}
                window_dic['i'] = i
                i = i+1
                window_dic['id_window'] = window.id_window
                window_start_formated = datetime.datetime.strptime(window.window_start, '%Y-%m-%d %H:%M:%S')
                window_start_formated_2 = window_start_formated.strftime("%H:%M")
                window_end_formated = datetime.datetime.strptime(window.window_end, '%Y-%m-%d %H:%M:%S')
                window_end_formated_2 = window_end_formated.strftime("%H:%M")
                window_dic['window_start'] = window_start_formated_2
                window_dic['window_end'] = window_end_formated_2
                window_dic['avails_in_win'] = []
                window_dic['num_of_avails'] = 0
                data["number_of_wins"] = i
                avails = Avails.objects.filter(id_window=window.id_window)
                j = 0
                for avail in avails:
                    avail_dic = {}
                    avail_dic["j"] = j
                    j = j+1
                    avail_dic["id_avail"] = avail.id_avail
                    avail_start_formated = datetime.datetime.strptime(avail.avail_start, '%Y-%m-%d %H:%M:%S')
                    avail_start_formated2 = avail_start_formated.strftime("%H:%M")
                    avail_dic["avail_start"] = avail_start_formated2
                    avail_dic["adspots_in_avail"] = []
                    window_dic["avails_in_win"].append(avail_dic)
                    adspots = AdspotsInAvail.objects.filter(id_avail=avail.id_avail)
                    window_dic['num_of_avails'] = j
                    k = 0
                    avail_dic["num_of_adspots"] = 0
                    for adspot in adspots:
                        adspot_dic = {}
                        adspot_dic["k"] = k
                        k = k+1
                        avail_dic["num_of_adspots"] = k
                        adspot_dic["id_adsinavail"] = adspot.id_adsinavail
                        adspot_dic["id_adspot"] = adspot.id_adspot
                        avail_dic["adspots_in_avail"].append(adspot_dic)
                data["windows"].append(window_dic)


            data_playlist = {'data':data["windows"],'playlist': playlist, 'channels': channels,'region':region,'d':d,'number_of_wins':data["number_of_wins"]}
            return render(request, "DAIManagementApp/duplicate_playlist_id.html", data_playlist)
        elif(view_btn):
            return redirect('view_playlist', id_playlist=id_playlist)
        else:
            return redirect('insertion_results', id_playlist=id_playlist)
    else:
        cutoff = datetime.datetime.now() - timedelta(days=30)
        id_session = str(request.session['id_user'])
        channels = Channels.objects.filter(id_user=id_session).values_list('id_channel', flat=True)
        playlists = Playlists.objects.filter(id_channel__in=channels, is_draft='0', start_date__lte=cutoff).order_by('-broadcastdate')

        data = {'playlists': playlists}
        return render(request, "DAIManagementApp/edit_older_playlist.html",data)



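# insertion_results renders the verification status of a playlist. In short, it:
#   1. loads the playlist together with its channel and zone;
#   2. if no complete verification record exists yet, downloads the matching
#      .ver file over FTP and upserts its <Spot> entries into Verifs;
#   3. rebuilds the window -> avail -> ad-spot structure and attaches the latest
#      airStatusCode / airTime found for each spot before rendering the template.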
@check_user
def insertion_results(request, id_playlist):
    # Retrieve the playlist object with the specified primary key
    playlist = Playlists.objects.get(pk=id_playlist)
    # Retrieve the channels object associated with the playlist's channel ID
    channels = Channels.objects.get(id_channel=playlist.id_channel.id_channel)
    # Retrieve the region information associated with the playlist's zone channel
    region = ChannelsZone.objects.get(id_zone_channel=playlist.id_zone_channel.id_zone_channel)
    #
    ftp_channel_name = channels.ftp_channel_name
    #
    networkname = channels.networkname
    #
    broadcastdate = playlist.broadcastdate.replace("-","")
    #
    zonename = region.zonename if channels.id_channel != 1 else "2005"
    #
    verifs_number = region.verifs_number
    #
    result = Verifs.objects.filter(
        networkname=networkname,
        zonename=region.id_zone_channel,
        broadcastDate=broadcastdate
    ).last()
    #
    d = playlist.broadcastdate
    #
    broadcastDate_verif = str(playlist.broadcastdate).replace("-","")
    #
    win = Windows.objects.filter(id_playlist=playlist.id_playlist).values_list('id_window', flat=True)
    #
    data = {}
    #
    data["windows"] = []
    #
    data["number_of_wins"] = 0
    #
    i = 0
    #
    record_inserted = 0
    #
    verComplete = result.vercomplete if result else "false"
    #
    filename_in_ftp = f"{broadcastdate}-{zonename}-{verifs_number}.ver"

    #
    if verComplete == "false":
    # if true:
        print("Wait I'm in ftp")
        #
        path_in_app = f"files/results/{ftp_channel_name}/verifs"
        os.makedirs(path_in_app, exist_ok=True)
        #
        ftp_host = "172.16.205.1"
        ftp_port =  60000
        ftp_username = "SanoaMedia"
        ftp_password = "SanoaMedia"
        #
        remote_path = f"2M/verifs/{filename_in_ftp}"
        local_path = f"{path_in_app}/{filename_in_ftp}"
        #
        ftp = FTPConnector(ftp_host, ftp_port, ftp_username, ftp_password)
        # Download a file from the server
        ftp.download_file(remote_path, local_path)

        if Path(local_path).exists():
            doc = xml2.parse(local_path)
            Spots = doc.getElementsByTagName("Spot")
            verComplete = doc.firstChild.getAttribute("verComplete")
            results = []
            for spot in Spots:
                trafficId = spot.getAttribute("trafficId")
                spotId  = spot.getAttribute("spotId")
                airTime = spot.getAttribute("airTime")
                newAirTime = airTime.replace("T", " ")
                newAirTime2 = newAirTime.replace("+02:00", "")
                airLength = spot.getAttribute("airLength")
                airStatusCode = spot.getAttribute("airStatusCode")
                version = spot.getAttribute("revision")
                try:
                    verif_to_update = Verifs.objects.get(
                        networkname = networkname,
                        zonename = region.id_zone_channel,
                        broadcastDate = broadcastdate,
                        trafficId = trafficId,
                        spotId = spotId
                    )
                    if verif_to_update:
                        verif_to_update.airTime = newAirTime2
                        verif_to_update.airLength = airLength
                        verif_to_update.airStatuscode = airStatusCode
                        verif_to_update.revision = version
                        verif_to_update.vercomplete = verComplete
                        verif_to_update.save()
                    else:
                        new_ad_verif = Verifs(
                            networkname = networkname,
                            zonename = region.id_zone_channel,
                            broadcastDate = broadcastdate,
                            trafficId = trafficId,
                            spotId = spotId,
                            airTime = newAirTime2,
                            airLength = airLength,
                            airStatuscode = airStatusCode,
                            revision = version,
                            vercomplete = verComplete
                        )
                        new_ad_verif.save()
                except Verifs.DoesNotExist:
                    print('oups')
                    new_ad_verif = Verifs(
                        networkname = networkname,
                        zonename = region.id_zone_channel,
                        broadcastDate = broadcastdate,
                        trafficId = trafficId,
                        spotId = spotId,
                        airTime = newAirTime2,
                        airLength = airLength,
                        airStatuscode = airStatusCode,
                        revision = version,
                        vercomplete = verComplete
                    )
                    new_ad_verif.save()
    #
    windows = Windows.objects.filter(id_playlist=playlist.id_playlist).order_by('window_start')
    #
    for window in windows:
        window_dic = {}
        window_dic['i'] = i
        i = i+1
        window_dic['id_window'] = window.id_window
        window_start_formated = datetime.datetime.strptime(window.window_start, '%Y-%m-%d %H:%M:%S')
        window_start_formated_2 = window_start_formated.strftime("%H:%M")
        window_end_formated = datetime.datetime.strptime(window.window_end, '%Y-%m-%d %H:%M:%S')
        window_end_formated_2 = window_end_formated.strftime("%H:%M")
        window_dic['window_start'] = window_start_formated_2
        window_dic['window_end'] = window_end_formated_2
        window_dic['avails_in_win'] = []
        window_dic['num_of_avails'] = 0
        data["number_of_wins"] = i
        avails = Avails.objects.filter(id_window=window.id_window)
        j = 0
        for avail in avails:
            avail_dic = {}
            avail_dic["j"] = j
            j = j+1
            avail_dic["id_avail"] = avail.id_avail
            avail_start_formated = datetime.datetime.strptime(avail.avail_start, '%Y-%m-%d %H:%M:%S')
            avail_start_formated2 = avail_start_formated.strftime("%H:%M")
            avail_dic["avail_start"] = avail_start_formated2
            avail_dic["adspots_in_avail"] = []
            window_dic["avails_in_win"].append(avail_dic)
            adspots = AdspotsInAvail.objects.filter(id_avail=avail.id_avail)
            window_dic['num_of_avails'] = j
            k = 0
            avail_dic["num_of_adspots"] = 0
            for adspot in adspots:
                adspot_dic = {}
                adspot_dic["k"] = k
                k = k+1
                avail_dic["num_of_adspots"] = k
                adspot_dic["id_adsinavail"] = adspot.id_adsinavail
                adspot_dic["id_adspot"] = adspot.id_adspot
                print("Wait I'm in database")
                try:
                    result = Verifs.objects.filter(broadcastDate = broadcastDate_verif, trafficId = adspot.trafficid, revision__lte = int(playlist.version)).latest('id_verif')
                    adspot_dic["airStatusCode"] = result.airStatuscode
                    adspot_dic["airTime"] = result.airTime
                except Verifs.DoesNotExist:
                    # no verification record for this spot yet
                    print("file not ready yet")
                avail_dic["adspots_in_avail"].append(adspot_dic)
        data["windows"].append(window_dic)
        record_inserted = 0
    #
    data_playlist = {
        'data':data["windows"],
        'playlist': playlist,
        'channels': channels,
        'region':region,
        'd':d,
        'number_of_wins':data["number_of_wins"],
        'record_inserted':record_inserted
    }
    return render(
        request,
        "DAIManagementApp/insertion_results.html",
        data_playlist
    )
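
# The Spot-parsing loop inside insertion_results could be factored into a small
# helper like the sketch below. This is an illustrative suggestion only: the
# helper name `_parse_verif_spots` and its return shape are assumptions, not part
# of the existing code. It relies on the same minidom alias (xml2) and the same
# .ver attributes the view already reads.
def _parse_verif_spots(local_path):
    doc = xml2.parse(local_path)
    ver_complete = doc.firstChild.getAttribute("verComplete")
    spots = []
    for spot in doc.getElementsByTagName("Spot"):
        # normalise the airTime the same way the view does
        air_time = spot.getAttribute("airTime").replace("T", " ").replace("+02:00", "")
        spots.append({
            "trafficId": spot.getAttribute("trafficId"),
            "spotId": spot.getAttribute("spotId"),
            "airTime": air_time,
            "airLength": spot.getAttribute("airLength"),
            "airStatusCode": spot.getAttribute("airStatusCode"),
            "revision": spot.getAttribute("revision"),
        })
    return ver_complete, spots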


# Helper (not a Django view): builds the same verification payload as
# insertion_results but takes only the playlist id, so the @check_user
# decorator (which expects a request) is not applied here.
def insertion_results_getter(id_playlist):
    playlist = Playlists.objects.get(pk=id_playlist)
    channels = Channels.objects.get(id_channel=playlist.id_channel.id_channel)
    region = ChannelsZone.objects.get(id_zone_channel=playlist.id_zone_channel.id_zone_channel)
    ftp_channel_name = channels.ftp_channel_name
    networkname = channels.networkname
    zonename = region.zonename
    broadcastdate = playlist.broadcastdate.replace("-","")
    result = Verifs.objects.filter(networkname=networkname,zonename=zonename,broadcastDate=broadcastdate).last()
    d = playlist.broadcastdate
    broadcastDate_verif = str(playlist.broadcastdate).replace("-","")
    win = Windows.objects.filter(id_playlist=playlist.id_playlist).values_list('id_window', flat=True)
    data = {}
    data["windows"] = []
    data["number_of_wins"] = 0
    i = 0
    record_inserted = 0
    if(result):
        verComplete = result.vercomplete
    else:
        verComplete = "false"
    if verComplete == "false":
    # if true:
        print("Wait I'm in ftp")
        filename_in_ftp = broadcastdate+"-"+zonename+"-00001.ver"
        path_in_ftp = "/"+ftp_channel_name+"/verifs/"
        path_in_app = "files/results/"+ftp_channel_name+"/verifs"
        if not os.path.exists(path_in_app):
            os.makedirs(path_in_app)
        # downloadFTP("uk06.tmd.cloud", "testftp@epgsano.com", "I?#=s3FfnSu_", "/2M/schedules/",  "test.txt" , "/var/www/html/DAI-Management/DAIManagement/FTP_files/")
        useraccess = Useraccess.objects.get(id_user=1)
        print(useraccess)
        downloadFTP(useraccess.ftp_server, useraccess.ftp_user, useraccess.ftp_password, path_in_ftp , filename_in_ftp, path_in_app)
        # def downloadFTP(host, user, password, filepath_inftp, file_inftp,  localpath):
        if Path(path_in_app+'/'+filename_in_ftp).exists():
            doc = xml2.parse(path_in_app+'/'+filename_in_ftp)
            Spots = doc.getElementsByTagName("Spot")
            verComplete = doc.firstChild.getAttribute("verComplete")
            results = []
            for spot in Spots:
                trafficId = spot.getAttribute("trafficId")
                spotId  = spot.getAttribute("spotId")
                airTime = spot.getAttribute("airTime")
                newAirTime = airTime.replace("T", " ")
                newAirTime2 = newAirTime.replace("+02:00", "")
                airLength = spot.getAttribute("airLength")
                airStatusCode = spot.getAttribute("airStatusCode")
                version = spot.getAttribute("revision")
                try:
                    verif_to_update = Verifs.objects.get(networkname=networkname, zonename=zonename, broadcastDate=broadcastdate, trafficId=trafficId, spotId=spotId)
                    if verif_to_update:
                        verif_to_update.airTime = newAirTime2
                        verif_to_update.airLength = airLength
                        verif_to_update.airStatuscode = airStatusCode
                        verif_to_update.revision = version
                        verif_to_update.vercomplete = verComplete
                        verif_to_update.save()
                    else:
                        new_ad_verif = Verifs(networkname=networkname, zonename=zonename, broadcastDate=broadcastdate, trafficId=trafficId, spotId=spotId, airTime=newAirTime2, airLength=airLength, airStatuscode=airStatusCode, revision=version,  vercomplete = verComplete)
                        new_ad_verif.save()
                except Verifs.DoesNotExist:
                    print('oups')
                    new_ad_verif = Verifs(networkname=networkname, zonename=zonename, broadcastDate=broadcastdate, trafficId=trafficId, spotId=spotId, airTime=newAirTime2, airLength=airLength, airStatuscode=airStatusCode, revision=version,  vercomplete = verComplete)
                    new_ad_verif.save()
    windows = Windows.objects.filter(id_playlist=playlist.id_playlist).order_by('window_start')
    for window in windows:
        window_dic = {}
        window_dic['i'] = i
        i = i+1
        window_dic['id_window'] = window.id_window
        window_start_formated = datetime.datetime.strptime(window.window_start, '%Y-%m-%d %H:%M:%S')
        window_start_formated_2 = window_start_formated.strftime("%H:%M")
        window_end_formated = datetime.datetime.strptime(window.window_end, '%Y-%m-%d %H:%M:%S')
        window_end_formated_2 = window_end_formated.strftime("%H:%M")
        window_dic['window_start'] = window_start_formated_2
        window_dic['window_end'] = window_end_formated_2
        window_dic['avails_in_win'] = []
        window_dic['num_of_avails'] = 0
        data["number_of_wins"] = i
        avails = Avails.objects.filter(id_window=window.id_window)
        j = 0
        for avail in avails:
            avail_dic = {}
            avail_dic["j"] = j
            j = j+1
            avail_dic["id_avail"] = avail.id_avail
            avail_start_formated = datetime.datetime.strptime(avail.avail_start, '%Y-%m-%d %H:%M:%S')
            avail_start_formated2 = avail_start_formated.strftime("%H:%M")
            avail_dic["avail_start"] = avail_start_formated2
            avail_dic["adspots_in_avail"] = []
            window_dic["avails_in_win"].append(avail_dic)
            adspots = AdspotsInAvail.objects.filter(id_avail=avail.id_avail)
            window_dic['num_of_avails'] = j
            k = 0
            avail_dic["num_of_adspots"] = 0
            for adspot in adspots:
                adspot_dic = {}
                adspot_dic["k"] = k
                k = k+1
                avail_dic["num_of_adspots"] = k
                adspot_dic["id_adsinavail"] = adspot.id_adsinavail
                adspot_dic["id_adspot"] = adspot.id_adspot
                print("Wait I'm in database")
                try:
                    result = Verifs.objects.filter(broadcastDate = broadcastDate_verif, trafficId = adspot.trafficid, revision__lte = int(playlist.version)).latest('id_verif')
                    adspot_dic["airStatusCode"] = result.airStatuscode
                    adspot_dic["airTime"] = result.airTime
                except Verifs.DoesNotExist:
                    # no verification record for this spot yet
                    print("file not ready yet")
                avail_dic["adspots_in_avail"].append(adspot_dic)
        data["windows"].append(window_dic)
        record_inserted = 0
    data_playlist = {'data':data["windows"],'playlist': playlist, 'channels': channels,'region':region,'d':d,'number_of_wins':data["number_of_wins"],'record_inserted':record_inserted}
    return data_playlist

# Updated With Static FTP Connection
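# update_playlist handles two POST paths:
#   - "apply": persist a new non-draft version of the playlist (windows, avails
#     and ad spots rebuilt from the form), regenerate the schedule XML and push
#     it over FTP, then redirect to view_playlist;
#   - otherwise: persist the edited playlist as a new draft version and redirect
#     to draft_playlist.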
@check_user
def update_playlist(request, id_playlist):
    if request.method == 'POST':
        apply_btn = request.POST.get("apply_btn")
        draft_btn = request.POST.get("draft_btn")
        if(apply_btn):
            id_user = request.session['id_user']
            useraccess = Useraccess.objects.get(id_user=id_user)
            channel_id = request.POST.get('channel_id')
            channeldata = Channels.objects.get(id_channel=channel_id)

            # zonename = request.POST.get('zonename')

            daydate = request.POST.get('day')
            number_of_windows = request.POST.get('numofwin')
            #return HttpResponse(number_of_windows)
            # channel_zone = ChannelsZone.objects.get(id_channel=channel_id, zonename=zonename)
            version = request.POST.get('version')
            draft_version_old = request.POST.get('draft_version')

            daydate = datetime.datetime.strptime(str(daydate), '%Y-%m-%d')
            daydate = daydate.strftime('%Y-%m-%d')
            start_date = str(daydate) + "T00:01:00+00:00"
            end_date = str(daydate) + "T23:59:00+00:00"
            now = datetime.datetime.now()

            draft_version = request.POST.get('draft_version')

            playlist = Playlists.objects.get(pk=id_playlist)

            version_p = playlist.version
            broadcastdate_p = playlist.broadcastdate
            draft_version_p = playlist.draft_version
            start_date_p = playlist.start_date
            end_date_p = playlist.end_date

            # max_version = Playlists.objects.filter(broadcastdate=str(daydate)).aggregate(Max('version')).get('version__max')

            max_version = Playlists.objects.filter(
                broadcastdate = f"{daydate}",
                id_zone_channel = playlist.id_zone_channel.id_zone_channel
            ).count()

            new_version = int(max_version)+1

            Playlist = Playlists(
                id_channel_id      = channel_id ,
                version            = str(new_version),
                broadcastdate      = str(daydate),
                start_date         = start_date,
                end_date           = end_date,
                creation_datetime  = now,
                # id_zone_channel_id = channel_zone.id_zone_channel,
                id_zone_channel_id = playlist.id_zone_channel.id_zone_channel,
                is_draft           = '0',
                draft_version      = draft_version_old
            )

            Playlist.save()
            traffic = 0
            record_inserted = 0
            var_test = 0
            for i in range(int(number_of_windows)):
                if request.POST.get('numofavails['+str(i)+']'):
                    numofavails = request.POST.get('numofavails['+str(i)+']')
                    window_start = request.POST.get('windowstart['+str(i)+']')
                    window_start = daydate+' '+window_start+':00'
                    window_end = request.POST.get('winend_['+str(i)+']')
                    window_end = daydate+' '+window_end+':00'
                    FMT = '%Y-%m-%d %H:%M:%S'
                    window_duration = datetime.datetime.strptime(window_end, FMT) - datetime.datetime.strptime(window_start, FMT)
                    window_duration = datetime.datetime.strptime(str(window_duration), '%H:%M:%S')
                    window_duration = window_duration.strftime('%H%M%S00')
                    Window = Windows(
                        id_playlist_id  = Playlist.id_playlist,
                        window_start    = window_start,
                        window_end      = window_end,
                        window_duration = window_duration
                    )
                    Window.save()

                    for j in range(int(numofavails)):
                        if request.POST.get('availstart['+str(i)+']['+str(j)+']'):
                            av_start = request.POST.get('availstart['+str(i)+']['+str(j)+']')
                            av_start = daydate+' '+av_start+':00'
                            number_of_ads = request.POST.get('numofads['+str(i)+']['+str(j)+']')
                            Avail = Avails(
                                id_window_id  = Window.id_window,
                                avail_start   = av_start,
                                availinwindow = str(j+1),
                                datetime      = now
                            )
                            Avail.save()
                            for k in range(int(number_of_ads)):
                                if request.POST.get('ad['+str(i)+']['+str(j)+']['+str(k)+']'):
                                    adspot = request.POST.get('ad['+str(i)+']['+str(j)+']['+str(k)+']')
                                    traffic +=1
                                    AdspotsInAv = AdspotsInAvail(
                                        id_avail_id = Avail.id_avail,
                                        id_adspot_id = adspot,
                                        positioninavail = str(k+1),
                                        trafficid = traffic
                                    )
                                    AdspotsInAv.save()
                                    record_inserted = 1

            last_id_playlist = Playlist.id_playlist

            # xmlfilename = GenerateXMLfromDatabase(daydate, channel_id, channel_zone.id_zone_channel, str(new_version),draft_version_old)
            # path_inftp = channeldata.ftp_channel_name+'/schedules/'+channel_zone.region
            # uploadFTP(useraccess.ftp_server, useraccess.ftp_port, useraccess.ftp_user, useraccess.ftp_password, xmlfilename , path_inftp)

            xml_playlist_res = generateSchedule(Playlist)
            print(xml_playlist_res)
            # Check the result
            if xml_playlist_res["status"]:
                ftp_host = "172.16.205.1"
                ftp_port =  60000
                ftp_username = "SanoaMedia"
                ftp_password = "SanoaMedia"
                local_file_path = xml_playlist_res["file"]
                remote_file_path = None
                if Playlist.id_zone_channel.networkname == "REGNL":
                    remote_file_path = "2M/schedules/REG1_NL"
                elif Playlist.id_zone_channel.networkname == "2M":
                    remote_file_path = "2M/schedules/REG1_FR"
                if remote_file_path:
                    # uploadFTP(ftp_server, ftp_port, ftp_user, ftp_password, xml_file_path , ftp_upload_path)
                    ftp = FTPConnector(ftp_host, ftp_port, ftp_username, ftp_password)
                    # Upload a file to the server
                    ftp.upload_file(local_file_path, remote_file_path)
            else:
                print(f"Failed to generate playlist file. Error: {xml_playlist_res['message']}")

            activity = Activity(
                activity    = "Update PlayList",
                date        = datetime.datetime.now(),
                description = f"Admin Update PlayList id: { Playlist.id_playlist }"
            )
            activity.save()

            ############ Display data on view_playlist_id page ##################
            return redirect('view_playlist', id_playlist=last_id_playlist)
        else:
            useraccess = Useraccess.objects.get(id_user=request.session['id_user'])
            channel_id = request.POST.get('channel_id')
            channeldata = Channels.objects.get(id_channel=channel_id)
            zonename = request.POST.get('zonename')
            daydate = request.POST.get('day')
            number_of_windows = request.POST.get('numofwin')
            channel_zone = ChannelsZone.objects.get(id_channel=channel_id, zonename=zonename)
            playlist = Playlists.objects.get(pk=id_playlist)

            version_p = playlist.version
            broadcastdate_p = playlist.broadcastdate
            draft_version_p = playlist.draft_version
            start_date_p = playlist.start_date
            end_date_p = playlist.end_date

            max_version_draft = Playlists.objects.filter(broadcastdate=broadcastdate_p).aggregate(Max('draft_version')).get('draft_version__max')
            # aggregate() returns None when no draft exists yet for this broadcast date
            new_version_draft = max_version_draft if max_version_draft is not None else 0

            #daydate = datetime.datetime.strptime(str(daydate), '%m/%d/%Y')
            #daydate = daydate.strftime('%Y-%m-%d')
            start_date = str(daydate) + "T00:01:00+00:00"
            end_date = str(daydate) + "T23:59:00+00:00"
            now = datetime.datetime.now()
            Playlist = Playlists(
                id_channel_id      = channel_id,
                version            = version_p,
                broadcastdate      = broadcastdate_p,
                start_date         = start_date_p,
                end_date           = end_date_p,
                creation_datetime  = now,
                id_zone_channel_id = channel_zone.id_zone_channel,
                is_draft           = '1',
                draft_version      = new_version_draft + 1
            )
            Playlist.save()
            traffic = 0
            record_inserted = 0
            for i in range(int(number_of_windows)):
                if request.POST.get('numofavails['+str(i)+']'):
                    numofavails = request.POST.get('numofavails['+str(i)+']')
                    window_start = request.POST.get('windowstart['+str(i)+']')
                    window_start = daydate+' '+window_start+':00'
                    window_end = request.POST.get('winend_['+str(i)+']')
                    window_end = daydate+' '+window_end+':00'
                    FMT = '%Y-%m-%d %H:%M:%S'
                    window_duration = datetime.datetime.strptime(window_end, FMT) - datetime.datetime.strptime(window_start, FMT)
                    window_duration = datetime.datetime.strptime(str(window_duration), '%H:%M:%S')
                    window_duration = window_duration.strftime('%H%M%S00')
                    Window = Windows(id_playlist_id=Playlist.id_playlist, window_start=window_start, window_end=window_end, window_duration=window_duration)
                    Window.save()

                    for j in range(int(numofavails)):
                        if request.POST.get('availstart['+str(i)+']['+str(j)+']'):
                            av_start = request.POST.get('availstart['+str(i)+']['+str(j)+']')
                            av_start = daydate+' '+av_start+':00'
                            number_of_ads = request.POST.get('numofads['+str(i)+']['+str(j)+']')
                            Avail = Avails(id_window_id=Window.id_window, avail_start=av_start, availinwindow=str(j+1), datetime=now)
                            Avail.save()

                            for k in range(int(number_of_ads)):
                                if request.POST.get('ad['+str(i)+']['+str(j)+']['+str(k)+']'):
                                    adspot = request.POST.get('ad['+str(i)+']['+str(j)+']['+str(k)+']')
                                    traffic += 1
                                    AdspotsInAv = AdspotsInAvail(id_avail_id=Avail.id_avail, id_adspot_id=adspot, positioninavail=str(k+1), trafficid=traffic)
                                    AdspotsInAv.save()
            # xmlfilename = GenerateXMLfromDatabase(daydate, channel_id, channel_zone.id_zone_channel, '1' )
            # path_inftp = channeldata.ftp_channel_name+'/schedules/'+channel_zone.region
            # uploadFTP(useraccess.ftp_server, useraccess.ftp_port, useraccess.ftp_user, useraccess.ftp_password, xmlfilename , path_inftp)
            last_id_playlist = Playlist.id_playlist

            ############ Display data on view_playlist_id page ##################
            return redirect('draft_playlist')

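# Both branches of update_playlist above (and duplicate_playlist_update below)
# repeat the same loop that rebuilds windows, avails and ad spots from the POSTed
# form. The sketch below shows one way this could be shared; the function name and
# the `window_end_field` parameter are assumptions (the forms use 'winend_' in one
# view and 'windowend' in another), so treat it as a suggestion rather than the
# current behaviour.
def _save_schedule_from_post(request, playlist, daydate, number_of_windows, window_end_field='winend_'):
    from datetime import datetime as _dt  # local import to avoid module-level alias ambiguity
    fmt = '%Y-%m-%d %H:%M:%S'
    now = _dt.now()
    traffic = 0
    for i in range(int(number_of_windows)):
        if not request.POST.get('numofavails[' + str(i) + ']'):
            continue
        numofavails = request.POST.get('numofavails[' + str(i) + ']')
        window_start = daydate + ' ' + request.POST.get('windowstart[' + str(i) + ']') + ':00'
        window_end = daydate + ' ' + request.POST.get(window_end_field + '[' + str(i) + ']') + ':00'
        duration = _dt.strptime(window_end, fmt) - _dt.strptime(window_start, fmt)
        duration = _dt.strptime(str(duration), '%H:%M:%S').strftime('%H%M%S00')
        window = Windows(id_playlist_id=playlist.id_playlist, window_start=window_start,
                         window_end=window_end, window_duration=duration)
        window.save()
        for j in range(int(numofavails)):
            if not request.POST.get('availstart[' + str(i) + '][' + str(j) + ']'):
                continue
            av_start = daydate + ' ' + request.POST.get('availstart[' + str(i) + '][' + str(j) + ']') + ':00'
            number_of_ads = request.POST.get('numofads[' + str(i) + '][' + str(j) + ']')
            avail = Avails(id_window_id=window.id_window, avail_start=av_start,
                           availinwindow=str(j + 1), datetime=now)
            avail.save()
            for k in range(int(number_of_ads)):
                adspot = request.POST.get('ad[' + str(i) + '][' + str(j) + '][' + str(k) + ']')
                if adspot:
                    traffic += 1
                    AdspotsInAvail(id_avail_id=avail.id_avail, id_adspot_id=adspot,
                                   positioninavail=str(k + 1), trafficid=traffic).save()
    return traffic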


@check_user
def view_playlist(request, id_playlist):
    playlist = Playlists.objects.get(pk=id_playlist)
    channels = Channels.objects.filter(id_user=request.session['id_user'])
    region = ChannelsZone.objects.get(id_zone_channel=playlist.id_zone_channel.id_zone_channel)
    d = playlist.broadcastdate

    win = Windows.objects.filter(id_playlist=playlist.id_playlist).values_list('id_window', flat=True)

    data = {}
    data["windows"] = []
    data["number_of_wins"] = 0
    i = 0
    record_inserted = 0
    windows = Windows.objects.filter(id_playlist=playlist.id_playlist)
    for window in windows:
        window_dic = {}
        window_dic['i'] = i
        i = i+1
        window_dic['id_window'] = window.id_window
        window_start_formated = datetime.datetime.strptime(window.window_start, '%Y-%m-%d %H:%M:%S')
        window_start_formated_2 = window_start_formated.strftime("%H:%M")
        window_end_formated = datetime.datetime.strptime(window.window_end, '%Y-%m-%d %H:%M:%S')
        window_end_formated_2 = window_end_formated.strftime("%H:%M")
        window_dic['window_start'] = window_start_formated_2
        window_dic['window_end'] = window_end_formated_2
        window_dic['avails_in_win'] = []
        window_dic['num_of_avails'] = 0
        data["number_of_wins"] = i
        avails = Avails.objects.filter(id_window=window.id_window)
        j = 0
        for avail in avails:
            avail_dic = {}
            avail_dic["j"] = j
            j = j+1
            avail_dic["id_avail"] = avail.id_avail

            avail_start_formated = datetime.datetime.strptime(avail.avail_start, '%Y-%m-%d %H:%M:%S')
            avail_start_formated2 = avail_start_formated.strftime("%H:%M")

            avail_dic["avail_start"] = avail_start_formated2
            avail_dic["adspots_in_avail"] = []
            window_dic["avails_in_win"].append(avail_dic)
            adspots = AdspotsInAvail.objects.filter(id_avail=avail.id_avail)
            window_dic['num_of_avails'] = j
            k = 0
            avail_dic["num_of_adspots"] = 0
            for adspot in adspots:
                adspot_dic = {}
                adspot_dic["k"] = k
                k = k+1
                avail_dic["num_of_adspots"] = k
                adspot_dic["id_adsinavail"] = adspot.id_adsinavail
                adspot_dic["id_adspot"] = adspot.id_adspot
                avail_dic["adspots_in_avail"].append(adspot_dic)
        data["windows"].append(window_dic)
        record_inserted = 0


    data_playlist = {'data':data["windows"],'playlist': playlist, 'channels': channels,'region':region,'d':d,'number_of_wins':data["number_of_wins"],'record_inserted':record_inserted}
    return render(request, "DAIManagementApp/view_playlist_id.html", data_playlist)

@check_user
def duplicate_playlist(request,id_playlist="1"):
    if request.method == 'POST':
        playlist = Playlists.objects.get(pk=id_playlist)
        channels = Channels.objects.filter(id_user=request.session['id_user'])
        region = ChannelsZone.objects.get(id_zone_channel=playlist.id_zone_channel.id_zone_channel)
        d = playlist.broadcastdate

        win = Windows.objects.filter(id_playlist=playlist.id_playlist).values_list('id_window', flat=True)

        data = {}
        data["windows"] = []
        data["number_of_wins"] = 0
        i = 0

        windows = Windows.objects.filter(id_playlist=playlist.id_playlist)
        for window in windows:
            window_dic = {}
            window_dic['i'] = i
            i = i+1
            window_dic['id_window'] = window.id_window
            window_start_formated = datetime.datetime.strptime(window.window_start, '%Y-%m-%d %H:%M:%S')
            window_start_formated_2 = window_start_formated.strftime("%H:%M")
            window_end_formated = datetime.datetime.strptime(window.window_end, '%Y-%m-%d %H:%M:%S')
            window_end_formated_2 = window_end_formated.strftime("%H:%M")
            window_dic['window_start'] = window_start_formated_2
            window_dic['window_end'] = window_end_formated_2
            window_dic['avails_in_win'] = []
            window_dic['num_of_avails'] = 0
            data["number_of_wins"] = i
            avails = Avails.objects.filter(id_window=window.id_window)
            j = 0
            for avail in avails:
                avail_dic = {}
                avail_dic["j"] = j
                j = j+1
                avail_dic["id_avail"] = avail.id_avail
                avail_start_formated = datetime.datetime.strptime(avail.avail_start, '%Y-%m-%d %H:%M:%S')
                avail_start_formated2 = avail_start_formated.strftime("%H:%M")
                avail_dic["avail_start"] = avail_start_formated2
                avail_dic["adspots_in_avail"] = []
                window_dic["avails_in_win"].append(avail_dic)
                adspots = AdspotsInAvail.objects.filter(id_avail=avail.id_avail)
                window_dic['num_of_avails'] = j
                k = 0
                avail_dic["num_of_adspots"] = 0
                for adspot in adspots:
                    adspot_dic = {}
                    adspot_dic["k"] = k
                    k = k+1
                    avail_dic["num_of_adspots"] = k
                    adspot_dic["id_adsinavail"] = adspot.id_adsinavail
                    adspot_dic["id_adspot"] = adspot.id_adspot
                    avail_dic["adspots_in_avail"].append(adspot_dic)
            data["windows"].append(window_dic)


        data_playlist = {'data':data["windows"],'playlist': playlist, 'channels': channels,'region':region,'d':d,'number_of_wins':data["number_of_wins"]}
        return render(request, "DAIManagementApp/duplicate_playlist_id.html", data_playlist)
    else:
        id_session = str(request.session['id_user'])
        channels = Channels.objects.filter(id_user=id_session).values_list('id_channel', flat=True)
        playlists = Playlists.objects.filter(id_channel__in=channels).order_by('-id_playlist')
        data = {'playlists': playlists}
        return render(request, "DAIManagementApp/duplicate_playlist.html",data)

@check_user
def duplicate_playlist_update(request, id_playlist):
    if request.method == 'POST':
        id_user = request.session['id_user']
        useraccess = Useraccess.objects.get(id_user_id=id_user)
        channel_id = request.POST.get('channel_id')
        channeldata = Channels.objects.get(id_channel=channel_id)
        zonename = request.POST.get('zonename')
        daydate = request.POST.get('day')
        number_of_windows = request.POST.get('numofwin')
        #return HttpResponse(number_of_windows)
        channel_zone = ChannelsZone.objects.get(id_channel=channel_id, zonename=zonename)
        version = request.POST.get('version')

        daydate = datetime.datetime.strptime(str(daydate), '%m/%d/%Y')
        daydate = daydate.strftime('%Y-%m-%d')
        start_date = str(daydate) + "T00:01:00+00:00"
        end_date = str(daydate) + "T23:59:00+00:00"
        now = datetime.datetime.now()

        # max_version = Playlists.objects.filter(broadcastdate=str(daydate)).aggregate(Max('version')).get('version__max')
        filtered_playlists = Playlists.objects.filter(broadcastdate=str(daydate)).order_by('-creation_datetime')
        last_playlist = filtered_playlists.first()
        last_playlist_version = int(last_playlist.version) if (last_playlist and hasattr(last_playlist, 'version') and last_playlist.version is not None) else 1

        Playlist = Playlists(
            id_channel_id=channel_id ,
            version=(last_playlist_version+1),
            broadcastdate=str(daydate),
            start_date=start_date,
            end_date=end_date,
            creation_datetime=now,
            id_zone_channel_id=channel_zone.id_zone_channel,
            is_draft='0',
            draft_version='0'
        )
        Playlist.save()
        traffic = 0
        record_inserted = 0
        var_test = 0
        for i in range(int(number_of_windows)):
            if request.POST.get('numofavails['+str(i)+']'):
                numofavails = request.POST.get('numofavails['+str(i)+']')
                window_start = request.POST.get('windowstart['+str(i)+']')
                window_start = daydate+' '+window_start+':00'
                window_end = request.POST.get('windowend['+str(i)+']')
                window_end = daydate+' '+window_end+':00'
                FMT = '%Y-%m-%d %H:%M:%S'
                window_duration = datetime.datetime.strptime(window_end, FMT) - datetime.datetime.strptime(window_start, FMT)
                window_duration = datetime.datetime.strptime(str(window_duration), '%H:%M:%S')
                window_duration = window_duration.strftime('%H%M%S00')
                Window = Windows(id_playlist_id=Playlist.id_playlist, window_start=window_start, window_end=window_end, window_duration=window_duration )
                Window.save()

                for j in range(int(numofavails)):
                    if request.POST.get('availstart['+str(i)+']['+str(j)+']'):
                        av_start = request.POST.get('availstart['+str(i)+']['+str(j)+']')
                        av_start = daydate+' '+av_start+':00'
                        number_of_ads = request.POST.get('numofads['+str(i)+']['+str(j)+']')
                        Avail = Avails(id_window_id=Window.id_window, avail_start=av_start, availinwindow=str(j+1), datetime=now )
                        Avail.save()

                        for k in range(int(number_of_ads)):
                            if request.POST.get('ad['+str(i)+']['+str(j)+']['+str(k)+']'):
                                adspot = request.POST.get('ad['+str(i)+']['+str(j)+']['+str(k)+']')
                                traffic +=1
                                AdspotsInAv = AdspotsInAvail(id_avail_id=Avail.id_avail, id_adspot_id=adspot, positioninavail=str(k+1), trafficid=traffic)
                                AdspotsInAv.save()
                                record_inserted = 1
        last_id_playlist = Playlist.id_playlist
        xmlfilename = GenerateXMLfromDatabase(daydate, channel_id, channel_zone.id_zone_channel, f"{(last_playlist_version+1)}")
        path_inftp = channeldata.ftp_channel_name+'/schedules/'+channel_zone.region
        # uploadFTP(useraccess.ftp_server, useraccess.ftp_port, useraccess.ftp_user, useraccess.ftp_password, xmlfilename , path_inftp)

        ############ Display data on view_playlist_id page ##################
        return redirect('view_playlist', id_playlist=last_id_playlist)



# Parses an uploaded log file and stores the detected ad breaks in the database.
def insert_logs(file_path):

    first_day = None
    count = 0
    with open(file_path, 'r') as file:
        lines = file.read().splitlines()

    for line in lines:
        l = line.split('|')
        # skip lines that do not have the expected "date | level | message" shape
        if len(l) < 3:
            continue

        if l[1].strip() == 'info':
            msg = l[2].strip().split(' ')
            if "Detected" == msg[0]:
                date = l[0].strip().replace(',', '')
                day = date.split(' ')[0]
                day = date_time(day.replace('/', '-'))
                time = date.split(' ')[1]
                if time.split(':')[0] == '24':
                    time = '00' + time[2:]
                if count == 0:
                    first_day = day

                datetime_str = day + ' ' + time

                info = l[2].split(',')
                channel = info[0].split(' ')[-1]
                ch = channel.split('(')[0]
                id_channel = channel.split(":")[1].replace(')', '')
                duration = info[2].split(' ')[3]

                count += 1
                print(str(count) + " Date : " + datetime_str)

                channel = Channels.objects.get(pk=id_channel)
                log = Adbreak_history.objects.update_or_create(id_channel=channel, datetime=datetime_str, day=day, time=time, channel_name=ch, duration=duration)

    print(count)
    return first_day
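
# For reference: insert_logs expects pipe-separated lines roughly shaped like the
# example below (inferred from the parsing above; the exact wording is an
# assumption, only the field positions matter):
#   01/15/2023 20:45:12 | info | Detected adbreak on ChannelName(id:3), region X, with duration 120 sec
# i.e. "<MM/DD/YYYY> <HH:MM:SS> | info | Detected ... <name>(id:<channel id>), <...>, <..> <..> <duration> ..."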

@check_user
def logs(request):
    print(request.method)
    if request.method == "POST":
        print(str(request.FILES['logs_file']))
        # Prepare the directory where the uploaded log file will be stored
        path = "logs/user_" + str(request.session['id_user'])
        filename = str(request.FILES['logs_file'])
        os.makedirs(path, exist_ok=True)
        handle_uploaded_file(request.FILES['logs_file'], path, filename)
        # Process the file
        day = insert_logs(path + '/' + filename)
        # Predict the next day's ad breaks
        #predict_adbreak(day)

    yesterday = datetime.datetime.now() - timedelta(days=15)
    logs = AdbreakHistory.objects.filter(datetime__gte= yesterday )
    return render(request,'DAIManagementApp/add_logs.html' , {'logs':logs})

def date_time(date_str):
    # Convert a 'MM-DD-YYYY' string to 'YYYY-MM-DD' (e.g. '01-15-2023' -> '2023-01-15')
    parts = date_str.split('-')
    return parts[2] + '-' + parts[0] + '-' + parts[1]

@check_user
def get_logs(request):
    method = 'get'
    channels = Channels.objects.all()
    if request.method == 'POST':
        ch = request.POST.get('channel_id')
        day = request.POST.get('day')
        print(" ID : " + ch + " DAY : " + day)

        channel = Channels.objects.get(pk=ch)
        logs = Adbreak_history.objects.filter(id_channel=channel, day=day)
        method = 'post'
        data = {
            'logs': logs,
            'channels': channels,
            'method': method
        }
    else:
        data = {
            'channels': channels,
            'method': method
        }
    return render(request, "DAIManagementApp/get_logs.html", {'data': data})


@check_user
def add_campaign(request):
    if request.method == 'POST':
        # Part 1
        name = request.POST.get('campaign_name')
        id_brand = request.POST.get('id_brand')
        id_adpost = request.POST.get('id_adpost')
        pacing = request.POST.get('pacing')

        # Part data
        start_date = request.POST.get('start_day')
        end_date = request.POST.get('end_day')

        # part cpm
        pacing  = request.POST.get('pacing')
        volume  = request.POST.get('volume')

        delivery  = request.POST.get('delivery')
        if delivery is None:
            delivery = 0
        cpm     = request.POST.get('cpm')

        # Part placement
        placement = []
        time = DayTime.objects.all()
        for i in time:
            placement.append(request.POST.get(i.name))


        #Insert
        brand = Brands.objects.get(pk=id_brand)
        adpost = Adspots.objects.get(pk=id_adpost)

        campaign = Campaigns(name=name,id_advertiser=adpost,id_brand=brand,pacing=pacing,start_day=start_date,end_day=end_date,
                            volume=int(volume) , delivery=int(delivery) , cpm=float(cpm))
        campaign.save()
        now = datetime.datetime.now()
        activite = 'Add Campaign'
        desc = 'Admin Add Campaign  id: ' + str(campaign.id_campaign)
        activity = Activity(activity=activite , date=now ,description=desc )
        activity.save()
        print(campaign.id_campaign)
        campaign = Campaigns.objects.get(pk=str(campaign.id_campaign))
        for i in placement :
            if i is not None:
                time = DayTime.objects.get(pk=i)

                p = Placement(id_time=time ,id_campaign=campaign)
                p.save()
        campaign = Campaigns.objects.all()
        return render(request,'DAIManagementApp/viwes_campaigns.html',{'campaigns': campaign})

    #brands = Brands.objects.all()
    brands = UsersAdvertisers.objects.filter(id_user=request.session['id_user']).filter(status="1")
    channels= Channels.objects.filter(id_user=request.session['id_user'])
    adspots = Adspots.objects.filter(id_channel__in=channels)
    print(len(adspots))
    datetime = DayTime.objects.all()
    data = {
        'brands'  : brands,
        'adspots' : adspots,
        'datetime': datetime
    }
    return render(request,'DAIManagementApp/add_campaign.html', {'data':data})

@check_user
def add_campaign_new(request):
    from datetime import datetime
    if request.method == 'POST':
        # campaign info:
        name = request.POST.get('campaign_name')             # campaign name
        id_agency = request.POST.get('id_agency')            # campaign agency
        id_brand = request.POST.get('id_brand')              # campaign Advertiser
        product = request.POST.get('product_name')           # campaign product
        category = request.POST.get('id_category')           # campaign product
        country = request.POST.get('id_country')             # campaign product
        # campaign periode info:  Part date (start_end)      #
        start_date = request.POST.get('start_day')           # campaign start date
        end_date = request.POST.get('end_day')               # campaign end date
        # # campaign cpm info:                               #
        budget = request.POST.get('budget')                  # campaign budget
        volume = request.POST.get('volume')                  # campaign target impression
        cpm = request.POST.get('cpm')                        # campaign cpm
        broadcasts_day = request.POST.get('broadcasts_day')  # campaign min broadcasts pre day
        # campaign media planning info:                      #
        rotation = request.POST.get('general_rotation')      # campaign media planning rotation aliatoire
        media_type = request.POST.get('media_type')          # campaign media planning media type
        program_category = request.POST.getlist('program_categories') # campaign media planning program category
        pacing  = request.POST.get('pacing')                 # campaign media planning pacing
        # vast_checkbox = True if 'vastCheckbox' in request.POST else False
        # vast_data = request.POST.get('vast_url', '') if vast_checkbox else None

        delivery = ""
        if pacing == "0":
            delivery = 0
        else:
            #
            start_date1 = datetime.strptime(str(start_date), '%Y-%m-%d')
            end_date1 = datetime.strptime(str(end_date), '%Y-%m-%d')
            print(str((end_date1-start_date1).days))
            #
            delivery = int(volume)/int(str((end_date1-start_date1).days))

        campaign = Campaigns(
            name=name,
            id_brand = Brands.objects.get(pk=id_brand),
            id_agency = Agency.objects.get(pk=id_agency),
            product = product,
            category = category,
            country = country,
            start_day = start_date,
            end_day = end_date,
            budget = float(budget),
            volume = int(volume),
            delivery = float(delivery),
            cpm = float(cpm),
            broadcasts_day = int(broadcasts_day),
            media_type = media_type,
            program_category = program_category,
            pacing=pacing,
            general_rotation=int(rotation),
            id_user_id=request.session['id_user'],
            booked="0",
            # is_vast = vast_checkbox,
            # vast_data = vast_data,
            status=CampaignStatus.Prebooked
        )
        campaign.save()

        channels= Channels.objects.filter(id_user=request.session['id_user'])
        if(rotation=='0'):
            for channel in channels:
                channel_placements = request.POST.getlist('check_placement_'+str(channel.id_channel))
                if channel_placements:  # getlist() returns an empty list, never None
                    for placement_of_channel in channel_placements:
                        time = DayTime.objects.get(pk=int(placement_of_channel))
                        placement = Placement(id_time=time ,id_campaign=campaign , id_channel=channel)
                        placement.save()

            # agency = Agency.objects.filter(id_user=request.session['id_user'])
            # advertiser = Brands.objects.filter(id_agency__in=agency)
            # channels= Channels.objects.filter(id_user=request.session['id_user'])
            # adspots = Adspots.objects.filter(id_channel__in=channels)
            # datetime = DayTime.objects.all()
            # # brands = UsersAdvertisers.objects.filter(id_user=request.session['id_user']).filter(status="1").filter(id_brand__in=advertiser)
            # data = {
            #     'brands'  : advertiser,
            #     'agency'  : agency,
            #     'adspots' : adspots,
            #     'channels': channels,
            #     'datetime': datetime
            # }
            # return render(request,'DAIManagementApp/add_campaign_new.html', {'data':data})
            activite = 'Add Campaign'
            desc = 'Admin Add Campaign  id: ' + str(campaign.id_campaign)
            now = datetime.now()
            activity = Activity(activity=activite , date=now ,description=desc )
            activity.save()

            campaigns = Campaigns.objects.filter(id_user_id=request.session['id_user']).order_by('-id_campaign')
            # return render(request , 'DAIManagementApp/edit_campaign.html',{'campaigns':campaigns})
            return redirect('add_creative')
        else:
            channels_checked = request.POST.getlist('channels_checked')
            if channels_checked:  # getlist() returns an empty list, never None
                for channel in channels_checked:
                    time = DayTime.objects.get(id_time=13)
                    placement = Placement(id_time=time,id_campaign=campaign , id_channel_id=channel)
                    placement.save()
            campaigns = Campaigns.objects.filter(id_user_id=request.session['id_user']).order_by('-id_campaign')
            # return render(request , 'DAIManagementApp/edit_campaign.html',{'campaigns':campaigns})
            return redirect('add_creative')

    agency = Agency.objects.filter(id_user=request.session['id_user'],is_deleted=0)
    advertiser = Brands.objects.filter(id_agency__in=agency)
    channels= Channels.objects.filter(id_user=request.session['id_user'])
    advertisers_for_creative = UsersAdvertisers.objects.filter(id_user=request.session['id_user']).filter(status="1").select_related('id_brand').all()
    adspots = Adspots.objects.filter(id_channel__in=channels)
    # datetime = DayTime.objects.all()
    datetime = DayTime.objects.exclude(id_time = 13)
    # brands = UsersAdvertisers.objects.filter(id_user=request.session['id_user']).filter(status="1").filter(id_brand__in=advertiser)
    data = {
        'brands'  : advertiser,
        'agency'  : agency,
        'adspots' : adspots,
        'channels': channels,
        'datetime': datetime,
        'advertisers_for_creative':advertisers_for_creative
    }
    return render(request,'DAIManagementApp/add_campaign_new.html', {'data':data})
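
# Minimal sketch of the per-day delivery computation used in add_campaign_new,
# with a guard for same-day campaigns. The helper name `_daily_delivery` is an
# assumption and nothing above calls it.
def _daily_delivery(volume, start_day, end_day, date_format='%Y-%m-%d'):
    from datetime import datetime as _dt
    days = (_dt.strptime(str(end_day), date_format) - _dt.strptime(str(start_day), date_format)).days
    return int(volume) / max(days, 1)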

@check_user
def load_creatives(request):
    advertiser_id = request.GET.get('advertiser')
    # advertiser_id = '28'
    # adspots = Adspots.objects.filter(id_brand=advertiser_id)
    # Use a parameterized raw query to avoid SQL injection via the advertiser id
    adspots = Adspots.objects.raw('SELECT * FROM Adspots WHERE id_brand = %s GROUP BY ad_group', [advertiser_id])


    return render(request,'DAIManagementApp/dropdown_adspots_campaign.html',{'adspots': adspots})

@check_user
def load_advertisers(request):
    agency_id = request.GET.get('agency')
    advertisers = Brands.objects.filter(id_agency=agency_id)
    return render(request,'DAIManagementApp/dropdown_advertisers_campaign.html',{'brands': advertisers})

@check_user
def load_agency(request):
    agency = Agency.objects.filter(id_user_id=request.session['id_user'],is_deleted=0)
    return render(request,'DAIManagementApp/dropdown_agency_campaign.html',{'agency': agency})

@check_user
def load_creative(request):
    channels= Channels.objects.filter(id_user=request.session['id_user'])
    adspots = Adspots.objects.filter(id_channel__in=channels)
    return render(request,'DAIManagementApp/dropdown_creative_campaign.html',{'adspots': adspots})

@check_user
def send_advertisers(request):
    advertiser_name = request.POST.get('name')
    print(advertiser_name)
    category = request.POST.get('category')
    print(category)
    advertiser_desc = request.POST.get('description')
    active_0_1 = request.POST.get('status')
    now = datetime.datetime.now()
    path = "static/Advertisers_imgs"
    now_infile = str(now).replace(" ", "_")
    now_infile = now_infile.replace(":", "-")
    filename = advertiser_name+"__"+now_infile+".png"
    # handle_uploaded_file(request.FILES['logo'], path, filename)
    agency = request.POST.get('agency')
    agency = Agency.objects.get(id_agency=agency)
    Brand = Brands(id_agency=agency,brand_name=advertiser_name ,description=advertiser_desc, category=category, status=active_0_1, logo=path+'/'+filename)
    Brand.save()
    Advertiser_user = UsersAdvertisers(id_user_id=request.session['id_user'], id_brand_id=Brand.id_brand, status=active_0_1, datetime=now)
    Advertiser_user.save()
    return render(request,'DAIManagementApp/dropdown_advertisers_campaign.html')

@check_user
def send_agency(request):
    name_agency = request.POST.get('name_agency')
    print(name_agency)
    description_agency = request.POST.get('description_agency')
    print(description_agency)
    import datetime  # top-level 'from datetime import datetime' shadows the module
    now = datetime.datetime.now()
    agency = Agency(id_user_id=request.session['id_user'],name=name_agency,description=description_agency,datetime=now, is_deleted=0)
    agency.save()
    return render(request,'DAIManagementApp/dropdown_agency_campaign.html')

@check_user
def send_creative(request):
    channel_id = request.POST.get('channel_id')
    advertiser_id = request.POST.get('advertiser_id')
    adspot_name = request.POST.get('adspot_name')
    adspot_duration = request.POST.get('adspot_duration')
    active_0_1 = request.POST.get('active_0_1')
    import datetime  # top-level 'from datetime import datetime' shadows the module
    now = datetime.datetime.now()
    now_infile = str(now).replace(" ", "_")
    now_infile = now_infile.replace(":", "-")
    path = "adspots/user_"+str(request.session['id_user'])+"/ch_"+str(channel_id)
    if not os.path.exists("static/"+path):
        os.makedirs("static/"+path)

    new_now_infile = now_infile.replace(".","-")
    filename = "ch_"+str(channel_id)+"__"+adspot_name+"__"+str(adspot_duration)+"sec__"+new_now_infile+".ts"
    print(filename)
    handle_uploaded_file(request.FILES['adspot_file'], "static/"+path, filename)
    creative = Adspots(id_channel_id=channel_id ,id_brand_id=advertiser_id, adspot_name=adspot_name, duration=adspot_duration, status=active_0_1, original_filepath=path+'/'+filename ,  datetime=now,filename=filename)
    creative.save()
    return render(request,'DAIManagementApp/dropdown_agency_campaign.html')

@check_user
def disable_campaign(request,id_campaign=""):

    if request.method == 'POST' :
        campaign = Campaigns.objects.get(pk=id_campaign)
        campaign.booked = not campaign.booked  # toggle the booked flag
        campaign.save()
        import datetime  # top-level 'from datetime import datetime' shadows the module
        now = datetime.datetime.now()
        activite = 'Disable Campaign'
        desc = 'Admin Disable Campaign  id: ' + str(id_campaign)
        activity = Activity(activity=activite , date=now ,description=desc )
        activity.save()
    campaigns = Campaigns.objects.filter(id_user_id=request.session['id_user']).order_by('-id_campaign')
    return render(request,'DAIManagementApp/disable_campaign.html',{'campaigns':campaigns})

@check_user
def edit_campaign(request , id_campaign=""):
    from datetime import datetime
    if request.method == 'POST':
        edit_btn = request.POST.get("edit_btn")
        view_btn = request.POST.get("view_btn")
        if (edit_btn):
            # campaign data in db
            campaign = Campaigns.objects.get(pk=id_campaign)

            # campaign data inserted
            name = request.POST.get('campaign_name')             # campaign name
            id_agency = request.POST.get('id_agency')            # campaign agency
            id_brand = request.POST.get('id_brand')              # campaign Advertiser
            product = request.POST.get('product_name')           # campaign product
            category = request.POST.get('id_category')           # campaign product
            country = request.POST.get('id_country')             # campaign product
            # campaign periode info:  Part date (start_end)      #
            start_date = request.POST.get('start_day')           # campaign start date
            end_date = request.POST.get('end_day')               # campaign end date
            # # campaign cpm info:                               #
            budget = request.POST.get('budget')                  # campaign budget
            volume = request.POST.get('volume')                  # campaign target impression
            cpm = request.POST.get('cpm')                        # campaign cpm
            broadcasts_day = request.POST.get('broadcasts_day')  # campaign min broadcasts pre day
            # campaign media planning info:                      #
            rotation = request.POST.get('general_rotation')      # campaign media planning rotation aliatoire
            media_type = request.POST.get('media_type')          # campaign media planning media type
            program_category = request.POST.getlist('program_categories') # campaign media planning program category
            pacing  = request.POST.get('pacing')                 # campaign media planning pacing

            delivery = ""
            if pacing == "1" and start_date != None and end_date != None :
                start_date1 = datetime.strptime(str(start_date), '%Y-%m-%d')
                end_date1 = datetime.strptime(str(end_date), '%Y-%m-%d')
                print(str((end_date1-start_date1).days))
                #
                delivery = int(volume)/int(str((end_date1-start_date1).days))
            else:
                delivery = 0
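            # Only overwrite campaign fields whose submitted value is non-empty and
            # differs from what is already stored.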

            if delivery and delivery != campaign.delivery:
                campaign.delivery = delivery
            if name != "" and name != None and name != campaign.name:
                campaign.name = name
            if id_brand != "" and id_brand != None and id_brand != campaign.id_brand.id_brand :
                campaign.id_brand = Brands.objects.get(pk=id_brand)
            if id_agency != "" and id_agency != None and id_agency != campaign.id_agency.id_agency :
                campaign.id_agency = Agency.objects.get(pk=id_agency)
            if product != "" and product != None and product != campaign.product:
                campaign.product = product
            if category != "" and category != None and category != campaign.category:
                campaign.category = category
            if country != "" and country != None and country != campaign.country:
                campaign.country = country
            if start_date != "" and start_date != None and start_date != campaign.start_day:
                campaign.start_day = start_date
            if end_date != "" and end_date != None and end_date != campaign.end_day:
                campaign.end_day = end_date
            if budget != "" and budget != None and float(budget) and float(budget) != campaign.budget:
                campaign.budget = float(budget)
            if volume != "" and budget != None and int(volume) and int(volume) != campaign.budget:
                campaign.volume = int(volume)
            if delivery != "" and delivery != None and float(delivery) and float(delivery) != campaign.delivery:
                campaign.delivery = float(delivery)
            if cpm != "" and cpm != None and float(cpm) and float(cpm) != campaign.cpm:
                campaign.cpm = float(cpm)
            if broadcasts_day != "" and broadcasts_day != None and int(broadcasts_day) and int(broadcasts_day) != campaign.broadcasts_day:
                campaign.broadcasts_day = int(broadcasts_day)
            if media_type != "" and media_type != None and media_type != campaign.media_type:
                campaign.media_type = media_type
            if program_category != "" and program_category != None and program_category != campaign.program_category:
                campaign.program_category = program_category
            if pacing != "" and pacing != None and pacing != campaign.pacing:
                campaign.pacing = pacing
            if rotation != "" and rotation != None and int(rotation) and int(rotation) != campaign.rotation:
                campaign.rotation = int(rotation)
            campaign.save()

            brands_for_agency = Brands.objects.get(id_brand=campaign.id_brand.id_brand)
            agency_by_brand = Agency.objects.get(id_agency=brands_for_agency.id_agency.id_agency)

            agencies = Agency.objects.filter(id_user=request.session['id_user'],is_deleted=0)
            advertisers = Brands.objects.filter(id_agency__in=agencies)

            channels= Channels.objects.filter(id_user=request.session['id_user'])
            adspots = Adspots.objects.filter(id_channel__in=channels)
            datetime1 = DayTime.objects.exclude(id_time = 13)
            placement_channel = Placement.objects.filter(id_campaign=campaign.id_campaign).values_list('id_channel')
            placement_channel = [x[0] for x in list(placement_channel)]

            channels_and_placement = {} # or []
            tab_channels_and_placement = []
            ch_n = 0
            for channel in channels:
                # we will get the placement of 1 channel here:
                placement = Placement.objects.filter(id_campaign=campaign.id_campaign, id_channel=channel).values_list('id_time')
                placement = [x[0] for x in list(placement)]
                # here will append the array channels_and_placement with the value of the channel and the placement to use it in the html
                channels_and_placement[ch_n] = {"channel_name":channel.channel_name, "channel_id": channel.id_channel, "placement": placement}
                tab_channels_and_placement.append(channels_and_placement[ch_n])
                ch_n +=1

            placement = Placement.objects.filter(id_campaign=campaign.id_campaign).values_list('id_time')
            placement = [x[0] for x in list(placement)]

            print(channels_and_placement)
            now = datetime.now()
            activite = 'Edit Campaign'
            desc = 'Admin Edit Campaign  id: ' + str(id_campaign)
            activity = Activity(activity=activite , date=now ,description=desc )
            activity.save()
            data = {
                'campaign':campaign,
                'agency_by_brand':agency_by_brand,
                'channels':channels,
                'datetime':datetime1,
                'placement_channel':placement_channel,
                'channels_and_placement':tab_channels_and_placement,
                'placement':placement,
                'agencies': agencies,
                'advertisers':advertisers

            }
            return render(request , 'DAIManagementApp/edit_campaign_id.html',{'data':data})
        elif(view_btn):
            return redirect('view_campaign', id_campaign=id_campaign)
        else:
            return redirect('load_excel', id_campaign=id_campaign)

    campaigns = Campaigns.objects.filter(id_user_id=request.session['id_user']).order_by('-id_campaign')
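    # Reporting queries for the campaign list: total_ads counts aired verifications per
    # campaign, and the impression totals scale SFR panel percentages to an audience
    # estimate (the 1.25 * 4500000 / 17 factor appears to be a fixed panel-to-population
    # scaling). no_vast_query_old is the earlier LEFT JOIN variant, kept for reference.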

    no_vast_query_old = """
        SELECT
            Campaigns.id_campaign,
            Count(Verifs.airStatusCode) as total_ads,
            SUM(SFR_analytics.purcent*1.25*4500000/17) as Total_impressions
        FROM
            Campaigns
        LEFT JOIN Adspots ON Campaigns.id_campaign = Adspots.id_campaign
        LEFT JOIN Verifs ON Verifs.spotId = Adspots.filename
        LEFT JOIN SFR_analytics ON SUBSTRING(Verifs.airTime, 12, 5) = SUBSTRING(SFR_analytics.`minute`, 1, 5)
        AND SUBSTRING(Verifs.airTime, 1, 10) = SUBSTRING(SFR_analytics.`day`, 1, 10)
        AND SFR_analytics.sfr_channel_name = '2M Maroc'
        WHERE Adspots.id_campaign is not null AND Verifs.airStatusCode= 0001
        AND SUBSTRING(Verifs.airTime, 1, 10) > Campaigns.start_day AND SUBSTRING(Verifs.airTime, 1, 10) < Campaigns.end_day
        GROUP BY Campaigns.id_campaign
    """

    no_vast_query = """
        SELECT
            Campaigns.id_campaign,
            Count(Verifs.airStatusCode) as total_ads,
            SUM(SFR_analytics.purcent*1.25*4500000/17) as Total_impressions
        FROM
            Campaigns
        INNER JOIN Adspots ON Campaigns.id_campaign = Adspots.id_campaign
        INNER JOIN Verifs ON Verifs.spotId = Adspots.filename
        INNER JOIN SFR_analytics ON SUBSTRING(Verifs.airTime, 12, 5) = SUBSTRING(SFR_analytics.`minute`, 1, 5)
        AND SUBSTRING(Verifs.airTime, 1, 10) = SUBSTRING(SFR_analytics.`day`, 1, 10)
        AND SFR_analytics.sfr_channel_name = '2M Maroc'
        WHERE Adspots.id_campaign is not null AND Verifs.airStatusCode= 0001
        AND SUBSTRING(Verifs.airTime, 1, 10) > Campaigns.start_day AND SUBSTRING(Verifs.airTime, 1, 10) < Campaigns.end_day
        GROUP BY Campaigns.id_campaign
    """

    vast_query = """
        SELECT
            Campaigns.id_campaign,
            COALESCE(Ads.total_ads, 0) as total_ads,
            COALESCE(Impressions.total_impression, 0) as total_impression
        FROM
            Campaigns
        LEFT JOIN (
            SELECT
                Campaigns.id_campaign,
                COUNT(Verifs.airStatusCode) as total_ads
            FROM
                Campaigns
            LEFT JOIN
                Adspots ON Campaigns.id_campaign = Adspots.id_campaign
            LEFT JOIN
                Verifs ON Verifs.spotId = Adspots.filename
            WHERE
                Adspots.id_campaign is not null and Verifs.airStatusCode= 0001
                AND SUBSTRING(Verifs.airTime, 1, 10) > Campaigns.start_day
                AND SUBSTRING(Verifs.airTime, 1, 10) < Campaigns.end_day
            GROUP BY
                Campaigns.id_campaign
        ) Ads ON Campaigns.id_campaign = Ads.id_campaign
        LEFT JOIN (
            SELECT
                Campaigns.id_campaign,
                COUNT(VAST_response.AD_id) as total_impression
            FROM
                Campaigns
            LEFT JOIN
                VAST_response ON Campaigns.vast_data = VAST_response.vast_url
                AND SUBSTRING(VAST_response.datetime_timestamp, 1, 10) > Campaigns.start_day
                AND SUBSTRING(VAST_response.datetime_timestamp, 1, 10) < Campaigns.end_day
            WHERE
                Campaigns.vast_data is not null
            GROUP BY
                Campaigns.id_campaign
        ) Impressions ON Campaigns.id_campaign = Impressions.id_campaign
        WHERE Campaigns.vast_data is not null
    """
    from django.db import connection
    from .utils import NoVastResult, VastResult

    # Execute the no_vast_query
    with connection.cursor() as cursor:
        cursor.execute(no_vast_query)
        no_vast_results = [NoVastResult(*row) for row in cursor.fetchall()]

    # Execute the vast_query
    with connection.cursor() as cursor:
        cursor.execute(vast_query)
        vast_results = [VastResult(*row) for row in cursor.fetchall()]



    now = datetime.now()

    start = []

    for i in campaigns:
        try:
            date = datetime.strptime(i.start_day, '%Y-%m-%d')
            if now > date:
                start.append(i.id_campaign)
        except (TypeError, ValueError):
            pass
    return render(
        request ,
        'DAIManagementApp/edit_campaign.html',
        {
            'campaigns':campaigns ,
            'start' : start,
            'vast_results': vast_results,
            'no_vast_results': no_vast_results
        }
    )

@check_user
def view_campaign(request, id_campaign='15'):
    campaign = Campaigns.objects.get(pk=id_campaign)
    brands_for_agency = Brands.objects.get(id_brand=campaign.id_brand.id_brand)
    agency_by_brand = Agency.objects.get(id_agency=brands_for_agency.id_agency.id_agency)

    channels= Channels.objects.filter(id_user=request.session['id_user'])
    adspots = Adspots.objects.filter(id_channel__in=channels)
    datetime = DayTime.objects.exclude(id_time = 13)
    placement_channel = Placement.objects.filter(id_campaign=campaign.id_campaign).values_list('id_channel')
    placement_channel = [x[0] for x in list(placement_channel)]

    channels_and_placement = {} # or []
    tab_channels_and_placement = []
    ch_n = 0
    for channel in channels:
        # we will get the placement of 1 channel here:
        placement = Placement.objects.filter(id_campaign=campaign.id_campaign, id_channel=channel).values_list('id_time')
        placement = [x[0] for x in list(placement)]
        # here will append the array channels_and_placement with the value of the channel and the placement to use it in the html
        channels_and_placement[ch_n] = {"channel_name":channel.channel_name, "channel_id": channel.id_channel, "placement": placement}
        tab_channels_and_placement.append(channels_and_placement[ch_n])
        ch_n +=1

    placement = Placement.objects.filter(id_campaign=campaign.id_campaign).values_list('id_time')
    placement = [x[0] for x in list(placement)]

    print(channels_and_placement)

    data = {
        'campaign':campaign,
        'agency_by_brand':agency_by_brand,
        'channels':channels,
        'datetime':datetime,
        'placement_channel':placement_channel,
        'channels_and_placement':tab_channels_and_placement,
        'placement':placement
    }
    return render(request , 'DAIManagementApp/view_campaign_id.html',{'data':data})

@check_user
def views_campaigns(request):
    campaign =Campaigns.objects.filter(id_user=request.session['id_user']).order_by('-id_campaign')
    return render(request,'DAIManagementApp/views_campaigns.html',{'campaigns':campaign})

@check_user
def update_campaign(request,id_campaign=''):
    if request.method == 'POST':
        campaign = Campaigns.objects.get(pk=id_campaign)
        # Part 1
        name = request.POST.get('campaign_name')
        id_agency = request.POST.get("id_agency")
        id_brand = request.POST.get('id_brand')
        id_adpost = request.POST.get('id_adpost')

        # pacing = request.POST.get('pacing')

        # Part data
        start_date = request.POST.get('start_day')
        end_date = request.POST.get('end_day')

        pacing  = request.POST.get('pacing')
        volume  = request.POST.get('volume')
        delivery  = request.POST.get('delivery')
        cpm     = request.POST.get('cpm')


        rotation = request.POST.get('general_rotation')


        # Part placement




        #Insert


        brand = Brands.objects.get(pk=id_brand)
        # adpost = Adspots.objects.get(pk=id_adpost)
        adpost = 31
        agency =  Agency.objects.get(pk=id_agency)

        campaign.name=name
        campaign.id_advertiser=adpost
        campaign.id_brand=brand
        campaign.pacing=pacing
        campaign.start_day=start_date
        campaign.end_day=end_date
        campaign.volume=int(volume)
        campaign.general_rotation = rotation

        try :
            campaign.delivery=int(delivery)
        except:
            campaign.delivery=0


        campaign.cpm=float(cpm)
        campaign.save()
        import datetime  # top-level 'from datetime import datetime' shadows the module
        now = datetime.datetime.now()
        activite = 'Edit Campaign'
        desc = 'Admin Edit Campaign  id: ' + str(id_campaign)
        activity = Activity(activity=activite , date=now ,description=desc )
        activity.save()


        channels = Channels.objects.filter(id_user=request.session['id_user'])
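        # rotation == '0' means specific day-parts were picked per channel, so Placement
        # rows are rebuilt from the per-channel checkboxes; otherwise every checked channel
        # gets the single default slot (id_time=13, which appears to be the "any time" slot).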
        if(rotation=='0'):
            p = Placement.objects.filter(id_campaign=campaign)
            p.delete()
            for channel in channels:
                channel_placements = request.POST.getlist('check_placement_'+str(channel.id_channel))
                if channel_placements != None:
                    for placement_of_channel in channel_placements:
                        time = DayTime.objects.get(pk=int(placement_of_channel))
                        placement = Placement(id_time=time ,id_campaign=campaign , id_channel=channel)
                        placement.save()
        else:
            channels_checked = request.POST.getlist('channels_checked')
            print(channels_checked)
            p = Placement.objects.filter(id_campaign=campaign)
            p.delete()
            if channels_checked != None:

                for channel in channels_checked:
                    time = DayTime.objects.get(id_time=13)

                    # p = [x[0] for x in list(p)]

                    # if channel not in p:
                    placement = Placement(id_time=time ,id_campaign=campaign,id_channel_id=channel)
                    placement.save()
                    # else:
                    #         placement = Placement.objects.get(id_time=time,id_campaign=campaign,id_channel_id=channel)
                    #         placement.id_time = time
                    #         placement.id_channel_id = channel
                    #         placement.save()

        # campaign = Campaigns.objects.filter(id_user_id=request.session['id_user'])
        return redirect('view_campaign', id_campaign=id_campaign)

@check_user
def load_region_dates(request, channel_id=1, region_id=1 ):
    channel_id = request.GET.get('channel')
    region_id = request.GET.get('region')
    zone = ChannelsZone.objects.get(zonename=region_id, id_channel=channel_id)
    playlists = Playlists.objects.filter(id_channel=channel_id, id_zone_channel=zone.id_zone_channel)
    import datetime  # top-level 'from datetime import datetime' shadows the module
    dates = []
    for playlist in playlists:
        datetimeobject = datetime.datetime.strptime(playlist.broadcastdate, '%Y-%m-%d')
        dayformat = datetimeobject.strftime('%d.%m.%Y')
        dates.append(dayformat)
    data = {'dates':dates}
    return JsonResponse(data, safe=False)



@check_user
def add_channels(request):
    if request.method == 'POST':
        name = request.POST.get('channel_name')
        lang = request.POST.get('language')
        genre = request.POST.get('genre')
        ftp = request.POST.get('ftp')
        sfr_channel_name = request.POST.get('sfr_channel_name')
        import datetime  # top-level 'from datetime import datetime' shadows the module
        creation = modif = datetime.datetime.now()
        user = Users.objects.get(pk=request.session['id_user'])
        channel_decs = request.POST.get("channel_desc")
        ch = Channels(id_user = user , channel_name=name,channel_desc=channel_decs ,channel_lang=lang , channel_genre= genre , ftp_channel_name= ftp , sfr_channel_name=sfr_channel_name ,creation_datetime=creation , modif_datetime=modif)

        ch.save()
        now = datetime.datetime.now()
        activite = 'Add Channels'
        desc = 'Admin Add Channels  id: ' + str(ch.id_channel)
        activity = Activity(activity=activite , date=now ,description=desc )
        activity.save()
        return  redirect('/DAIManagement/edit_channels/')

    return render(request,'DAIManagementApp/add_channels.html')

@check_user
def edit_channels(request,id_channel=""):
    if request.method == 'POST':
        print(id_channel)
        channel = Channels.objects.get(pk=id_channel)
        return render(request, 'DAIManagementApp/edit_channels_id.html',{'channel':channel})
    user = Users.objects.get(pk=request.session['id_user'])
    channels = Channels.objects.filter(id_user=user).order_by('-id_channel')
    return render(request,'DAIManagementApp/edit_channels.html',{'channels' : channels} )

@check_user
def update_channels(request , id_channel=""):
    if request.method == 'POST':
        name = request.POST.get('channel_name')
        lang = request.POST.get('language')
        genre = request.POST.get('genre')
        ftp = request.POST.get('ftp')
        sfr = request.POST.get('sfr')
        zone_name = request.POST.get('zone_name')
        network = request.POST.get("networkname")
        import datetime  # top-level 'from datetime import datetime' shadows the module
        modif = datetime.datetime.now()
        channel_decs = request.POST.get("channel_desc")

        user = Users.objects.get(pk=request.session['id_user'])
        channel = Channels.objects.filter(id_channel=id_channel,id_user=user)[0]
        channel.channel_name = name
        channel.channel_lang =lang
        channel.channel_genre = genre
        channel.ftp_channel_name = ftp
        channel.sfr_channel_name = sfr
        channel.zonename = zone_name
        channel.network = network
        channel.channel_desc = channel_decs
        channel.modif_datetime = modif
        now = datetime.datetime.now()
        activite = 'Edit Channel'
        desc = 'Admin Edit Channel  id: ' + str(id_channel)
        activity = Activity(activity=activite , date=now ,description=desc )
        activity.save()

        channel.save()
        return redirect('/DAIManagement/edit_channels/')

@check_user
def delete_channel(request ,id_channel=""):
    if request.method == 'POST':
        Channels.objects.get(pk=id_channel).delete()
        import datetime  # top-level 'from datetime import datetime' shadows the module
        now = datetime.datetime.now()
        activite = 'Delete Channel'
        desc = 'Admin delete Channel  id: ' + str(id_channel)
        activity = Activity(activity=activite , date=now ,description=desc )
        activity.save()
    user = Users.objects.get(pk=request.session['id_user'])
    channels = Channels.objects.filter(id_user=user).order_by('-id_channel')
    return render(request,'DAIManagementApp/delete_channel.html', {'channels' : channels})



@check_user
def add_jingle(request):
    if request.method == 'POST' :
        name = request.POST.get('jingle_name')
        channel = request.POST.get("channel_id")
        status = request.POST.get('active_0_1')

        path = "jingles/user_"+str(request.session['id_user'] )+"/channel_"+channel + "/"+name

        if not os.path.exists("static/"+path):
            os.makedirs("static/"+path)

        filename_video_ext = '.' + request.FILES['video_file'].name.rsplit(".", 1)[-1].lower()
        filename_video = name+filename_video_ext
        # filename_video = FILES['video_file'].name
        filename_md5 = name+".md5"
        handle_uploaded_file(request.FILES['video_file'], "static/"+path, filename_video)
        handle_uploaded_file(request.FILES['md5_file'], "static/"+path, filename_md5)
        import datetime  # top-level 'from datetime import datetime' shadows the module
        now = modif = datetime.datetime.now()
        ch = Channels.objects.get(pk=channel)
        jingle = Jingles(id_channel=ch, jingle_name=name, video_jingle=path+"/"+filename_video, md5_file=path+"/"+filename_md5, creation_datetime=now, modif_datetime=modif, status=status, is_delete='0')
        jingle.save()  # save first so id_jingle is available for the activity log
        activite = 'Add Jingle'
        desc = 'Admin Add Jingle  id: ' + str(jingle.id_jingle)
        activity = Activity(activity=activite, date=now, description=desc)
        activity.save()
        return redirect("/DAIManagement/edit_jingle/")
    user = Users.objects.get(pk=request.session['id_user'])
    channels = Channels.objects.filter(id_user=user)

    return render(request,'DAIManagementApp/add_jingles.html',{'channels':channels})

@check_user
def edit_jingle(request , id_jingle=""):
    if request.method == 'POST':
        jingle = Jingles.objects.get(pk=id_jingle)
        user = Users.objects.get(pk=request.session['id_user'])
        channels = Channels.objects.filter(id_user=user)
        data =  {
            'jingle' : jingle,
            'channels' : channels
        }
        return render(request , 'DAIManagementApp/edit_jingles_id.html', {'data' : data})
    user = Users.objects.get(pk=request.session['id_user'])
    channels = Channels.objects.filter(id_user=user)
    # .values_list('id_channel')
    # channels = [x[0] for x in channels]

    jingles = Jingles.objects.filter(id_channel__in=channels,is_delete=0).order_by('-id_jingle')
    return render(request,'DAIManagementApp/edit_jingles.html' , {'jingles':jingles})

@check_user
def update_jingle(request,id_jingle=""):
    if request.method == 'POST' :
        jingle = Jingles.objects.get(pk=id_jingle)
        name = request.POST.get('jingle_name')
        channel = request.POST.get("channel_id")
        print(channel)
        status = request.POST.get('active_0_1')
        path = "jingles/user_"+str(request.session['id_user'] )+"/channel_"+str(channel) + "/"+name
        try:
            filename_video_ext = '.' + request.FILES['video_file'].name.rsplit(".", 1)[-1].lower()
            filename_video = name + filename_video_ext
        except KeyError:
            # no new video uploaded: keep the existing file name
            from pathlib import Path
            filename_video = Path(jingle.video_jingle).name


        filename_md5 = name+".md5"
        if not os.path.exists("static/"+path):
                os.makedirs("static/"+path)
        try :
            handle_uploaded_file(request.FILES['video_file'], "static/"+path, filename_video)
            os.remove("static/"+ jingle.video_jingle)
        except :
            import shutil
            if os.path.exists("static/"+ jingle.video_jingle):
                    shutil.move("static/"+ jingle.video_jingle, "static/"+path+"/"+filename_video)

        try :

            handle_uploaded_file(request.FILES['md5_file'], "static/"+path, filename_md5)
            os.remove("static/"+ jingle.md5_file)


        except :
            import shutil
            if os.path.exists("static/"+ jingle.md5_file):
                    shutil.move("static/"+ jingle.md5_file, "static/"+path+"/"+filename_md5)

        jingle.jingle_name= name
        ch = Channels.objects.get(pk=channel)
        jingle.id_channel = ch
        jingle.status = status == '1'
        jingle.video_jingle=path+"/"+filename_video
        jingle.md5_file=path+"/"+filename_md5
        import datetime  # top-level 'from datetime import datetime' shadows the module
        jingle.modif_datetime = datetime.datetime.now()
        jingle.save()
        now = datetime.datetime.now()
        activite = 'Edit Jingle'
        desc = 'Admin Edit Jingle  id: ' + str(id_jingle)
        activity = Activity(activity=activite , date=now ,description=desc )
        activity.save()


        return redirect("/DAIManagement/edit_jingle/")

@check_user
def delete_jingle(request,id_jingle=''):
    if request.method == 'POST':
        jingle = Jingles.objects.get(pk=id_jingle)
        import datetime  # top-level 'from datetime import datetime' shadows the module
        jingle.modif_datetime = datetime.datetime.now()
        jingle.is_delete = 1
        jingle.save()
        now = datetime.datetime.now()
        activite = 'Delete Jingle'
        desc = 'Admin Delete Jingle  id: ' + str(id_jingle)
        activity = Activity(activity=activite , date=now ,description=desc )
        activity.save()

    user = Users.objects.get(pk=request.session['id_user'])
    channels = Channels.objects.filter(id_user=user)


    jingles = Jingles.objects.filter(id_channel__in=channels,is_delete=0).order_by('-id_jingle')

    return render(request,'DAIManagementApp/delete_jingles.html', {'jingles': jingles})

@check_user
def views_jingles(request,id_channel=''):
    if  id_channel != '':
        jingles = Jingles.objects.filter(id_channel = id_channel)
        return render(request,'DAIManagementApp/views_channel_jingles.html',{'jingles':jingles})

    user = Users.objects.get(pk=request.session['id_user'])
    channels = Channels.objects.filter(id_user=user)
    ch = []
    for i in channels :
        jingles = Jingles.objects.filter(id_channel=i,is_delete=0)
        rest = {
            'channel' : i,
            'total'   : len(jingles)
        }
        ch.append(rest)

    return render(request,'DAIManagementApp/views_jingles.html',{'data': ch})



@check_user
def sfr_upload(request):
    if request.method == 'POST':
        filename = str(request.FILES['sfr_file']).replace(' ','_')
        print(filename)
        path = "sfr/user_"+ request.session['id_user']
        if not os.path.exists(path):
                os.makedirs(path)
        handle_uploaded_file(request.FILES['sfr_file'], path, filename)
        insert_sfr(path+'/'+filename)
    return render(request , "DAIManagementApp/sfr_upload.html" )

def add_line_to_excel(file_path, new_data):
    """
    Adds a new line at the beginning of an Excel file.

    Parameters:
    file_path (str): Path to the Excel file.
    new_data (list): Data to be inserted in the new first row.
    """
    import openpyxl

    # Load the workbook and select the active worksheet
    workbook = openpyxl.load_workbook(file_path)
    worksheet = workbook.active

    # Insert a new row at the beginning
    worksheet.insert_rows(1)

    # Add new data to the first row
    for col, data in enumerate(new_data, start=1):
        worksheet.cell(row=1, column=col, value=data)

    # Save the workbook
    workbook.save(file_path)
    return "Line added successfully."

def insert_sfr(file):
    import re
    import datetime  # top-level 'from datetime import datetime' shadows the module
    import pandas as pd
    from dateutil import parser
    from django.db import IntegrityError

    # Expected layout (inferred from the offsets used below): metadata rows
    # (Date, Chaîne(s), Cible(s), KPI) sit in columns D/E near the top of the sheet,
    # and per-minute audience percentages start after row 13; the dummy row prepended
    # next keeps those offsets aligned.


    new_data = ['New', 'Row', 'Data', 'Here']   # Replace with your data
    add_line_to_excel(file, new_data)

    df = pd.read_excel(file)
    columns =  ["cols_"+ str(i) for i in range(len(df.columns))]
    df.columns = columns

    info = df.head(7)[["cols_3","cols_4"]]

    dic = {}

    for index, row in info.iterrows():
        if str(row["cols_3"]) in ["Date", "Chaîne(s)", "Cible(s)", "KPI"]:
            dic[str(row["cols_3"]).replace('(s)','').strip()] = row["cols_4"]

    dic['Chaîne'] = dic['Chaîne'].split(', ')

    for col in columns[4:]:
        test = df[13:][["cols_3",col]]
        name = test.iloc[0][col]
        for index, row in test.iterrows():
            line = 1
            if str(row["cols_3"]) != 'Heure':
                for channel in dic['Chaîne']:
                    line += 1
                    #
                    try:
                        # Create SFR Analytics Object
                        sfr = Sfr_analytics(
                            sfr_channel_name = channel,
                            cible = dic['Cible'],
                            region = "France",
                            indicateur = dic["KPI"]
                        )
                        date = datetime.datetime.strptime(dic['Date'], '%d/%m/%Y')
                        heure = row["cols_3"]
                        if isinstance(heure, datetime.time):
                            heure_time = heure.replace(microsecond=0)
                        elif isinstance(heure, datetime.datetime):
                            heure_time = heure.time().replace(microsecond=0)
                        else:
                            # fall back to parsing a "HH:MM:SS" string so heure_time is always defined
                            heure_time = datetime.datetime.strptime(str(heure), '%H:%M:%S').time()
                        # Rows timestamped between 00:00 and 03:00 fall after midnight, so they
                        # are attributed to the following calendar day.
                        midnight = datetime.datetime.combine(date.date(), datetime.time(0, 0, 0))
                        am_3 = datetime.datetime.combine(date.date(), datetime.time(3, 0, 0))
                        combined_heure = datetime.datetime.combine(date, heure_time)
                        sfr.day = date + timedelta(days=1) if midnight <= combined_heure < am_3 else date
                        sfr.minute = heure_time
                        # remove percentage sign and convert to float
                        sfr.purcent = float(re.sub('%', '', str(row[col])))
                        #
                        sfr.save()
                    except IntegrityError:
                        print("This entry already exists in the database.")



# def insert_sfr(file):
#     #
#     import re
#     import pandas as pd
#     from dateutil import parser
#     from django.db import IntegrityError


#     new_data = ['New', 'Row', 'Data', 'Here']   # Replace with your data
#     add_line_to_excel(file, new_data)

#     df = pd.read_excel(file)
#     columns =  ["cols_"+ str(i) for i in range(len(df.columns))]
#     df.columns = columns

#     info = df.head(7)[["cols_3","cols_4"]]

#     dic = {}

#     for index, row in info.iterrows():

#         if str(row["cols_3"]) in ["Date", "Chaîne(s)", "Cible(s)", "KPI"]:
#             dic[str(row["cols_3"]).replace('(s)','').strip()] = row["cols_4"]

#     dic['Chaîne'] = dic['Chaîne'].split(', ')
#     # day = dic['Date']
#     # day = df.iloc[0, 4]

#     #
#     # # Save DataFrame to CSV file
#     # df.to_csv('file.csv', index=False)
#     #
#     # # Load the CSV file
#     # df_csv = pd.read_csv('file.csv')
#     #
#     # # Retrieve the value from cell E1
#     # # Note that Python uses 0-indexing, so '4' here represents the 5th column (column E in Excel)
#     # day = df_csv.iat[0, 4]
#     last_minute = ''
#     for col in columns[4:] :

#         test = df[13:][["cols_3",col]]
#         name = test.iloc[0][col]
#         for index, row in test.iterrows():
#             line = 1
#             if str(row["cols_3"]) != 'Heure':
#                 for channel in dic['Chaîne']:
#                     line += 1
#                     # Create SFR Analytics Object
#                     sfr = Sfr_analytics(
#                         sfr_channel_name = channel,
#                         cible = dic['Cible'],
#                         region = "France",
#                         indicateur = dic["KPI"]
#                     )
#                     #
#                     day = dic['Date']
#                     date = datetime.datetime.strptime(day, '%d/%m/%Y')

#                     heureok = str(row["cols_3"]).split('.')
#                     # if index == 879:
#                     #     tot = ada
#                     if index>=1274:
#                         date += datetime.timedelta(days=1)
#                         heureok = str(row["cols_3"]).split('.')
#                         # heureok = str(heure[1]).split('.')
#                         # time = heureok[0]
#                         if "03:00:00" in heureok:
#                             # time = str(heureok)+".00"
#                             break
#                         else:
#                             time = heureok[0][11:]

#                     else:
#                         heure = str(row["cols_3"]).split(':')
#                         if heure[1] == last_minute:
#                             if heure[1] == '59':
#                                     h2 = int(heure[0]) +1
#                                     heure[0] = str(h2)
#                                     heure[1] = '00'
#                             h2 = int(heure[1]) +1
#                             heure[1] = str(h2)
#                         time = str(int(heure[0])%24)+':'+heure[1]
#                         # if time == "17:25":
#                         #     tot = ada


#                         # date += datetime.timedelta(days=1)
#                     # tot +=2

#                     # heure = str(row["cols_3"]).split(':')
#                     # if int(heure[0]) >= 24 :
#                     #     time = str(int(heure[0])%24)+':'+heure[1]
#                     #     date += datetime.timedelta(days=1)
#                     # else :
#                     #     time = str(int(heure[0]))+':'+heure[1]
#                     sfr.day = date
#                     sfr.minute = time
#                     print(sfr.minute)
#                     last_minute = heure[1]
#                     # remove percentage sign and convert to float
#                     sfr.purcent = float(re.sub('%', '', str(row[col])))
#                     # sfr.save()

#                     try:
#                         sfr.save()
#                     except IntegrityError:
#                         print("This entry already exists in the database.")


def insert_sfr_backup(file):
    import datetime  # top-level 'from datetime import datetime' shadows the module
    import numpy as np
    import pandas as pd
    # df = pd.read_excel(file, encode="utf-8")
    df = pd.read_excel(file)
    columns =  ["cols_"+ str(i) for i in range(len(df.columns))]
    df.columns = columns
    info = df.head(6)[["cols_1","cols_2"]]

    dic = {}

    t = list(info)
    for index, row in info.iterrows():
        #print(row["cols_1"])
        if str(row["cols_1"]) == "Date" or str(row["cols_1"]) == "Région" or str(row["cols_1"]) == "Cible"  or str(row["cols_1"]) == "Indicateur : "    :
            dic[str(row["cols_1"]).replace(':','').strip()]=row["cols_2"]

    for col in columns[2:] :

        print(col)
        test = df[10:][["cols_1",col]]
        name = test.iloc[0][col]
        for index, row in test.iterrows():

            if str(row["cols_1"]) != 'nan':
                sfr = Sfr_analytics(sfr_channel_name = name ,cible=dic['Cible'],region= dic["Région"],indicateur=dic["Indicateur"] )
                day = str(dic['Date']).split(' ')[1]
                from dateutil import parser
                # date = parser.parse(day)
                date = datetime.datetime.strptime(str(day), '%d/%m/%Y')

                heure = str(row["cols_1"]).split(':')
                if int(heure[0]) >= 24 :
                    time = str(int(heure[0])%24)+':'+heure[1]
                    date += datetime.timedelta(days=1)
                else :
                    time = str(int(heure[0]))+':'+heure[1]
                sfr.day = date
                sfr.minute = time
                sfr.purcent= row[col]
                sfr.save()


@check_user
def edit_impressions(request):
    imp = Impressions.objects.get(id='1')
    if request.method == 'POST':
        name = request.POST.get('name')
        users = request.POST.get('users')
        purcent = request.POST.get('purcent')
        region = request.POST.get('region')
        if name != "":
            imp.tool_name = name
        if users != "":
            imp.total_users = int(users)
        if purcent != "":
            imp.market_share_purcent = float(purcent)
        if region != "":
            imp.region = region
        imp.save()

    return render(request,'DAIManagementApp/edit_impressions.html' , {'imp':imp})

@check_user
def chart_test(request):
    from django.db import connection
    import datetime  # top-level 'from datetime import datetime' shadows the module
    if request.method == 'POST':
        channel = Channels.objects.get(pk = request.POST.get('channel_id'))
        zonename =  request.POST.get('zonename')
        region = ChannelsZone.objects.get(zonename=zonename, id_channel = channel.id_channel)
        day =  request.POST.get('day')
        day = datetime.datetime.strptime(day, '%m/%d/%Y')
        dayformat = day.strftime('%Y-%m-%d')
        val = Sfr_analytics.objects.filter(sfr_channel_name="2M Maroc", region = region.region, day=str(dayformat)).values_list("minute","purcent")
        purcent = Impressions.objects.get(pk='1')
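        # total_users / market_share_purcent appears to extrapolate the SFR panel share
        # to an estimated viewer count for each minute.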
        nb  =  float(purcent.total_users) / float(purcent.market_share_purcent)
        labels  = [x[0] for x in val ]
        data  = [int(float(x[1])*nb) for x in val]
        channels = Channels.objects.filter(id_user=request.session['id_user'])
        return render(request, 'DAIManagementApp/pie_chart.html', {'labels': labels,'data': data, 'day': dayformat, 'channels': channels})
    else:
        channels = Channels.objects.filter(id_user=request.session['id_user'])
        return render(request, 'DAIManagementApp/pie_chart.html',{'channels': channels})

@check_user
def verifs(request):
    from datetime import datetime

    channels =Channels.objects.filter(id_user = request.session['id_user']).values_list('channel_name')
    channels = [x[0] for x in channels]
    # lines  = Verifs.objects.filter(airStatuscode="0001")
    lines  = Verifs.objects.all().order_by('-airTime')[:300]
    data = []
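    # For each recent verification on one of this user's channels, attach the SFR
    # minute-level audience share and convert it to an estimated viewer count
    # (share * total_users / market_share_purcent).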

    for line in lines:
        if line.networkname in channels :
            d = str(line.airTime).split(' ')[0].split('-')
            d = d[0]+'-'+ d[2] +'-'+ d[1]
            p ={
                'channel':line.networkname,
                'name' : line.spotId,
                'day' : d
            }

            region = ChannelsZone.objects.get(zonename =line.zonename)
            p['region'] = region.region
            min_ss = str(line.airTime).split(' ')[1]
            hms = min_ss.split(':')
            minute = hms[0] + ':' + hms[1] + ':00'
            p['minute'] = minute
            p['minute_ss'] = min_ss
            p['color'] = '#00800000'
            if  str(line.airStatuscode) == '0001':
                p['status'] = 'Aired Successfully'
                p['color'] = '#2c2c8cb3'
                try :
                    donnees = Sfr_analytics.objects.filter(day=p['day'],minute=p['minute'])
                except :
                    donnees = Sfr_analytics.objects.filter(day=str(line.airTime).split(' ')[0],minute=p['minute'])
                print(donnees)
                for i in donnees :
                    if p['channel'] in i.sfr_channel_name :
                        purcent = Impressions.objects.get(pk='1')
                        nb  = float(i.purcent) * float(purcent.total_users) / float(purcent.market_share_purcent)
                        p['nb_wach'] = nb
            elif str(line.airStatuscode) == '0008':
                p['status'] = 'Failed, Adspot cut'
                p['nb_wach'] = '-'

            elif str(line.airStatuscode) == '1005':
                p['status'] = 'Not aired yet'
                p['nb_wach'] = '-'
                p['color'] = '#c7c7c7b3'

            else :
                p['status'] = 'Failed, Other Reason.'
                p['nb_wach'] = '-'
                p['color'] = '#c7c7c7b3'


            data.append(p)
    return render(request,'DAIManagementApp/verifs.html',{'data':data})

@check_user
def playlists_results(request,id_playlist="1"):
    import datetime  # top-level 'from datetime import datetime' shadows the module
    if request.method == 'POST':
        edit_btn = request.POST.get("edit_btn")
        view_btn = request.POST.get("view_btn")
        insertion_results_btn = request.POST.get("insertion_results")
        duplicate_btn = request.POST.get("duplicate_btn")
        if(edit_btn):
            playlist = Playlists.objects.get(pk=id_playlist)
            channels = Channels.objects.filter(id_user=request.session['id_user'])
            region = ChannelsZone.objects.get(id_zone_channel=playlist.id_zone_channel.id_zone_channel)
            d = playlist.broadcastdate

            win = Windows.objects.filter(id_playlist=playlist.id_playlist).values_list('id_window', flat=True)

            data = {}
            data["windows"] = []
            data["number_of_wins"] = 0
            i = 0

            windows = Windows.objects.filter(id_playlist=playlist.id_playlist).order_by('window_start')
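            # Build the nested structure the template iterates over:
            # windows -> avails_in_win -> adspots_in_avail, with positional counters i/j/k.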
            for window in windows:
                window_dic = {}
                window_dic['i'] = i
                i = i+1
                window_dic['id_window'] = window.id_window
                window_start_formated = datetime.datetime.strptime(window.window_start, '%Y-%m-%d %H:%M:%S')
                window_start_formated_2 = window_start_formated.strftime("%H:%M")
                window_end_formated = datetime.datetime.strptime(window.window_end, '%Y-%m-%d %H:%M:%S')
                window_end_formated_2 = window_end_formated.strftime("%H:%M")
                window_dic['window_start'] = window_start_formated_2
                window_dic['window_end'] = window_end_formated_2
                window_dic['avails_in_win'] = []
                window_dic['num_of_avails'] = 0
                data["number_of_wins"] = i
                avails = Avails.objects.filter(id_window=window.id_window)
                j = 0
                for avail in avails:
                    avail_dic = {}
                    avail_dic["j"] = j
                    j = j+1
                    avail_dic["id_avail"] = avail.id_avail
                    avail_start_formated = datetime.datetime.strptime(avail.avail_start, '%Y-%m-%d %H:%M:%S')
                    avail_start_formated2 = avail_start_formated.strftime("%H:%M")
                    avail_dic["avail_start"] = avail_start_formated2
                    avail_dic["adspots_in_avail"] = []
                    window_dic["avails_in_win"].append(avail_dic)
                    adspots = AdspotsInAvail.objects.filter(id_avail=avail.id_avail)
                    window_dic['num_of_avails'] = j
                    k = 0
                    avail_dic["num_of_adspots"] = 0
                    for adspot in adspots:
                        adspot_dic = {}
                        adspot_dic["k"] = k
                        k = k+1
                        avail_dic["num_of_adspots"] = k
                        adspot_dic["id_adsinavail"] = adspot.id_adsinavail
                        adspot_dic["id_adspot"] = adspot.id_adspot
                        avail_dic["adspots_in_avail"].append(adspot_dic)
                data["windows"].append(window_dic)


            data_playlist = {'data':data["windows"],'playlist': playlist, 'channels': channels,'region':region,'d':d,'number_of_wins':data["number_of_wins"]}
            return render(request, "DAIManagementApp/edit_playlist_id.html", data_playlist)
        elif(duplicate_btn):
            playlist = Playlists.objects.get(pk=id_playlist)
            channels = Channels.objects.filter(id_user=request.session['id_user'])
            region = ChannelsZone.objects.get(id_zone_channel=playlist.id_zone_channel.id_zone_channel)
            d = playlist.broadcastdate

            win = Windows.objects.filter(id_playlist=playlist.id_playlist).values_list('id_window', flat=True)

            data = {}
            data["windows"] = []
            data["number_of_wins"] = 0
            i = 0

            windows = Windows.objects.filter(id_playlist=playlist.id_playlist).order_by('window_start')
            for window in windows:
                window_dic = {}
                window_dic['i'] = i
                i = i+1
                window_dic['id_window'] = window.id_window
                window_start_formated = datetime.datetime.strptime(window.window_start, '%Y-%m-%d %H:%M:%S')
                window_start_formated_2 = window_start_formated.strftime("%H:%M")
                window_end_formated = datetime.datetime.strptime(window.window_end, '%Y-%m-%d %H:%M:%S')
                window_end_formated_2 = window_end_formated.strftime("%H:%M")
                window_dic['window_start'] = window_start_formated_2
                window_dic['window_end'] = window_end_formated_2
                window_dic['avails_in_win'] = []
                window_dic['num_of_avails'] = 0
                data["number_of_wins"] = i
                avails = Avails.objects.filter(id_window=window.id_window)
                j = 0
                for avail in avails:
                    avail_dic = {}
                    avail_dic["j"] = j
                    j = j+1
                    avail_dic["id_avail"] = avail.id_avail
                    avail_start_formated = datetime.datetime.strptime(avail.avail_start, '%Y-%m-%d %H:%M:%S')
                    avail_start_formated2 = avail_start_formated.strftime("%H:%M")
                    avail_dic["avail_start"] = avail_start_formated2
                    avail_dic["adspots_in_avail"] = []
                    window_dic["avails_in_win"].append(avail_dic)
                    adspots = AdspotsInAvail.objects.filter(id_avail=avail.id_avail)
                    window_dic['num_of_avails'] = j
                    k = 0
                    avail_dic["num_of_adspots"] = 0
                    for adspot in adspots:
                        adspot_dic = {}
                        adspot_dic["k"] = k
                        k = k+1
                        avail_dic["num_of_adspots"] = k
                        adspot_dic["id_adsinavail"] = adspot.id_adsinavail
                        adspot_dic["id_adspot"] = adspot.id_adspot
                        avail_dic["adspots_in_avail"].append(adspot_dic)
                data["windows"].append(window_dic)


            data_playlist = {'data':data["windows"],'playlist': playlist, 'channels': channels,'region':region,'d':d,'number_of_wins':data["number_of_wins"]}
            return render(request, "DAIManagementApp/duplicate_playlist_id.html", data_playlist)
        elif(view_btn):
            return redirect('view_playlist', id_playlist=id_playlist)
        else:
            return redirect('insertion_results', id_playlist=id_playlist)
    else:
        id_session = str(request.session['id_user'])
        channels = Channels.objects.filter(id_user=id_session).values_list('id_channel', flat=True)
        playlists = Playlists.objects.filter(id_channel__in=channels).order_by('-id_playlist')
        data = {'playlists': playlists}
        return render(request, "DAIManagementApp/playlists_results.html",data)


@check_user
def draft_playlist(request,id_playlist="1"):
    import datetime  # top-level 'from datetime import datetime' shadows the module
    if request.method == 'POST':
        playlist = Playlists.objects.get(pk=id_playlist)
        channels = Channels.objects.filter(id_user=request.session['id_user'])
        region = ChannelsZone.objects.get(id_zone_channel=playlist.id_zone_channel.id_zone_channel)
        d = playlist.broadcastdate

        win = Windows.objects.filter(id_playlist=playlist.id_playlist).values_list('id_window', flat=True)

        data = {}
        data["windows"] = []
        data["number_of_wins"] = 0
        i = 0

        windows = Windows.objects.filter(id_playlist=playlist.id_playlist)
        for window in windows:
            window_dic = {}
            window_dic['i'] = i
            i = i+1
            window_dic['id_window'] = window.id_window
            window_start_formated = datetime.datetime.strptime(window.window_start, '%Y-%m-%d %H:%M:%S')
            window_start_formated_2 = window_start_formated.strftime("%H:%M")
            window_end_formated = datetime.datetime.strptime(window.window_end, '%Y-%m-%d %H:%M:%S')
            window_end_formated_2 = window_end_formated.strftime("%H:%M")
            window_dic['window_start'] = window_start_formated_2
            window_dic['window_end'] = window_end_formated_2
            window_dic['avails_in_win'] = []
            window_dic['num_of_avails'] = 0
            data["number_of_wins"] = i
            avails = Avails.objects.filter(id_window=window.id_window)
            j = 0
            for avail in avails:
                avail_dic = {}
                avail_dic["j"] = j
                j = j+1
                avail_dic["id_avail"] = avail.id_avail
                avail_start_formated = datetime.datetime.strptime(avail.avail_start, '%Y-%m-%d %H:%M:%S')
                avail_start_formated2 = avail_start_formated.strftime("%H:%M")
                avail_dic["avail_start"] = avail_start_formated2
                avail_dic["adspots_in_avail"] = []
                window_dic["avails_in_win"].append(avail_dic)
                adspots = AdspotsInAvail.objects.filter(id_avail=avail.id_avail)
                window_dic['num_of_avails'] = j
                k = 0
                avail_dic["num_of_adspots"] = 0
                for adspot in adspots:
                    adspot_dic = {}
                    adspot_dic["k"] = k
                    k = k+1
                    avail_dic["num_of_adspots"] = k
                    adspot_dic["id_adsinavail"] = adspot.id_adsinavail
                    adspot_dic["id_adspot"] = adspot.id_adspot
                    avail_dic["adspots_in_avail"].append(adspot_dic)
            data["windows"].append(window_dic)

        data_playlist = {'data':data["windows"],'playlist': playlist, 'channels': channels,'region':region,'d':d,'number_of_wins':data["number_of_wins"]}
        return render(request, "DAIManagementApp/edit_draft_playlist_id.html", data_playlist)
    else:
        id_session = str(request.session['id_user'])
        channels = Channels.objects.filter(id_user=id_session).values_list('id_channel', flat=True)
        playlists = Playlists.objects.filter(id_channel__in=channels,is_draft='1').order_by('-id_playlist')
        data = {'playlists': playlists}
        return render(request, "DAIManagementApp/draft_playlist.html",data)

@check_user
def update_draft_playlist(request, id_playlist):
    import datetime  # top-level 'from datetime import datetime' shadows the module
    if request.method == 'POST':
        apply_btn = request.POST.get("apply_btn")
        draft_btn = request.POST.get("draft_btn")
        if(apply_btn):
            id_user = request.session['id_user']
            useraccess = Useraccess.objects.get(id_user=id_user)
            channel_id = request.POST.get('channel_id')
            channeldata = Channels.objects.get(id_channel=channel_id)
            zonename = request.POST.get('zonename')
            daydate = request.POST.get('day')
            number_of_windows = request.POST.get('numofwin')
            #return HttpResponse(number_of_windows)
            channel_zone = ChannelsZone.objects.get(id_channel=channel_id, zonename=zonename)
            version = request.POST.get('version')

            daydate = datetime.datetime.strptime(str(daydate), '%Y-%m-%d')
            daydate = daydate.strftime('%Y-%m-%d')
            start_date = str(daydate) + "T00:01:00+00:00"
            end_date = str(daydate) + "T23:59:00+00:00"
            now = datetime.datetime.now()
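            # Reuse the highest existing version/draft_version for this broadcast date;
            # the applied playlist is saved with is_draft='0' and the version numbers are
            # not incremented here.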

            max_version = Playlists.objects.filter(broadcastdate=str(daydate)).aggregate(Max('version')).get('version__max')
            new_version = int(max_version or 0)
            max_version_draft = Playlists.objects.filter(broadcastdate=str(daydate)).aggregate(Max('draft_version')).get('draft_version__max')
            new_version_draft = int(max_version_draft or 0)
            Playlist = Playlists(id_channel_id=channel_id, version=new_version, broadcastdate=str(daydate), start_date=start_date, end_date=end_date, creation_datetime=now, id_zone_channel_id=channel_zone.id_zone_channel, is_draft='0', draft_version=new_version_draft)
            Playlist.save()
            traffic = 0
            record_inserted = 0
            var_test = 0
            for i in range(int(number_of_windows)):
                if request.POST.get('numofavails['+str(i)+']'):
                    numofavails = request.POST.get('numofavails['+str(i)+']')
                    window_start = request.POST.get('windowstart['+str(i)+']')
                    window_start = daydate+' '+window_start+':00'
                    window_end = request.POST.get('windowend['+str(i)+']')
                    window_end = daydate+' '+window_end+':00'
                    FMT = '%Y-%m-%d %H:%M:%S'
                    window_duration = datetime.datetime.strptime(window_end, FMT) - datetime.datetime.strptime(window_start, FMT)
                    window_duration = datetime.datetime.strptime(str(window_duration), '%H:%M:%S')
                    window_duration = window_duration.strftime('%H%M%S00')
                    Window = Windows(id_playlist_id=Playlist.id_playlist, window_start=window_start, window_end=window_end, window_duration=window_duration )
                    Window.save()

                    for j in range(int(numofavails)):
                        if request.POST.get('availstart['+str(i)+']['+str(j)+']'):
                            av_start = request.POST.get('availstart['+str(i)+']['+str(j)+']')
                            av_start = daydate+' '+av_start+':00'
                            number_of_ads = request.POST.get('numofads['+str(i)+']['+str(j)+']')
                            Avail = Avails(id_window_id=Window.id_window, avail_start=av_start, availinwindow=str(j+1), datetime=now )
                            Avail.save()

                            for k in range(int(number_of_ads)):
                                if request.POST.get('ad['+str(i)+']['+str(j)+']['+str(k)+']'):
                                    adspot = request.POST.get('ad['+str(i)+']['+str(j)+']['+str(k)+']')
                                    traffic +=1
                                    AdspotsInAv = AdspotsInAvail(id_avail_id=Avail.id_avail, id_adspot_id=adspot, positioninavail=str(k+1), trafficid=traffic)
                                    AdspotsInAv.save()
                                    record_inserted = 1
            last_id_playlist = Playlist.id_playlist
            xmlfilename = GenerateXMLfromDatabase(daydate, channel_id, channel_zone.id_zone_channel, str(new_version), str(max_version_draft))
            path_inftp = channeldata.ftp_channel_name+'/schedules/'+channel_zone.region
            # uploadFTP(useraccess.ftp_server, useraccess.ftp_port, useraccess.ftp_user, useraccess.ftp_password, xmlfilename , path_inftp)

            ############ Display data on view_playlist_id page ##################
            return redirect('view_playlist', id_playlist=last_id_playlist)
        else:
            id_user = request.session['id_user']
            useraccess = Useraccess.objects.get(id_user=id_user)
            channel_id = request.POST.get('channel_id')
            channeldata = Channels.objects.get(id_channel=channel_id)
            zonename = request.POST.get('zonename')
            daydate = request.POST.get('day')
            number_of_windows = request.POST.get('numofwin')
            #return HttpResponse(number_of_windows)
            channel_zone = ChannelsZone.objects.get(id_channel=channel_id, zonename=zonename)
            version = request.POST.get('version')

            daydate = datetime.datetime.strptime(str(daydate), '%Y-%m-%d')
            daydate = daydate.strftime('%Y-%m-%d')
            start_date = str(daydate) + "T00:01:00+00:00"
            end_date = str(daydate) + "T23:59:00+00:00"
            now = datetime.datetime.now()

            max_version = Playlists.objects.filter(broadcastdate=str(daydate)).aggregate(Max('version')).get('version__max')
            new_version = int(max_version or 0)

            max_version_draft = Playlists.objects.filter(broadcastdate=str(daydate)).aggregate(Max('draft_version')).get('draft_version__max')
            new_version_draft = int(max_version_draft or 0)
            Playlist = Playlists(id_channel_id=channel_id, version=new_version, broadcastdate=str(daydate), start_date=start_date, end_date=end_date, creation_datetime=now, id_zone_channel_id=channel_zone.id_zone_channel, is_draft='1', draft_version=new_version_draft + 1)
            Playlist.save()
            traffic = 0
            record_inserted = 0
            var_test = 0
            for i in range(int(number_of_windows)):
                if request.POST.get('numofavails['+str(i)+']'):
                    numofavails = request.POST.get('numofavails['+str(i)+']')
                    window_start = request.POST.get('windowstart['+str(i)+']')
                    window_start = daydate+' '+window_start+':00'
                    window_end = request.POST.get('windowend['+str(i)+']')
                    window_end = daydate+' '+window_end+':00'
                    FMT = '%Y-%m-%d %H:%M:%S'
                    window_duration = datetime.datetime.strptime(window_end, FMT) - datetime.datetime.strptime(window_start, FMT)
                    window_duration = datetime.datetime.strptime(str(window_duration), '%H:%M:%S')
                    window_duration = window_duration.strftime('%H%M%S00')
                    Window = Windows(id_playlist_id=Playlist.id_playlist, window_start=window_start, window_end=window_end, window_duration=window_duration )
                    Window.save()

                    for j in range(int(numofavails)):
                        if request.POST.get('availstart['+str(i)+']['+str(j)+']'):
                            av_start = request.POST.get('availstart['+str(i)+']['+str(j)+']')
                            av_start = daydate+' '+av_start+':00'
                            number_of_ads = request.POST.get('numofads['+str(i)+']['+str(j)+']')
                            Avail = Avails(id_window_id=Window.id_window, avail_start=av_start, availinwindow=str(j+1), datetime=now )
                            Avail.save()

                            for k in range(int(number_of_ads)):
                                if request.POST.get('ad['+str(i)+']['+str(j)+']['+str(k)+']'):
                                    adspot = request.POST.get('ad['+str(i)+']['+str(j)+']['+str(k)+']')
                                    traffic +=1
                                    AdspotsInAv = AdspotsInAvail(id_avail_id=Avail.id_avail, id_adspot_id=adspot, positioninavail=str(k+1), trafficid=traffic)
                                    AdspotsInAv.save()
                                    record_inserted = 1
            last_id_playlist = Playlist.id_playlist
            # xmlfilename = GenerateXMLfromDatabase(daydate, channel_id, channel_zone.id_zone_channel, str(new_version) )
            # path_inftp = channeldata.ftp_channel_name+'/schedules/'+channel_zone.region
            # uploadFTP(useraccess.ftp_server, useraccess.ftp_port, useraccess.ftp_user, useraccess.ftp_password, xmlfilename , path_inftp)

            ############ Display data on view_playlist_id page ##################
            return redirect('draft_playlist')

@check_user
def predict_sfr(request):
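    # POST: forecast per-minute impressions for the selected channel/zone using the
    # SFR audience history of the hardcoded '2M Maroc' channel. Predicted percentages
    # from predict() are scaled by total_users / market_share_purcent and rendered on
    # predict.html; GET simply shows the channel picker.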
    from django.db import connection
    if request.method == 'POST':
        channel = Channels.objects.get(pk = request.POST.get('channel_id'))
        zonename =  request.POST.get('zonename')
        region = ChannelsZone.objects.get(zonename=zonename, id_channel = channel.id_channel)
        #day =  request.POST.get('day')
        #day = datetime.datetime.strptime(day, '%Y-%m-%d')
        #dayformat = day.strftime('%Y-%m-%d')
        val = Sfr_analytics.objects.filter(sfr_channel_name='2M Maroc' , region = region.region).values_list("day","minute","purcent")
        purcent = Impressions.objects.get(pk='1')
        labels,predict_val = predict(val)
        nb  =  float(purcent.total_users) / float(purcent.market_share_purcent)

        data  = [int(float(x)*nb) for x in predict_val ]
        channels = Channels.objects.filter(id_user=request.session['id_user'])
        return render(request, 'DAIManagementApp/predict.html', {'labels': labels, 'data': data, 'day': '2021-09-19', 'channels': channels})
    else:
        channels = Channels.objects.filter(id_user=request.session['id_user'])
        return render(request, 'DAIManagementApp/predict.html',{'channels': channels})

def predict(val ):
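    # Fit a degree-8 polynomial regression (scikit-learn pipeline) on the audience
    # percentages recorded for the matching weekday, persist the predictions as
    # Sfr_predict rows dated six days ahead, and return (minute labels, predictions).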
    import datetime
    import pandas as pd
    import numpy as np
    import calendar
    df = pd.DataFrame(val , columns = ['Day','Minute','Purcent'])
    #df['Datetime'] = df['Day'] + ' ' + df['Minute']
    week = []
    for i in df['Day']:
        week.append(calendar.day_name[datetime.datetime.strptime(str(i), '%Y-%m-%d').weekday()])
    day = datetime.datetime.now()
    day_week = calendar.day_name[day.weekday()-1]
    df['Day_Week'] = week

    df_final  = df.loc[ df['Day_Week']==day_week  ]
    df_final.index = range(0,len(df_final['Minute']))
    from sklearn.preprocessing import PolynomialFeatures
    from sklearn.pipeline import make_pipeline
    from sklearn.linear_model import LinearRegression
    degree=8
    polyreg=make_pipeline(PolynomialFeatures(degree),LinearRegression())
    X= np.array(df_final.index).reshape(-1,1)
    y= np.array(df_final['Purcent']).reshape(-1,1)
    polyreg.fit(X,y)
    predict = polyreg.predict(X)
    df_final = df_final.drop_duplicates(subset=['Minute'])

    day += datetime.timedelta(days=6)
    label = df['Minute']
    for i in range(0,len(predict)) :
        line = Sfr_predict( sfr_channel_name = '2M Maroc',day = day , minute=label[i] , purcent=abs(predict[i]))
        line.save()

    return (label , predict)

@check_user
def predict_adbreak_old(request):
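    # Legacy ad-break prediction: for each channel, bucket historical breaks into
    # 30-minute slots for one fixed reference day (2022-05-26), estimate how often a
    # break occurs per slot (count / 3), and store Adbreak_predict rows shifted one
    # week forward.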
    import pandas as pd
    import calendar
    import datetime
    channels= AdbreakHistory.objects.all().values_list('id_channel').distinct()
    channels = [i[0] for i in channels ]
    for channel in channels:
        adbreak = AdbreakHistory.objects.filter(id_channel=channel).values_list('channel_name','day','time','duration')
        df = pd.DataFrame(adbreak , columns=['Channel','Day','Time','Duration'])
        week = []
        for i in df['Day']:
                week.append(calendar.day_name[datetime.datetime.strptime(str(i), '%Y-%m-%d').weekday()])
        df['week'] = week
        dur =[float(i)for i in df['Duration']]
        df['Duration']= dur
        time = [x[:-3] for x in df['Time']]
        t=[]
        for i in time :
            m = int(i[3:]) - int(i[3:])%30
            if len(str(m)) ==1:
                m =str(m)+'0'
            t.append(i[:3]+str(m))
        df['Time'] = t
        #print(type(day))

        day = datetime.datetime.strptime('2022-05-26','%Y-%m-%d')
        day_week = calendar.day_name[day.weekday()]
        df2 = df.loc[df['week'] == day_week]

        all_time = list(df2['Time'])
        time_ad = set(all_time)
        count = [ { 'time' :i , 'count' : all_time.count(i) }  for i in time_ad]
        fact = [ { 'time' :i['time'] , 'fact' : i['count'] / 3 }  for i in count]
        #day = datetime.datetime.now()
        day += datetime.timedelta(days=7)

        for i in fact :
                if i['fact'] >= 0.6 :
                    df3 = df2.loc[df2['Time'] == i['time']]
                    mean = sum(df3['Duration'])/len(df3['Duration'])
                    mean = round(mean,2)
                    intervale = int(30 // round(i['fact']))
                    for j in range(round(i['fact'])) :
                        minute =i['time'][3:]
                        minute = str(j*intervale +int(minute) )
                        if len(minute)==1:
                            minute ='0' + minute

                        time_  = i['time'].split(':')[0]+':' + minute
                        channel_id = Channels.objects.get(pk=channel)
                        adbreak_predict = Adbreak_predict(id_channel=1 ,channel_name =channel_id.channel_name , datetime=day,day=str(day).split(' ')[0] ,time =time_,duration=mean)
                        adbreak_predict.save()
    return render(request,'DAIManagementApp/index.html')


@check_user
def predict_adbreak(request):
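    # Same bucketing approach as predict_adbreak_old, but repeated for every day of
    # June 2022 via calendar.Calendar().itermonthdays(); each day's predictions are
    # written as Adbreak_predict rows shifted one week forward.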
    import pandas as pd
    import calendar
    import datetime
    channels= AdbreakHistory.objects.all().values_list('id_channel').distinct()
    channels = [i[0] for i in channels ]
    for channel in channels:
        adbreak = AdbreakHistory.objects.filter(id_channel=channel).values_list('channel_name','day','time','duration')
        df = pd.DataFrame(adbreak , columns=['Channel','Day','Time','Duration'])
        week = []
        for i in df['Day']:
                week.append(calendar.day_name[datetime.datetime.strptime(str(i), '%Y-%m-%d').weekday()])
        df['week'] = week
        dur =[float(i)for i in df['Duration']]
        df['Duration']= dur
        time = [x[:-3] for x in df['Time']]
        t=[]
        for i in time :
            m = int(i[3:]) - int(i[3:])%30
            if len(str(m)) ==1:
                m =str(m)+'0'
            t.append(i[:3]+str(m))
        df['Time'] = t
        #print(type(day))

        import calendar
        cal = calendar.Calendar()

        for day1 in cal.itermonthdays(2022, 6):
            # print day1
            if day1 == 0:
                day1 = 1
            day = '2022-06-0'+str(day1)

            day = datetime.datetime.strptime(day,'%Y-%m-%d')
            day_week = calendar.day_name[day.weekday()]
            df2 = df.loc[df['week'] == day_week]

            all_time = list(df2['Time'])
            time_ad = set(all_time)
            count = [ { 'time' :i , 'count' : all_time.count(i) }  for i in time_ad]
            fact = [ { 'time' :i['time'] , 'fact' : i['count'] / 3 }  for i in count]
            #day = datetime.datetime.now()
            day += datetime.timedelta(days=7)

            for i in fact :
                    if i['fact'] >= 0.6 :
                        df3 = df2.loc[df2['Time'] == i['time']]
                        mean = sum(df3['Duration'])/len(df3['Duration'])
                        mean = round(mean,2)
                        intervale = int(30 // round(i['fact']))

                        for j in range(round(i['fact'])) :
                            minute =i['time'][3:]
                            minute = str(j*intervale +int(minute) )
                            if len(minute)==1:
                                minute ='0' + minute

                            time_  = i['time'].split(':')[0]+':' + minute
                            channel_id = Channels.objects.get(pk=channel)
                            adbreak_predict = Adbreak_predict(id_channel=1 ,channel_name =channel_id.channel_name , datetime=day,day=str(day).split(' ')[0] ,time =time_,duration=mean)
                            adbreak_predict.save()

    return render(request,'DAIManagementApp/index.html')



def predict_adbreak99(day):
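    # Parameterised variant: takes the target day as a string and splits each
    # 30-minute slot into 1, 2 or 3 predicted breaks depending on the observed
    # frequency band (fact), writing AdbreaPredict rows.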
    import pandas as pd
    import calendar
    import datetime
    channels= AdbreakHistory.objects.all().values_list('id_channel').distinct()
    channels = [i[0] for i in channels ]
    for channel in channels:
        adbreak = AdbreakHistory.objects.filter(id_channel=channel).values_list('channel_name','day','time','duration')
        df = pd.DataFrame(adbreak , columns=['Channel','Day','Time','Duration'])
        week = []
        for i in df['Day']:
                week.append(calendar.day_name[datetime.datetime.strptime(str(i), '%Y-%m-%d').weekday()])
        df['week'] = week
        dur =[float(i)for i in df['Duration']]
        df['Duration']= dur
        time = [x[:-3] for x in df['Time']]
        t=[]
        for i in time :
            m = int(i[3:]) - int(i[3:])%30
            if len(str(m)) ==1:
                m =str(m)+'0'
            t.append(i[:3]+str(m))
        df['Time'] = t
        print(type(day))
        day = datetime.datetime.strptime(day,'%Y-%m-%d')
        day_week = calendar.day_name[day.weekday()]
        df2 = df.loc[df['week'] == day_week]

        all_time = list(df2['Time'])
        time_ad = set(all_time)
        count = [ { 'time' :i , 'count' : all_time.count(i) }  for i in time_ad]
        fact = [ { 'time' :i['time'] , 'fact' : i['count'] / 3 }  for i in count]
        #day = datetime.datetime.now()
        day += datetime.timedelta(days=7)

        for i in fact :
            ifact = i['fact']

            if i['fact'] >= 0.6 and i['fact'] <= 1.5 :
                df3 = df2.loc[df2['Time'] == i['time']]
                mean = format(sum(df3['Duration'])/len(df3['Duration']), ".2f")

                # intervale = int(30 / i['fact']) #//====> i['fact'] = 0.666 we'll have to replace it by 2 so that 30/2 = 15
                intervale = int(30 / 1)
                for j in range(1) :
                    minute =i['time'][3:]
                    # minute += str(j*intervale +int(minute) ) #//====>  += used to concat the 2 minutes values
                    minute = str(j*intervale +int(minute) )

                    time_  = i['time'][:3] + minute
                    if len(time_) == 4:
                        time_ = time_+"0"
                    adbreak_predict = AdbreaPredict(id_channel=1 ,channel_name ='2M' , datetime=day,day=str(day).split(' ')[0] ,time =time_,duration=mean)
                    adbreak_predict.save()
            if i['fact'] >= 1.6 and i['fact'] <= 2.5 :
                df3 = df2.loc[df2['Time'] == i['time']]
                mean = format(sum(df3['Duration'])/len(df3['Duration']), ".2f")


                # intervale = int(30 / i['fact']) #//====> i['fact'] = 0.666 we'll have to replace it by 2 so that 30/2 = 15
                intervale = int(30 / 2)
                for j in range(2) :
                    minute =i['time'][3:]
                    # minute += str(j*intervale +int(minute) ) #//====>  += used to concat the 2 minutes values
                    minute = str(j*intervale +int(minute) )

                    time_  = i['time'][:3] + minute
                    if len(time_) == 4:
                        time_ = time_+"0"
                    adbreak_predict = AdbreaPredict(id_channel=1 ,channel_name ='2M' , datetime=day,day=str(day).split(' ')[0] ,time =time_,duration=mean)
                    adbreak_predict.save()

            if i['fact'] >= 2.6 and i['fact'] <= 3.5 :
                df3 = df2.loc[df2['Time'] == i['time']]
                mean = format(sum(df3['Duration'])/len(df3['Duration']), ".2f")


                # intervale = int(30 / i['fact']) #//====> i['fact'] = 0.666 we'll have to replace it by 2 so that 30/2 = 15
                intervale = int(30 / 3)
                for j in range(3) :
                    minute =i['time'][3:]
                    # minute += str(j*intervale +int(minute) ) #//====>  += used to concat the 2 minutes values
                    minute = str(j*intervale +int(minute) )

                    time_  = i['time'][:3] + minute
                    if len(time_) == 4:
                        time_ = time_+"0"
                    adbreak_predict = AdbreaPredict(id_channel=1 ,channel_name ='2M' , datetime=day,day=str(day).split(' ')[0] ,time =time_,duration=mean)
                    adbreak_predict.save()

        # for i in fact :
        #     if i['fact'] >= 0.6 :
        #         df3 = df2.loc[df2['Time'] == i['time']]
        #         mean = sum(df3['Duration'])/len(df3['Duration'])
        #         mean = round(mean,2)
        #         intervale = int(30 // round(i['fact']))
        #         for j in range(round(i['fact'])) :
        #             minute =i['time'][3:]
        #             minute = str(j*intervale +int(minute) )
        #
        #             time_  = i['time'].split(':')[0]+':' + minute
        #             channel_id = Channels.objects.get(pk=channel)
        #             adbreak_predict = AdbreaPredict(id_channel=1 ,channel_name =channel_id.channel_name , datetime=day,day=str(day).split(' ')[0] ,time =time_,duration=mean)
        #             adbreak_predict.save()





@check_user
def add_agency(request):
    if request.method == 'POST':
        name_agency = request.POST.get('name_agency')
        description_agency = request.POST.get('description_agency')
        now = datetime.datetime.now()
        agency = Agency(id_user_id=request.session['id_user'],name=name_agency,description=description_agency,datetime=now,is_deleted=0)
        agency.save()
        now = datetime.datetime.now()
        activite = 'Add  Agency'
        desc = 'Admin Add  Agency  id: ' + str(agency.id_agency)
        activity = Activity(activity=activite , date=now ,description=desc )
        activity.save()
    return render(request, "DAIManagementApp/add_agency.html")

@check_user
def edit_agency(request , id_agency=""):
    if request.method == 'POST':
        agency = Agency.objects.get(pk=id_agency)

        data = {
            'agency':agency
        }
        return render(request , 'DAIManagementApp/edit_agency_id.html',{'data':data})
    agency = Agency.objects.filter(id_user=request.session['id_user'],is_deleted=0)
    return render(request , 'DAIManagementApp/edit_agency.html',{'agency':agency})

@check_user
def update_agency(request,id_agency=''):
    if request.method == 'POST':
        agency = Agency.objects.get(pk=id_agency)
        # Part 1
        name = request.POST.get('name_agency')
        description = request.POST.get('description_agency')


        agency.name=name
        agency.description=description

        agency.save()
        now = datetime.datetime.now()
        activite = 'Edit Agency'
        desc = 'Admin Edit Agency id: ' + str(agency.id_agency)
        activity = Activity(activity=activite, date=now, description=desc)
        activity.save()

        agency = Agency.objects.filter(id_user=request.session['id_user'],is_deleted=0)
        return render(request,'DAIManagementApp/edit_agency.html',{'agency': agency})
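    # Non-POST requests fall back to the agency list (assumed intended behavior).
    agency = Agency.objects.filter(id_user=request.session['id_user'], is_deleted=0)
    return render(request, 'DAIManagementApp/edit_agency.html', {'agency': agency})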

@check_user
def delete_agency(request,id_agency=''):
    if request.method == 'POST':
        agency = Agency.objects.get(pk=id_agency)
        print(agency.is_deleted)
        now = datetime.datetime.now()
        agency.datetime = now
        agency.is_deleted = 1
        agency.save()
        activite = 'Delete Agency'
        desc = 'Admin Delete Agency id: ' + str(agency.id_agency)
        activity = Activity(activity=activite, date=now, description=desc)
        activity.save()

    # jingles = Jingles.objects.filter(id_channel__in=channels,is_delete=0)
    agency = Agency.objects.filter(id_user=request.session['id_user'],is_deleted=0)

    return render(request,'DAIManagementApp/delete_agency.html', {'agency': agency})


@check_user
def theme_mode(request):
    current_mode = request.GET.get('current_mode')
    if current_mode == 'light':
        body_class_theme = ''
    else:
        body_class_theme = 'dark-mode'

    request.session['theme_mode'] = body_class_theme
    return HttpResponse('')

def func_predict():
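    # Run forever, executing predict_adbreak every day at midnight via the
    # `schedule` library. Note that predict_adbreak expects a `request` argument,
    # so the scheduled call needs to be wrapped or adapted before it can succeed.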
    schedule.every().day.at('00:00').do(predict_adbreak)
    # schedule.run_pending()
    while True:
        schedule.run_pending()
        time.sleep(1)

@check_user
def genExcel(request,id_campaign):
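    # Build an in-memory xlsxwriter report for the campaign (info, period, booked
    # volume/CPM, placement channels) plus per-airing impressions derived from
    # Verifs and SfrAnalytics, and return it as an .xlsx download. The excel/user_<id>
    # folder is created on disk, but the workbook itself is written only to the response.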
    import xlsxwriter
    import pandas as pd
    import datetime
    info = Campaigns.objects.get(pk=id_campaign)
    now = datetime.datetime.now()

    path = 'excel/user_'+request.session['id_user']
    if not os.path.exists(path):
        os.makedirs(path)
    path+= '/'+info.name+now.strftime('%d_%m_%Y')+'.xlsx'

    response = HttpResponse(content_type='application/vnd.openxmlformats-officedocument.spreadsheetml.sheet')
    response['Content-Disposition'] = "attachment; filename="+info.name+now.strftime('%d_%m_%Y')+'.xlsx'
    workbook = xlsxwriter.Workbook(response, {'in_memory': True})
    worksheet = workbook.add_worksheet()

    cell_format = workbook.add_format()

    cell_format.set_pattern(1)  # This is optional when using a solid fill.
    cell_format.set_bg_color('#acbbfe')

    info = Campaigns.objects.get(pk=id_campaign)

    worksheet.write(1, 1,'Adtlas Reporting | ' +info.name+ ' cash [ ' +info.start_day + ' to ' + info.end_day + ' ] ' )

    ###################################################
    #############           INFO           ############
    ###################################################

    worksheet.write(7, 1, 'INFO' , cell_format)

    worksheet.write(8, 1, 'Campaign Name')
    worksheet.write(8, 2, info.name)

    worksheet.write(9, 1, 'Agency')
    worksheet.write(9, 2, info.id_brand.id_agency.name)

    worksheet.write(10, 1, 'Advertiser')
    worksheet.write(10, 2, info.id_brand.brand_name)

    worksheet.write(11, 1, 'Creative')
    worksheet.write(11, 2, info.id_adpost.adspot_name)

    #####################################################
    #############           PERIOD           ############
    #####################################################
    worksheet.write(13, 1, 'PERIOD' ,cell_format)

    worksheet.write(14, 1, 'Start date')
    worksheet.write(14, 2, info.start_day)

    worksheet.write(15, 1, 'End date')
    worksheet.write(15, 2, info.end_day)

    #####################################################
    #############           BOOKED           ############
    #####################################################

    worksheet.write(17, 1, 'BOOKED' ,cell_format)

    worksheet.write(18, 1, 'Volume')
    worksheet.write(18, 2, info.volume)

    worksheet.write(19, 1, 'Pacing')
    worksheet.write(19, 2, info.pacing == True)

    worksheet.write(20, 1, 'CPM')
    worksheet.write(20, 2, info.cpm)

    ########################################################
    #############           PLACEMENT           ############
    ########################################################

    worksheet.write(22, 1, 'PLACEMENT',cell_format)

    worksheet.write(23, 1, 'General Rotation')
    worksheet.write(23, 2, info.general_rotation == 1)

    placement_channel = Placement.objects.filter(id_campaign=id_campaign).values_list('id_channel')
    placement_channel = [x[0] for x in list(placement_channel)]
    channels = Channels.objects.filter(id_channel__in =placement_channel ).values_list('channel_name', 'sfr_channel_name')
    channels_name = [i[0] for i in channels]
    channels_sfr = [i[1] for i in channels]
    worksheet.write(24, 1, 'Channels')
    worksheet.write(24, 2, str(channels_name).replace('\'' , ''))

    ########################################################
    #############           Tables              ############
    ########################################################
    worksheet.write(7, 6, 'CHANNEL' ,cell_format)
    worksheet.write(7, 7, 'REGION',cell_format)
    worksheet.write(7, 8, 'DAY',cell_format)
    worksheet.write(7, 9, 'MINUTE',cell_format)
    worksheet.write(7, 10, 'IMPRESSIONS',cell_format)

    name = info.id_adpost.adspot_name
    verifs = Verifs.objects.filter(spotId=name ,airStatuscode='0001').values_list('networkname','zonename','airTime')
    df= pd.DataFrame(verifs,columns=['Channel','Region','Time'])
    region = [ ChannelsZone.objects.get(zonename=i).region for i in df['Region']]
    channels = [Channels.objects.get(channel_name=i[0]).sfr_channel_name for i in df[['Channel','Region']].values ]
    df['Channel'] = channels
    df['Region'] = region

    day = [i.split(' ')[0] for i in df['Time']]
    minute = [i.split(' ')[1] for i in df['Time']]

    df['Day']=day
    df['Minute']=minute
    df.drop('Time' , axis='columns',inplace=True)
    df['Minute'] = [i[:-2]+'00' for i in df['Minute']]

    purcent=[]

    for i in df.values:
        purcent.append(SfrAnalytics.objects.get(sfr_channel_name=i[0],region=i[1],day=i[2],minute=i[3]).purcent)



    purcent = [float(i) for i in purcent]

    df['Purcent'] = purcent



    imp = Impressions.objects.get(pk='1')

    purcent = [float(i) * float(imp.total_users) / float(imp.market_share_purcent) for i in df['Purcent']]

    df['Purcent'] = purcent


    j=0
    for i in df.values:
        worksheet.write(8+j, 6, i[0])
        worksheet.write(8+j, 7, i[1])
        worksheet.write(8+j, 8, i[2])
        worksheet.write(8+j, 9, i[3])
        worksheet.write(8+j, 10, i[4])
        j+=1

    worksheet.write(7, 13, 'TOTAL IMPRESSIONS',cell_format)
    worksheet.write(8, 13, int(sum(purcent)))

    workbook.close()
    return response


@check_user
def test_rest(request , channel="", start_at="", duration=""):
    print("channel:"+channel)
    print("start_at:"+start_at)
    print("duration:"+str(duration))
    alldata = "channel:"+channel+"start_at:"+start_at+"duration:"+str(duration)


    return HttpResponse(alldata)


@check_user
def realtime_adbreak(request,id_channel,start_at,duration,msg):
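    # Entry point for real-time ad-break notifications: persist the break, trigger
    # the VAST request loop (loop_vast) and hand off to realtime_filter, which may
    # rebuild and re-upload the playlist XML for short breaks.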
    from django.http import JsonResponse
    start=str(start_at.replace('_',' '))
    realtime=RealTimeAdbreak(id_channel=id_channel,start_at=start,duration=duration)
    realtime.save()
    # task(request, "https://videoapi.smartadserver.com/ac?siteid=385419&pgid=1633029&fmtid=92859&ab=1&tgt=&oc=1&out=vast3&ps=1&pb=0&visit=S&vcn=s&vph=%5BplayerHeight%5D&vpw=%5BplayerWidth%5D&vpmt=%5BplaybackMethod%5D&skip=&mabd=%5BmaxAdBreakDuration%5D&ctd=%5BcontentDuration%5D&tmstp=%5Btimestamp%5D&cklb=1",duration,id_channel)
    loop_vast(request, "https://videoapi.smartadserver.com/ac?siteid=385419&pgid=1633029&fmtid=92859&ab=1&tgt=&oc=1&out=vast3&ps=1&pb=0&visit=S&vcn=s&vph=%5BplayerHeight%5D&vpw=%5BplayerWidth%5D&vpmt=%5BplaybackMethod%5D&skip=&mabd=%5BmaxAdBreakDuration%5D&ctd=%5BcontentDuration%5D&tmstp=%5Btimestamp%5D&cklb=1",duration,id_channel)
    realtime_filter(id_channel, start, duration, msg)
    return JsonResponse({'channel':id_channel,
        'start_at':start,
        'duration':duration})


def realtime_filter(id_channel,start_at,duration, msg):#start_at
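    # Current real-time handler: for short breaks (duration < 25), find the playlist
    # window covering start_at, move its not-yet-aired avails into the last window of
    # the fixed fallback playlist (id_playlist=100), bump the playlist version and
    # regenerate/upload the schedule XML. Progress is reported via Telegram.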
    FMT = '%Y-%m-%d %H:%M:%S'
    my_channel = Channels.objects.get(id_channel=id_channel)
    useraccess = Useraccess.objects.get(id_user=1)
    send_msg_telegram(my_channel.channel_name,start_at,duration, msg)

    if int(duration)<25:
        playlist=Playlists.objects.filter(id_channel=id_channel)
        channel_zone = ChannelsZone.objects.get(id_channel=id_channel, zonename="2005")
        otherplaylist = Playlists.objects.get(id_playlist=100)

        window=Windows.objects.filter(window_start__lte=start_at, window_end__gte=start_at).filter(id_playlist__in=playlist).last()
        new_window=Windows.objects.filter(id_playlist=otherplaylist).last()
        update_status = -1
        if (window):
            theid_playlist = window.id_playlist.id_playlist
            insertion_results_getter(theid_playlist)



            send_msg_telegram2("📣 Short adbreak detected, updating the playlist ...")

            update_status = 0
            print("I'm heeeere")
            print(window.id_playlist.id_playlist)
            preview_window_end = window.window_end
            from datetime import timedelta
            print(start_at)
            #start_at += timedelta(days=0, hours=0, minutes=-2)
            start_at = datetime.datetime.strptime(start_at, "%Y-%m-%d %H:%M:%S")

            # window.window_end = start_at
            # endo = window.window_end
            # window_duration = datetime.datetime.strptime(str(window.window_end), FMT) - datetime.datetime.strptime(str(window.window_start), FMT)
            # window_duration = datetime.datetime.strptime(str(window_duration), '%H:%M:%S')
            # window_duration = window_duration.strftime('%H%M%S00')
            # window.window_duration = window_duration
            # window.id_playlist = Playlists.objects.get(id_playlist=377)
            # window.save()
            print(window)
            start_at =  start_at + datetime.timedelta(minutes=15)
            # start_at = datetime.datetime.strptime(start_at, '%Y-%m-%d %H:%M:%S') + datetime.timedelta(minutes=2)

            # new_window_duration = datetime.datetime.strptime(str(preview_window_end), FMT) - datetime.datetime.strptime(str(start_at), FMT)
            # print("start_at"+str(start_at))
            # print("preview_window_end"+str(preview_window_end))
            # print("new window duration "+str(new_window_duration))
            # new_window_duration = datetime.datetime.strptime(str(new_window_duration), '%H:%M:%S')
            # new_window_duration = new_window_duration.strftime('%H%M%S00')
            # new_window = Windows(id_playlist_id=otherplaylist.id_playlist, window_start=start_at, window_end=preview_window_end, window_duration=new_window_duration )
            if new_window:
                new_window.save()
            print(new_window)

            # Get the id of the adbreaks that are not Aired yet
            from django.db import connection
            cursor = connection.cursor()

            broadcastDateForQuery = str(window.id_playlist.broadcastdate).replace("-","")
            id_windowForQuery = window.id_window
            daydate = str(window.id_playlist.broadcastdate)
            daydate_xml = str(window.id_playlist.broadcastdate)
            daydate = daydate.replace("-","")


            # daydate0012 = daydate2

            # queries = """
            #                   SELECT Adspots_in_avail.id_avail FROM Avails
            #                   left join Adspots_in_avail on Adspots_in_avail.id_avail = Avails.id_avail
            #                   left join Verifs on ( Verifs.broadcastDate = %s and Verifs.trafficId = Adspots_in_avail.trafficId )
            #                   where ( Adspots_in_avail.positionInAvail = 1 and Verifs.airStatusCode <> 0001 ) and  Avails.id_window= %s
            #               """

            queries = """
                              SELECT Adspots_in_avail.id_avail FROM Avails
                              left join Adspots_in_avail on Adspots_in_avail.id_avail = Avails.id_avail
                              left join Verifs on ( Verifs.broadcastDate = %s and Verifs.trafficId = Adspots_in_avail.trafficId )
                              where (Verifs.airStatusCode <> 0001 or Verifs.airStatusCode is null) and Avails.id_window= %s
                          """

            data_tuple=(broadcastDateForQuery,id_windowForQuery)
            cursor.execute(queries,data_tuple)
            row = cursor.fetchall()
            if new_window:
                for r in row:
                    availoo = r[0]
                    print(r[0])
                    avail = Avails.objects.get(id_avail=r[0])
                    pprint(avail)
                    avail.id_window = new_window
                    windddd = avail.id_window
                    avail.avail_start = new_window.window_start
                    print("the new avail winodws : "+str(avail.avail_start ))
                    avail.save()

            pplay = Playlists.objects.get(id_playlist=theid_playlist)
            pplay.version = int(pplay.version) + 1
            # pplay.version = int(pplay.version)
            pplay.save()

            new_version = str(Playlists.objects.get(id_playlist=theid_playlist).version)
            new_version_draft = str(Playlists.objects.get(id_playlist=theid_playlist).draft_version)
            max_version_draft = new_version_draft
            max_version = new_version
            # window.id_playlist = Playlists.objects.get(id_playlist=377)
            # window.save()
            # new_version = int(max_version)
            # max_version_draft = Playlists.objects.filter(broadcastdate=str(daydate)).aggregate(Max('draft_version')).get('draft_version__max')
            # new_version_draft = int(max_version_draft)
            xmlfilename = GenerateXMLfromDatabase(daydate_xml, id_channel, channel_zone.id_zone_channel, str(new_version), str(max_version_draft))
            path_inftp = my_channel.ftp_channel_name+'/schedules/'+channel_zone.region
            if(uploadFTP4(useraccess.ftp_server, useraccess.ftp_port, useraccess.ftp_user, useraccess.ftp_password, xmlfilename , path_inftp)):
                update_status = 1


        if(update_status == 1):
            send_msg_telegram2("✅ Playlist updated")
        # if(update_status == 0):
        #     send_msg_telegram2("### Something happened, playlist was NOT updated ###")



def realtime_filter_new(id_channel,start_at,duration, msg):#start_at
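    # Alternative handler: shortens the matched window to start_at, appends a new
    # window (start_at + 15 min up to the old window end) on the same playlist, moves
    # the window's avails into it, then regenerates and uploads the schedule XML.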
    FMT = '%Y-%m-%d %H:%M:%S'
    my_channel = Channels.objects.get(id_channel=id_channel)
    useraccess = Useraccess.objects.get(id_user=1)
    send_msg_telegram(my_channel.channel_name,start_at,duration, msg)
    if int(duration)<25:
        playlist=Playlists.objects.filter(id_channel=id_channel)
        channel_zone = ChannelsZone.objects.get(id_channel=id_channel, zonename="2005")
        window=Windows.objects.filter(window_start__lte=start_at, window_end__gte=start_at).filter(id_playlist__in=playlist).last()
        update_status = -1
        if (window):
            send_msg_telegram2("📣 Short adbreak detected, updating the playlist ...")

            update_status = 0
            print("I'm heeeere")
            print(window.id_playlist.id_playlist)
            preview_window_end = window.window_end
            from datetime import timedelta
            print(start_at)
            #start_at += timedelta(days=0, hours=0, minutes=-2)
            start_at = datetime.datetime.strptime(start_at, "%Y-%m-%d %H:%M:%S")

            window.window_end = start_at
            endo = window.window_end
            window_duration = datetime.datetime.strptime(str(window.window_end), FMT) - datetime.datetime.strptime(str(window.window_start), FMT)
            window_duration = datetime.datetime.strptime(str(window_duration), '%H:%M:%S')
            window_duration = window_duration.strftime('%H%M%S00')
            window.window_duration = window_duration
            window.save()
            print(window)
            start_at =  start_at + datetime.timedelta(minutes=15)
            # start_at = datetime.datetime.strptime(start_at, '%Y-%m-%d %H:%M:%S') + datetime.timedelta(minutes=2)

            new_window_duration = datetime.datetime.strptime(str(preview_window_end), FMT) - datetime.datetime.strptime(str(start_at), FMT)
            print("start_at"+str(start_at))
            print("preview_window_end"+str(preview_window_end))
            print("new window duration "+str(new_window_duration))
            new_window_duration = datetime.datetime.strptime(str(new_window_duration), '%H:%M:%S')
            new_window_duration = new_window_duration.strftime('%H%M%S00')
            new_window = Windows(id_playlist_id=window.id_playlist.id_playlist, window_start=start_at, window_end=preview_window_end, window_duration=new_window_duration )
            new_window.save()
            theid_playlist = window.id_playlist.id_playlist
            print(new_window)

            # Get the id of the adbreaks that are not Aired yet
            from django.db import connection
            cursor = connection.cursor()

            broadcastDateForQuery = str(window.id_playlist.broadcastdate).replace("-","")
            id_windowForQuery = window.id_window
            daydate = str(window.id_playlist.broadcastdate)
            daydate_xml = str(window.id_playlist.broadcastdate)
            daydate = daydate.replace("-","")
            # daydate0012 = daydate2

            # queries = """
            #                   SELECT Adspots_in_avail.id_avail FROM Avails
            #                   left join Adspots_in_avail on Adspots_in_avail.id_avail = Avails.id_avail
            #                   left join Verifs on ( Verifs.broadcastDate = %s and Verifs.trafficId = Adspots_in_avail.trafficId )
            #                   where ( Adspots_in_avail.positionInAvail = 1 and Verifs.airStatusCode <> 0001 ) and  Avails.id_window= %s
            #               """

            queries = """
                              SELECT Adspots_in_avail.id_avail FROM Avails
                              left join Adspots_in_avail on Adspots_in_avail.id_avail = Avails.id_avail
                              left join Verifs on ( Verifs.broadcastDate = %s and Verifs.trafficId = Adspots_in_avail.trafficId )
                              where Avails.id_window= %s
                          """

            data_tuple=(broadcastDateForQuery,id_windowForQuery)
            cursor.execute(queries,data_tuple)
            row = cursor.fetchall()
            if new_window:
                for r in row:
                    availoo = r[0]
                    print(r[0])
                    avail = Avails.objects.get(id_avail=r[0])
                    pprint(avail)
                    avail.id_window = new_window
                    windddd = avail.id_window
                    avail.avail_start = new_window.window_start
                    print("the new avail winodws : "+str(avail.avail_start ))
                    avail.save()

            pplay = Playlists.objects.get(id_playlist=theid_playlist)
            pplay.version = int(pplay.version) + 1
            # pplay.version = int(pplay.version)
            pplay.save()

            new_version = str(Playlists.objects.get(id_playlist=theid_playlist).version)
            new_version_draft = str(Playlists.objects.get(id_playlist=theid_playlist).draft_version)
            max_version_draft = new_version_draft
            max_version = new_version
            # new_version = int(max_version)
            # max_version_draft = Playlists.objects.filter(broadcastdate=str(daydate)).aggregate(Max('draft_version')).get('draft_version__max')
            # new_version_draft = int(max_version_draft)
            xmlfilename = GenerateXMLfromDatabase(daydate_xml, id_channel, channel_zone.id_zone_channel, str(new_version), str(max_version_draft))
            path_inftp = my_channel.ftp_channel_name+'/schedules/'+channel_zone.region
            if(uploadFTP4(useraccess.ftp_server, useraccess.ftp_port, useraccess.ftp_user, useraccess.ftp_password, xmlfilename , path_inftp)):
                update_status = 1


        if(update_status == 1):
            send_msg_telegram2("✅ Playlist updated")
        # if(update_status == 0):
        #     send_msg_telegram2("### Something happened, playlist was NOT updated ###")





def realtime_filter_old(id_channel,start_at,duration, msg):#start_at
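    # Earlier variant of realtime_filter_new with the same window-splitting logic,
    # but without bumping the playlist version before regenerating the XML.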
    FMT = '%Y-%m-%d %H:%M:%S'
    my_channel = Channels.objects.get(id_channel=id_channel)
    useraccess = Useraccess.objects.get(id_user=1)
    send_msg_telegram(my_channel.channel_name,start_at,duration, msg)
    if int(duration)<25:
        playlist=Playlists.objects.filter(id_channel=id_channel)
        channel_zone = ChannelsZone.objects.get(id_channel=id_channel, zonename="2005")
        window=Windows.objects.filter(window_start__lte=start_at, window_end__gte=start_at).filter(id_playlist__in=playlist).last()
        update_status = -1
        if (window):
            send_msg_telegram2("📣 Short adbreak detected, updating the playlist ...")

            update_status = 0
            print("I'm heeeere")
            print(window.id_playlist.id_playlist)
            preview_window_end = window.window_end
            from datetime import timedelta
            print(start_at)
            #start_at += timedelta(days=0, hours=0, minutes=-2)
            start_at = datetime.datetime.strptime(start_at, "%Y-%m-%d %H:%M:%S")

            window.window_end = start_at
            endo = window.window_end
            window_duration = datetime.datetime.strptime(str(window.window_end), FMT) - datetime.datetime.strptime(str(window.window_start), FMT)
            window_duration = datetime.datetime.strptime(str(window_duration), '%H:%M:%S')
            window_duration = window_duration.strftime('%H%M%S00')
            window.window_duration = window_duration
            window.save()
            print(window)
            start_at =  start_at + datetime.timedelta(minutes=15)
            # start_at = datetime.datetime.strptime(start_at, '%Y-%m-%d %H:%M:%S') + datetime.timedelta(minutes=2)

            new_window_duration = datetime.datetime.strptime(str(preview_window_end), FMT) - datetime.datetime.strptime(str(start_at), FMT)
            print("start_at"+str(start_at))
            print("preview_window_end"+str(preview_window_end))
            print("new window duration "+str(new_window_duration))
            new_window_duration = datetime.datetime.strptime(str(new_window_duration), '%H:%M:%S')
            new_window_duration = new_window_duration.strftime('%H%M%S00')
            new_window = Windows(id_playlist_id=window.id_playlist.id_playlist, window_start=start_at, window_end=preview_window_end, window_duration=new_window_duration )
            new_window.save()
            theid_playlist = window.id_playlist.id_playlist
            print(new_window)

            # Get the id of the adbreaks that are not Aired yet
            from django.db import connection
            cursor = connection.cursor()

            broadcastDateForQuery = str(window.id_playlist.broadcastdate).replace("-","")
            id_windowForQuery = window.id_window
            daydate = str(window.id_playlist.broadcastdate)
            daydate_xml = str(window.id_playlist.broadcastdate)
            daydate = daydate.replace("-","")
            # daydate0012 = daydate2

            # queries = """
            #                   SELECT Adspots_in_avail.id_avail FROM Avails
            #                   left join Adspots_in_avail on Adspots_in_avail.id_avail = Avails.id_avail
            #                   left join Verifs on ( Verifs.broadcastDate = %s and Verifs.trafficId = Adspots_in_avail.trafficId )
            #                   where ( Adspots_in_avail.positionInAvail = 1 and Verifs.airStatusCode <> 0001 ) and  Avails.id_window= %s
            #               """

            queries = """
                              SELECT Adspots_in_avail.id_avail FROM Avails
                              left join Adspots_in_avail on Adspots_in_avail.id_avail = Avails.id_avail
                              left join Verifs on ( Verifs.broadcastDate = %s and Verifs.trafficId = Adspots_in_avail.trafficId )
                              where Avails.id_window= %s
                          """

            data_tuple=(broadcastDateForQuery,id_windowForQuery)
            cursor.execute(queries,data_tuple)
            row = cursor.fetchall()
            if new_window:
                for r in row:
                    availoo = r[0]
                    print(r[0])
                    avail = Avails.objects.get(id_avail=r[0])
                    pprint(avail)
                    avail.id_window = new_window
                    windddd = avail.id_window
                    avail.avail_start = new_window.window_start
                    print("the new avail winodws : "+str(avail.avail_start ))
                    avail.save()


            new_version = str(Playlists.objects.get(id_playlist=theid_playlist).version)
            new_version_draft = str(Playlists.objects.get(id_playlist=theid_playlist).draft_version)
            max_version_draft = new_version_draft
            max_version = new_version
            # new_version = int(max_version)
            # max_version_draft = Playlists.objects.filter(broadcastdate=str(daydate)).aggregate(Max('draft_version')).get('draft_version__max')
            # new_version_draft = int(max_version_draft)
            xmlfilename = GenerateXMLfromDatabase(daydate_xml, id_channel, channel_zone.id_zone_channel, str(new_version), str(max_version_draft))
            path_inftp = my_channel.ftp_channel_name+'/schedules/'+channel_zone.region
            if(uploadFTP4(useraccess.ftp_server, useraccess.ftp_port, useraccess.ftp_user, useraccess.ftp_password, xmlfilename , path_inftp)):
                update_status = 1


        if(update_status == 1):
            send_msg_telegram2("✅ Playlist updated")
        # if(update_status == 0):
        #     send_msg_telegram2("### Something happened, playlist was NOT updated ###")

######################### Amine_section
def get_info_from_vast_file(url,duration):
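    # Intended to fetch a VAST document and accumulate creative metadata until the
    # requested break duration is filled. The request/parsing loop is currently
    # commented out, so the function always returns an empty dict, and the hardcoded
    # stickyadstv URL overrides the `url` parameter.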
    from bs4 import BeautifulSoup
    import concurrent.futures
    #dict_info={"Advertiser":"","Creative_id":"","Duration":"","MediaFile":{"id":"","delivery":"","type":"","width":"","height":"",
           # "scalable":"","maintainAspectRatio":"","url":""},"networkID":"","templateID":"","advertiserID":"","campaignID":"","insertionID":"",
        #  "siteID":"","pageID":"","formatID":""}
    list_id=[]
    result=int(duration)
    dict_all={}
    cte=0

    url = "http://ads.stickyadstv.com/www/delivery/swfIndex.php?reqType=AdsSetup&protocolVersion=2.0&zoneId=33011444&playerSize=720x576&_fw_gdpr=0&_fw_us_privacy=1---&_fw_did_idfv=8D9E1F6C-5A2B-7143-9038-62471DC58C24&_fw_atts=0&ltlg=48.856,2.352&_fw_deviceMake=settopbox&_fw_devicemodel=set-top_box&_fw_content_genre=generalist&_fw_content_rating=+14&_fw_is_lat=1&_fw_coppa=0&withOMSDK=false&_fw_gdpr=0&_fw_gdpr_consent=0&_fw_gdpr_consented_providers=0"
    print(url)
    my_headers = {"User-Agent":"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/103.0.0.0 Safari/537.36 "}
    # page = requests.get(url,headers=my_headers)
    # soup = BeautifulSoup(page.content, "xml")
    # send_msg_telegram3(str(soup))

    #
    # url = 'http://207.180.254.4/vast.xml'
    # my_headers = {"User-Agent":"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/103.0.0.0 Safari/537.36 "}

    # now1 = datetime.datetime.now()

    count = 0
    # page = requests.get(url)
    # soup = BeautifulSoup(page.content, "lxml")
# start of 10k 10000 requests

    # while count<500:
    #     page = requests.get(url)
    #     # media = soup.find("MediaFile").text
    #     # print(soup)
    #     count = count + 1
    #     if count < 500:
    #         if soup == BeautifulSoup(page.content, "xml"):
    #             print ("same page")
    #         else:
    #             print(BeautifulSoup(page.content, "xml"))
    #             send_msg_telegram3(str(BeautifulSoup(page.content, "xml")))
    #
    #         soup = BeautifulSoup(page.content, "xml")
    #     print("running VAST request number: "+str(count))
    #
    # def request_url(url, req_num):
    #     response = requests.get(url)
    #     print(f'Request number : {req_num}')
    #     return response
    #
    # with concurrent.futures.ThreadPoolExecutor() as executor:
    #     results = [executor.submit(request_url, url, i) for i in range(1, 9500)]
    #     for f in concurrent.futures.as_completed(results):
    #         # count += 1
    #         try:
    #             print(f.result())
    #         # soup = BeautifulSoup(results[count].content, "lxml")
    #         except Exception as e:
    #             print(f'Error: {e}')
# end of 10k 10000 requests

    # while result>3 and cte<5:
    # while false:
    #     #print('11111111111111')
    #     c=0
    #     print(list_id)
    #     url = "http://ads.stickyadstv.com/www/delivery/swfIndex.php?reqType=AdsSetup&protocolVersion=2.0&zoneId=33011444"
    #     # url="https://videoapi.smartadserver.com/ac?siteid=385419&pgid=1633029&fmtid=92859&ab=1&tgt=&oc=1&out=vast3&ps=1&pb=0&visit=S&vcn=s&vph=%5BplayerHeight%5D&vpw=%5BplayerWidth%5D&vpmt=%5BplaybackMethod%5D&skip=&mabd="+str(result)+"&ctd=%5BcontentDuration%5D&tmstp=%5Btimestamp%5D&cklb=1"
    #     print(url)
    #     my_headers = {"User-Agent":"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/103.0.0.0 Safari/537.36 "}
    #     page = requests.get(url,headers=my_headers)
    #     # send_msg_telegram3(str(page))
    #     soup = BeautifulSoup(page.content, "xml")
    #     # send_msg_telegram3(str(soup))
    #
    #     try:
    #         dict_info={"Advertiser":"","Creative_id":"","Duration":"","MediaFile":{"id":"","delivery":"","type":"","width":"","height":"",
    #         "scalable":"","maintainAspectRatio":"","url":""},"networkID":"","templateID":"","advertiserID":"","campaignID":"","insertionID":"",
    #       "siteID":"","pageID":"","formatID":""}
    #
    #         dict_info['Advertiser']=soup.find("Advertiser").text
    #         dict_info['Creative_id']=soup.find("insertionID").text
    #         dict_info['Duration']=soup.find("Duration").text
    #         dict_info['MediaFile']['url']=soup.find("MediaFile").text
    #         dict_info['networkID']=soup.find_all('networkID')[0].text
    #         dict_info['templateID']=soup.find_all('templateID')[0].text
    #         dict_info['advertiserID']=soup.find_all('advertiserID')[0].text
    #         dict_info['campaignID']=soup.find_all('campaignID')[0].text
    #         dict_info['insertionID']=soup.find_all('insertionID')[0].text
    #         dict_info['siteID']=soup.find_all('siteID')[0].text
    #         dict_info['pageID']=soup.find_all('pageID')[0].text
    #         dict_info['formatID']=soup.find_all('formatID')[0].text
    #
    #         c=int(dict_info['Duration'].split(':')[-1].split('.')[0])
    #         print(c)
    #
    #         if dict_info['Creative_id'] not in list_id:# and result >c:
    #             list_id.append(dict_info['Creative_id'])
    #             dict_all['dict'+dict_info['Creative_id']]=dict_info
    #             result=result-c
    #         else:
    #             result=result
    #             cte=cte+1
    #         print('result!!!!!!!!',result)
    #         print(cte)
    #
    #     except :
    #         print(f'the url {url} is empty',url)
    #         cte=cte+1
    return dict_all

@check_user
def pending(request):

    urls=Pending.objects.all()
    return render(request, 'DAIManagementApp/pending.html',context={'urls':urls})

@check_user
def task(request, url,duration,id_channel):
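    # Fill today's playlist from a VAST response: bump (or create) today's playlist,
    # pick or create a window around the current time, add an avail, then append an
    # AdspotsInAvail row for each creative already known in Adspots; creatives without
    # a match are checked against the Pending table.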
    #get the today date
    channel_id = id_channel
    channeldata = Channels.objects.get(id_channel=channel_id)
    useraccess = Useraccess.objects.get(id_user=request.session['id_user'])
    os.system("echo 'inside the task function' > a_log/log.txt")
    to_day_date=datetime.datetime.now().strftime('%Y-%m-%d')
    start_date=datetime.datetime.strptime(to_day_date+' 00:01:00', "%Y-%m-%d %H:%M:%S")
    end_date=datetime.datetime.strptime(to_day_date+' 23:59:00', "%Y-%m-%d %H:%M:%S")
    now=datetime.datetime.now()
    now_str=str(now).split('.')[0]

    now_minus2=datetime.datetime.now() - datetime.timedelta(minutes=2)
    now_minus2=str(now_minus2).split('.')[0]

    now_plus10=datetime.datetime.now() + datetime.timedelta(minutes=7)
    now_plus10=str(now_plus10).split('.')[0]


    #check in playlist table if today_date exists
    check_value = Playlists.objects.filter(broadcastdate=to_day_date)
    #if not exist.
    if check_value:
        #if there is a playlist = get the max version of today (v) and
        #call to duplicate function
        Playlist = Playlists.objects.filter(broadcastdate=to_day_date).order_by('-version')[0]
        max_version = Playlist.version
        # max_version = Playlists.objects.filter(broadcastdate=to_day_date).aggregate(Max('version')).get('version__max')
        new_version = int(max_version)+1
        Playlist.version = new_version
        Playlist.save()
        current_traffic_total = 0
        all_windows = Windows.objects.filter(id_playlist=Playlist)
        for wind in all_windows:
            all_avails = Avails.objects.filter(id_window=wind.id_window)
            for av in all_avails:
                all_spots = AdspotsInAvail.objects.filter(id_avail=av.id_avail)
                for ad in all_spots:
                    current_traffic_total+=1


        #update it with (v+1) in the same line

        # Use filter().first() so a missing or duplicated window yields None or the
        # first match, instead of leaving an unusable queryset in `window`.
        window = Windows.objects.filter(id_playlist=Playlist, window_start__lte=now_str, window_end__gte=now_str).first()
        if window:
            id_window=window.id_window
            avail_start = window.window_start
            numberofavails = Avails.objects.filter(id_window=window.id_window).count()
            availinwindow = int(numberofavails) + 1
            FMT = '%Y-%m-%d %H:%M:%S'
            window_duration = datetime.datetime.strptime(str(window.window_end), FMT) - datetime.datetime.strptime(str(window.window_start), FMT)
            window_duration = datetime.datetime.strptime(str(window_duration), '%H:%M:%S')
            window_duration = window_duration.strftime('%H%M%S00')

        else:
            #creation of window,
            #checking if there is a window before 2 minutes:
            prev_window = Windows.objects.filter(id_playlist=Playlist, window_start__lte=now_minus2, window_end__gte=now_minus2).first()

            if prev_window:
                new_window_start = prev_window.window_end
            else:
                new_window_start = now_minus2

            #checking if there is a window within the lookahead interval:
            next_window = Windows.objects.filter(id_playlist=Playlist, window_start__lte=now_plus10, window_end__gte=now_plus10).first()
            if next_window:
                new_window_end = next_window.window_start
            else:
                new_window_end = now_plus10
            #window duration is fixed here instead of being computed from end - start
            window_duration = '00120000'

            #creation of the window:
            new_window = Windows(id_playlist_id=Playlist.id_playlist, window_start=new_window_start, window_end=new_window_end, window_duration=window_duration )
            new_window.save()
            id_window=new_window.id_window
            avail_start = new_window.window_start
            availinwindow = 1

        #avail creation
        Avail = Avails(id_window_id=id_window, avail_start=avail_start, availinwindow=availinwindow, datetime=now )
        Avail.save()


    if not check_value:
        #if there is no playlist, create a new one with version = 1
        Playlist = Playlists(id_channel_id=id_channel ,version=1, broadcastdate=to_day_date, start_date=start_date, end_date=end_date, creation_datetime=now, id_zone_channel_id=1,is_draft='0',draft_version='0')
        Playlist.save()
        #-- create one big window of 24h interval
        Window = Windows(id_playlist_id=Playlist.id_playlist, window_start=str(start_date), window_end=str(end_date), window_duration='23580000' )
        Window.save()
        new_version = 1
        #-- create avail
        current_traffic_total = 0
        id_window=Window.id_window
        avail_start = Window.window_start
        Avail = Avails(id_window_id=id_window, avail_start=avail_start, availinwindow=1, datetime=now )
        Avail.save()

    dict_all=get_info_from_vast_file(url,duration)
    positioninavail = 0
    for key,value in dict_all.items():
        positioninavail+=1
        # Creative_id=value['Creative_id']
        Creative_url=value['MediaFile']['url']
        Creative_url = Creative_url.replace(" ","")
        adpost = Adspots.objects.filter(url_from_vast=Creative_url).first()

        if adpost:
            current_traffic_total += 1
            AdspotsInAv = AdspotsInAvail(id_avail_id=Avail.id_avail, id_adspot_id=adpost.id_adpost, positioninavail=positioninavail, trafficid=current_traffic_total)
            AdspotsInAv.save()

        else:
            creative_id = value['Creative_id']
            url = value['MediaFile']['url']
            if not Pending.objects.filter(creative_id=creative_id, url=url).exists():
                pending_creative = Pending(creative_id=creative_id, url=url)
                pending_creative.save()
                send_msg_telegram2("New Creative Detected in VAST <a href='"+url+"'>(Video_link)</a>")


    #hard-coded to France / 2005
    zonename = "2005"
    channel_zone = ChannelsZone.objects.get(id_channel=channel_id, zonename=zonename)
    # file generation + uploadFTP
    xmlfilename = GenerateXMLfromDatabase(to_day_date, channel_id, channel_zone.id_zone_channel, str(new_version) )
    path_inftp = channeldata.ftp_channel_name+'/schedules/'+channel_zone.region
    # uploadFTP(useraccess.ftp_server, useraccess.ftp_port, useraccess.ftp_user, useraccess.ftp_password, xmlfilename , path_inftp)


@check_user
def loop_vast(request, url,duration,id_channel):
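    """Parse the VAST at `url` and record any creatives not already pending in Pending, notifying via Telegram."""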

#     import urllib
#
# # link = "http://www.somesite.com/details.pl?urn=2344"
#     f = urllib.urlopen(url)
#     myfile = f.read()
#     send_msg_telegram2("Vast content:"+str(myfile))



    dict_all=get_info_from_vast_file(url,duration)
    positioninavail = 0
    for key,value in dict_all.items():
        creative_id = value['Creative_id']
        url=value['MediaFile']['url']
        if not Pending.objects.filter(creative_id=creative_id,url=url).exists():
            pending_creative = Pending(creative_id=creative_id, url=url)
            pending_creative.save()
            send_msg_telegram2("New Creative Detected in VAST <a href='"+url+"'>(Video_link)</a>")

########################################################### End amine section

@check_user
def realtime_adspot(request, start_at, adspot):
    from django.http import JsonResponse
    start=str(start_at.replace('_',' '))
    send_ad_telegram(start, adspot)
    return JsonResponse({'start_at':start,
        'adpost':adspot})

def send_adspot(DST_FOLDER, SRC_FILEPATH):
    """Transfer a file to the FTP server."""
    import os
    import ftplib
    FTP_ADDR = "uk06.tmd.cloud"
    USERNAME = "testftp@epgsano.com"
    PASSWORD = "I?#=s3FfnSu_"

    # Connect
    print("Connecting to FTP...")
    session = ftplib.FTP(FTP_ADDR, USERNAME, PASSWORD)

    # Change to target dir
    chdir(session, dirpath=DST_FOLDER)

    # Transfer file
    print("Transferring %s to %s..." % (os.path.basename(SRC_FILEPATH), DST_FOLDER))
    with open(SRC_FILEPATH, "rb") as file:
        session.storbinary('STOR %s' % os.path.basename(SRC_FILEPATH), file)

    print("Closing session.")
    session.quit()


def chdir(session, dirpath):
    """Change to directory."""
    if not directory_exists(session, dirpath):
        print("Creating folder %s..." % dirpath)
        # create each level of the path, ignoring levels that already exist
        partial_path = ''
        for folder in dirpath.split('/'):
            partial_path = folder if not partial_path else partial_path + '/' + folder
            try:
                session.mkd(partial_path)
            except ftplib.error_perm:
                pass

    print("Changing to directory %s..." % dirpath)
    session.cwd(dirpath)


def directory_exists(session, dirpath):
    """Check if remote directory exists."""
    filelist = []
    session.retrlines('LIST',filelist.append)
    for f in filelist:
        if f.split()[-1] == dirpath and f.upper().startswith('D'):
            return True
    return False



import telegram # this is from python-telegram-bot package

from django.conf import settings
import requests

def send_msg_telegram(channel_name,start_at,duration, msg):
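    """Send an "adbreak detected" notification for a channel to the configured Telegram channel."""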
    message = """
    ====================
    New Adbreak Detected: ["""+msg+"""]
    - channel: """+channel_name+"""
    - Time: ["""+start_at+"""]
    - Duration : ["""+duration+"""]
    ====================
    """
    telegram_settings = settings.TELEGRAM
    bot = telegram.Bot(token=telegram_settings['bot_token'])
    bot.send_message(chat_id="@%s" % telegram_settings['channel_name'],
    # bot.send_message(chat_id=telegram_settings['chat_id'],
        text=message, parse_mode=telegram.ParseMode.HTML)


def send_msg_telegram2(msg):
    message = """
    ["""+msg+"""]
    """
    telegram_settings = settings.TELEGRAM
    bot = telegram.Bot(token=telegram_settings['bot_token'])
    bot.send_message(chat_id="@%s" % telegram_settings['channel_name'],
        text=message, parse_mode=telegram.ParseMode.HTML)

def send_msg_telegram3(msg):
    message = """
    ["""+msg+"""]
    """
    telegram_settings = settings.TELEGRAM
    bot = telegram.Bot(token=telegram_settings['bot_token'])
    bot.send_message(chat_id="@%s" % telegram_settings['channel_name'],
        text=message)


def send_ad_telegram(start_at, adspot):
    message = """
    ====================
    Adspot detected: """+adspot+"""
    - Time: ["""+start_at+"""]
    ====================
    """
    telegram_settings = settings.TELEGRAM
    bot = telegram.Bot(token=telegram_settings['bot_token'])
    bot.send_message(chat_id="@%s" % telegram_settings['channel_name'],
        text=message, parse_mode=telegram.ParseMode.HTML)



@check_user
def statscreative(request ):
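    """Count aired spots (airStatuscode '0001') per spotId for the user's channels and render the creative stats page."""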
    from django.db.models import Count
    channel = Channels.objects.filter(id_user=request.session['id_user']).values_list('channel_name')
    channel = [ch[0] for ch in channel]
    adspot = Verifs.objects.filter(networkname__in = channel ,    airStatuscode='0001' ).values('spotId').annotate(dcount=Count("spotId")).order_by('-dcount')
    import random
    chars = '0123456789ABCDEF'
    color = ['#'+''.join(random.sample(chars,6)) for i in range(len(adspot))]
    label =[]
    data = []

    for ad in adspot :
        label.append(ad['spotId'])
        data.append(ad['dcount'])
    print(label)
    print(data)
    verifs = {  'lebel' : label ,
                'data'  : data ,
                'color' : color
            }
    brands = ads_brand(request.session['id_user'] )
    return render(request,'DAIManagementApp/stats_creative.html',{'verifs':verifs  , 'brands':brands})

def ads_brand(session):
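    """Group the user's adspots by brand and return label/data/color lists for the brand chart (counts scaled against a hard-coded total of 23)."""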
    from django.db.models import Count
    import random
    channel = Channels.objects.filter(id_user=session)
    ads = Adspots.objects.filter(id_channel__in = channel).values('id_brand').annotate(count = Count('id_brand')).order_by('-count')
    chars = '0123456789ABCDEF'
    color = ['#'+''.join(random.sample(chars,6)) for i in range(len(ads))]
    label = []
    data = []

    for ad in list(ads ):
        label.append(Brands.objects.get(pk=ad['id_brand']).brand_name)
        data.append(round(ad['count'] * 100 / 23,2 ))

    brand = {  'label' : label ,
                'data'  : data ,
                'color' : color
            }

    return brand

@check_user
def statscampaign(request):
    campaign = Campaigns.objects.filter(id_user=request.session['id_user'] ).values_list('name','volume').order_by('-volume')
    pacing = campaign_pacing(request)
    brands = campign_brand(request)
    active = campaign_avtive(request)
    cpms = Campaigns.objects.filter(id_user=1).order_by('-cpm')[:4]
    return render(request ,'DAIManagementApp/stats_campaign.html',{'pacing' : pacing , 'active':active  , 'brands':brands , 'cpms' : cpms })

@check_user
def campaign_pacing(request):
    pacing = Campaigns.objects.filter(id_user=1).values_list('pacing') #user = 1 ==> make it dynamique
    pacing = [p[0] for p in pacing ]

    result = { 'true'  : pacing.count(True) ,
               'false' : pacing.count(False) ,
               'purcent' : pacing.count(True) * 100 / len(pacing) if pacing else 0
            }
    return result

@check_user
def campaign_avtive(request):
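    """Classify the user's campaigns as finished, in progress or not started from their start_day/end_day and return label/data/color lists."""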
    from datetime import datetime
    import random
    etat = Campaigns.objects.filter(id_user = request.session['id_user'] ).exclude(start_day__exact='' ).exclude(end_day__exact='').values_list('start_day','end_day' )

    etat = [(datetime.strptime(date[0], '%Y-%m-%d'),datetime.strptime(date[1], '%Y-%m-%d')) for date in etat ]
    now = datetime.now()
    finished =0
    not_start = 0
    start = 0
    for date in etat :
        if now < date[0]:
            not_start += 1
        elif now < date[1] :
            start += 1
        else :
            finished += 1
    data = [finished , start , not_start ]
    chars = '0123456789ABCDEF'
    color = ['#'+''.join(random.sample(chars,6)) for i in range(3)]
    label = ['Finished' , 'In progress' , 'Not started']

    result = {
        'label' : label ,
        'data'  : data ,
        'color' : color
    }

    return result

@check_user
def campign_brand(request):
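    """Count the user's campaigns per brand and return label/data/color lists for the chart."""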
    from django.db.models import Count
    import random

    brand = Campaigns.objects.filter(id_user=request.session['id_user']).values('id_brand').annotate(dcount=Count("id_brand")).order_by('-dcount')
    data = []
    label = []
    chars = '0123456789ABCDEF'
    color = ['#'+''.join(random.sample(chars,6)) for i in range(len(brand))]


    for i in brand :
        label.append(Brands.objects.get(pk=i['id_brand']).brand_name)
        data.append(i['dcount'])

    result = {
        'label' : label ,
        'data'  : data ,
        'color' : color
    }

    return result

@check_user
def statsbrands(request):

    from django.db.models import Count
    ch = Channels.objects.filter(id_user=1)
    ads = Campaigns.objects.filter(id_user=request.session['id_user']).values('id_brand').annotate(dcount=Count("id_brand")).order_by('-dcount')
    label = []
    data = []
    import random
    chars = '0123456789ABCDEF'
    color = ['#'+''.join(random.sample(chars,6)) for i in range(len(ads))]
    for ad in ads :
        label.append(Brands.objects.get(id_brand=ad['id_brand']).brand_name)
        data.append(ad['dcount'])

    brands={
        'label' : label ,
        'data'  : data ,
        'color' : color
    }

    return render(request ,'DAIManagementApp/stats_brands.html', { 'brands' : brands}   )

    # NOTE: this helper is defined after the return above, so it is never reached or called.
    def brand_adspot():
        from django.db.models import Count
        ch = Channels.objects.filter(id_user=1)
        verifs  = Verifs.objects.filter(networkname__in = ch ,    airStatuscode='0001' ).values_list('spotId')
        verifs = [verif[0] for verif in verifs ]
        ads = Adspots.objects.filter(adspot_name__in = verifs,id_channel__in = ch ).values('id_brand').annotate(dcount=Count("id_brand")).order_by('-dcount')
        import random
        chars = '0123456789ABCDEF'
        color = ['#'+''.join(random.sample(chars,6)) for i in range(len(ads))]
        label = []
        data = []

        for ad in ads :
            label.append(Brands.objects.get(id_brand=ad['id_brand']).brand_name)
            data.append(ad['dcount'])

@check_user
def statschannels(request):
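    """Count the user's channels per region and render the channel stats page with ads, adbreak and SFR series."""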
    from django.db.models import Count
    import random
    channels = Channels.objects.filter(id_user=request.session['id_user'])
    zone = ChannelsZone.objects.filter(id_channel__in =channels).values('region').annotate(dcount=Count("id_channel")).order_by('-dcount')

    label = [region['region'] for region in zone]
    data = [data['dcount'] for data in zone]

    chars = '0123456789ABCDEF'
    color = ['#'+''.join(random.sample(chars,6)) for i in range(len(zone))]
    channels={
        'label':label,
        'data' : data,
        'color': color
    }
    ads=channels_ads(request)
    adbreak = channels_adbreak(request)
    sfr = channels_sfr(request)
    return render(request,'DAIManagementApp/stats_channels.html',{'channels':channels , 'ads':ads , 'adbreak':adbreak , 'sfr':sfr})

@check_user
def channels_ads(request):
    from django.db.models import Count
    import random
    channels = Channels.objects.filter(id_user=request.session['id_user'])
    ads = Adspots.objects.filter(id_channel__in = channels).values('id_channel').annotate(dcount=Count('id_channel')).order_by('-dcount')
    chars = '0123456789ABCDEF'
    color = ['#'+''.join(random.sample(chars,6)) for i in range(len(ads))]
    label= []
    data=[]
    for ad in ads :
        label.append(Channels.objects.get(id_channel=ad['id_channel']).channel_name)
        data.append(ad['dcount'])

    result={
        'label' : label ,
        'data'  : data ,
        'color' : color
    }

    return result

@check_user
def channels_adbreak(request):
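    """Return, per channel, the minimum number of ad-breaks recorded in a single day (from AdbreakHistory)."""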
    import random
    import pandas as pd
    from django.db.models import Count
    channels = Channels.objects.filter(id_user=request.session['id_user']).values_list('channel_name')
    channels = [channel[0] for channel in channels]
    data = AdbreakHistory.objects.filter(channel_name__in=channels).values_list('channel_name','day','time')
    df = pd.DataFrame(data,columns=['Channel' , 'Day' ,'Time'])

    label = []
    data = []
    channels = list(set(df['Channel']))
    chars = '0123456789ABCDEF'
    color = ['#'+''.join(random.sample(chars,6)) for i in range(len(channels))]
    for channel in channels :
        df2 = df.loc[df['Channel']==channel].groupby(by=['Channel','Day']).count()
        data.append(min(df2['Time']))
        label.append(channel)
    result={
        'label' : label ,
        'data'  : data ,
        'color' : color
    }

    return result

@check_user
def channels_sfr(request):
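    """Estimate a per-channel audience figure from the average SFR purcent, scaled by total_users and market_share_purcent."""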
    import random
    import pandas as pd

    channels = Channels.objects.filter(id_user=request.session['id_user']).values_list('sfr_channel_name')
    channels = [channel[0] for channel in channels]

    sfr = SfrAnalytics.objects.filter(sfr_channel_name__in = channels).values_list('sfr_channel_name' , 'purcent')

    df = pd.DataFrame(sfr,columns=['Channel','Purcent'])
    purcent  = [float(i) for i in df['Purcent']]
    df['Purcent'] = purcent
    total_users = int(Impressions.objects.get(pk=2).total_users)
    market_share_purcent = float(Impressions.objects.get(pk=2).market_share_purcent)

    df2 =  df.groupby(by=['Channel']).mean()
    label = list(df2.index)
    data = [i[0]*total_users*market_share_purcent for i in df2.values ]

    chars = '0123456789ABCDEF'
    color = ['#'+''.join(random.sample(chars,6)) for i in range(len(data))]
    result={
        'label' : label ,
        'data'  : data ,
        'color' : color
    }

    return result

@check_user
def generate_verifs(request):
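    """On POST, join Verifs with SFR_analytics for the selected channel, adspot and date range and return the result as an Excel download; otherwise render the selection form."""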

    if request.method == "POST":
        import pandas as pd
        from django.db import connection
        cursor = connection.cursor()
        channels = request.POST.get('channels')
        adspots = request.POST.get('adspots')
        start_day = request.POST.get('start_day')
        end_day = request.POST.get('end_day')
        print(start_day)
        print(end_day)
        start_day = str(start_day).replace('-','')
        end_day = str(end_day).replace('-','')
        q = """
            select * from Verifs
            LEFT JOIN SFR_analytics on SUBSTRING(Verifs.airTime, 12, 5) = SUBSTRING(SFR_analytics.`minute`, 1, 5) and SFR_analytics.sfr_channel_name = '{}'
            where Verifs.spotId LIKE '%{}%' and Verifs.airStatusCode = '0001' and Verifs.broadcastDate > '{}' and Verifs.broadcastDate < '{}'
        """
        try :
            cursor.execute(q.format(channels,adspots,start_day,end_day))
            row = cursor.fetchall()
            print(row)
            df = pd.DataFrame(row)
            df2 = df[[0,12,14,3,4,5,6,7,8,15,16,17,18]]
            df2.columns = ['id_verifs','Channel','Region','broadcastdate','trafficId','spotId','airTime','airLenght','airStatus','cible','indicateur','minute','purcent']

            with BytesIO() as b:
                # Use the StringIO object as the filehandle.
                writer = pd.ExcelWriter(b, engine='xlsxwriter')
                df2.to_excel(writer,index = False)
                writer.save()
                # Set up the Http response.
                filename = start_day+'_'+adspots+'.xlsx'
                response = HttpResponse(
                    b.getvalue(),
                    content_type='application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
                )
                response['Content-Disposition'] = 'attachment; filename=%s' % filename
                return response

        except Exception as e:
            print("===================================================")
            print("Error while generating the verifs export:", e)
            print("===================================================")

    channels = Channels.objects.filter(id_user = request.session['id_user'])

    adspots = Adspots.objects.filter(id_channel__in = channels).values_list('adspot_name')
    adspots = [ads[0] for ads in adspots]
    channels = [ch.sfr_channel_name for ch in channels ]

    print(channels)
    return render(request,'DAIManagementApp/generate_verifs.html',{'adspots':adspots , 'channels' : channels})

@check_user
def upload_bouygues(request):
    if request.method == 'POST':
        filename = str(request.FILES['bouygues_file']).replace(' ','_')
        print(filename)
        path = "bouygues/user_"+ request.session['id_user']
        if not os.path.exists(path):
            os.makedirs(path)
        handle_uploaded_file(request.FILES['bouygues_file'], path, filename)
        insert_bouygues(path+'/'+filename)
    return render(request , "DAIManagementApp/bouygues_upload.html" )

def insert_bouygues(file):
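    """Parse a Bouygues audience Excel export and store one Bouygues_analytics row per channel/minute.

    The date range is read from the header of the fifth column; rows after the
    first 1260 minutes are attributed to the following day, and the stored
    purcent is the sheet value doubled.
    """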
    import pandas as pd
    from datetime import datetime ,timedelta
    df = pd.read_excel(file)

    date = df.columns[4]
    date = date.replace('au ','').replace('du ','').strip()
    date = date.split(' ')
    start_day = datetime.strptime(date[0],'%d/%m/%Y')
    end_day = datetime.strptime(date[1],'%d/%m/%Y')
    periode = pd.date_range(start=start_day,end=end_day)


    columns =  ["cols_"+ str(i) for i in range(len(df.columns))]
    df.columns = columns
    cible = df['cols_4'][2]

    minute =list(df['cols_3'][13:])

    for day in periode :
        for col in df.columns[4:] :
            channel = df[col][12]
            val = list(df[col][13:])
            for i in range(0,len(minute)):
                if i < 1260 :
                    bouygue = Bouygues_analytics(channel_name=channel,day=day,cible=cible,minute=minute[i],purcent=val[i]*2)
                    bouygue.save()
                else :
                    day_2 = day +  timedelta(days=1)
                    bouygue = Bouygues_analytics(channel_name=channel,day=day_2,cible=cible,minute=minute[i],purcent=val[i]*2)
                    bouygue.save()

@check_user
def charts_test(request):
    channels = Channels.objects.filter(id_user=request.session['id_user'])
    return render(request,'DAIManagementApp/charts.html',{'channels':channels})

@check_user
def sfr_channel(request):
    import datetime
    channel = request.GET.get('channel')
    region = request.GET.get("region")
    day = request.GET.get("day")
    print(region)

    channel = Channels.objects.get(id_channel=channel).sfr_channel_name
    region = ChannelsZone.objects.get(zonename= region).region

    val = Sfr_analytics.objects.filter(sfr_channel_name=channel, region =region, day=day).values_list("minute","purcent")
    purcent = Impressions.objects.get(pk='1')
    nb  =  float(purcent.total_users) / float(purcent.market_share_purcent)
    labels  = [x[0] for x in val ]
    data  = [int(float(x[1])*nb) for x in val]
    data = {'data':data , 'label':labels}
    return JsonResponse(data, safe=True)

def adspost_avail():
    from datetime import datetime
    now = datetime.now()
    campaigns = Placement.objects.filter(id_channel=1).values_list('id_campaign')

    campaigns = [i[0] for i in campaigns ]
    adspot = Campaigns.objects.filter(id_campaign__in = campaigns , end_day__lt=now).values_list('id_adpost').distinct()
    adspot = [i[0] for i in adspot]

    # assumption: the ids of the matching avails are what the caller needs
    avail = list(AdspotsInAvail.objects.filter(id_adspot__in=adspot).values_list('id_avail', flat=True))
    return avail

@check_user
def channels_campaigns(request):
    from datetime import datetime
    now = datetime.now()
    channel = request.GET.get('channel')
    campaigns = Placement.objects.filter(id_channel=channel).values_list('id_campaign')
    channel = Channels.objects.get(id_channel=request.GET.get('channel'))
    campaigns = [i[0] for i in campaigns ]
    campaigns = Campaigns.objects.filter(id_campaign__in = campaigns , end_day__lt=now)
    data = {"campaigns":campaigns}
    return render(request,'DAIManagementApp/dropdown_compaigns.html',{"campaigns":campaigns})

@check_user
def delete_avail(request):
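    """List up to three avails from the selected day's latest playlists whose spots have verification status 1005 (not aired), with their adspots and durations; despite the name, nothing is deleted here."""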
    from datetime import datetime ,timedelta
    from iteration_utilities import deepflatten

    channel2 = request.GET.get('channel')
    playlist = request.GET.get('playlist')
    region =  request.GET.get('region')
    result = []
    channel = Channels.objects.get(id_channel=channel2)
    print(channel)
    channel_id = int(channel2)
    channel_networkname = channel.networkname
    now = datetime.now()

    if playlist != None:
        now = datetime.strptime(playlist,   '%Y-%m-%d')

    broadcast_playlist = now.strftime('%Y-%m-%d')
    broadcast_verif = now.strftime('%Y%m%d')
    n = now.strftime('%Y-%m-%d')


    print(channel)
    print(type(channel))
    print(region)
    data = Playlists.objects.last()
    print(data)
    playlists=[]
    # if region != None :
    #     region =ChannelsZone.objects.get(zonename=region)
    #
    #     playlists = Playlists.objects.filter(id_zone_channel=region)
    # if channel != None :
    #     channel = Channels.objects.get(id_channel=channel)
    #     if len(playlists)==0:
    #         playlists = Playlists.objects.filter(id_channel=channel)
    #     else :
    #         playlists = playlists.filter(id_channel=channel)
    # if playlist != None:
    #     now = datetime.strptime(playlist,   '%Y-%m-%d')
    #     n= now.strftime('%Y-%m-%d')
    #     t = datetime.strptime(n,'%Y-%m-%d') +timedelta(hours=4)
    #     #data = data.filter(start_date__gte=playlist, end_date__lt=playlist1 )
    # else :
    #     now = datetime.now()
    #     n= now.strftime('%Y-%m-%d')
    #     t = datetime.strptime(n,'%Y-%m-%d') +timedelta(hours=4)
    print(playlists)

    from django.db import connection
    cursor = connection.cursor()


    queries = """
                    SELECT Avails.id_avail FROM Windows
                    left join Playlists on Playlists.id_playlist = Windows.id_playlist
                    LEFT JOIN Avails on Avails.id_window = Windows.id_window
                    LEFT JOIN Adspots_in_avail on Adspots_in_avail.id_avail = Avails.id_avail
                    LEFT JOIN Verifs on (Verifs.trafficId = Adspots_in_avail.trafficId and Verifs.broadcastDate =  %s and Verifs.networkname = %s)
                    where Verifs.airStatusCode = "1005" and Playlists.id_channel = %s and Playlists.id_playlist in (Select max(id_playlist) from Playlists where Playlists.broadcastdate = %s group by id_zone_channel order by Playlists.version desc) group by Avails.id_avail limit 3
                  """
    print(queries)
    data_tuple=(broadcast_verif, channel_networkname, channel_id, broadcast_playlist )
    cursor.execute(queries,data_tuple)
    row = cursor.fetchall()
    print("row")
    print(row)


    # windows = Windows.objects.filter(window_start__gte=now,window_end__lt=t).select_related('id_playlist').filter(broadcastDate = n).last().order_by('window_start')[:3]
    # windows = Windows.objects.filter(window_start__gte=now,window_end__lt=t,id_playlist__in=playlists).order_by('window_start')[:3]

    # if len(playlists) > 0 :
    #     windows = Windows.objects.filter(window_start__gte=now,window_end__lt=t,id_playlist__in=playlists).order_by('window_start')[:3]
    # else :
    #     windows = Windows.objects.filter(id_playlist=data).order_by('window_start')[:3]

    for id_avs in row:
        for id_av in id_avs:
            avail = Avails.objects.get(pk=id_av)
            av={}
            av['id_avail'] = avail.id_avail
            av['start'] = avail.avail_start
            av['adspot'] = ""
            adspots = AdspotsInAvail.objects.filter(id_avail=avail)
            av['somme'] = 0
            av['duration' ]=""
            for adspot in adspots :
                av['adspot'] += " / " + adspot.id_adspot.adspot_name
                av['duration'] +=  " / " + str(adspot.id_adspot.duration)
                av['somme'] += int(adspot.id_adspot.duration)

            result.append(av)

            avail=''


    return render(request,'DAIManagementApp/avails_details.html' ,{'avails':result, 'queries':row})

@check_user
def load_charts(request):
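    """Build per-channel SFR audience series for the requested day/region/channel and return labels, series and colors as JSON."""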

    channel_id = request.GET.get('channel')
    region = request.GET.get('region')
    daydate = request.GET.get('daydate')
    print("load_charts ====== " , daydate  )
    # ====================================
    # ====================================
    #code from index to clean
    if (daydate != None):
        day = daydate
        day = datetime.datetime.strptime(day, '%Y-%m-%d')
        dayformat = day.strftime('%Y-%m-%d')

    else:
        day =  datetime.datetime.now()
        dayformat = day.strftime('%Y-%m-%d')

    if (region != None):
        zonename = ChannelsZone.objects.filter(zonename__contains=region)[0].region
        print(zonename)
    else:
        zonename = "France"

    # day = datetime.datetime.strptime(day, '%Y-%m-%d')
    # dayformat = day.strftime('%Y-%m-%d')
    #user = 1 ==> make it dynamique
    if (channel_id != None):
        channels_sfr = list(Channels.objects.filter(id_user=request.session['id_user'], id_channel = channel_id).values_list("sfr_channel_name",flat=True))
    else:
        channels_sfr = list(Channels.objects.filter(id_user=request.session['id_user']).values_list("sfr_channel_name",flat=True))

    val = Sfr_analytics.objects.filter(sfr_channel_name__in=channels_sfr, region=zonename ,day=str(dayformat))
    result = []
    labels = []
    channel_sfr=[]
    purcent = Impressions.objects.get(pk='1') # change the name impressions to SFR_market_share
    for channel in channels_sfr :
        res= val.filter(sfr_channel_name=channel).values_list('minute','purcent')
        if len(res)>0:

            nb  =  float(purcent.total_users) / float(purcent.market_share_purcent)
            labels  = [x[0] for x in res ]
            data  = [int(float(x[1])*nb) for x in res]
            result.append( data)
            channel_sfr.append(channel)
    import random

    color = colors

    channels =Channels.objects.filter(id_user = request.session['id_user']).values_list('channel_name')
    channels = [x[0] for x in channels]
    lines  = Verifs.objects.filter(airStatuscode="0001",broadcastDate="20211024") # broadcastDate is hard-coded ==> make it dynamique
    # lines  = Verifs.objects.all()
    data2 = []

    for line in lines:
        if line.networkname in channels :
            p ={
                'channel':line.networkname,
                'name' : line.spotId,
                'day' : str(line.airTime).split(' ')[0],
            }

            region = ChannelsZone.objects.filter(zonename =line.zonename)[0]
            p['region'] = region.region
            time_parts = str(line.airTime).split(' ')[1].split(':')
            minute = time_parts[0]+':'+time_parts[1]+':00'
            p['minute'] = minute
            p['color'] = '#00800000'
            if  str(line.airStatuscode) == '0001':
                p['status'] = 'Aired Successfully'
                p['color'] = '#2c2c8cb3'

                for i in Sfr_analytics.objects.filter(day=p['day'],minute=p['minute']):
                    if p['channel'] in i.sfr_channel_name :
                        purcent = Impressions.objects.get(pk='1')
                        nb  = float(i.purcent) * float(purcent.total_users) / float(purcent.market_share_purcent)
                        p['nb_wach'] = int(nb)
            elif str(line.airStatuscode) == '0008':
                p['status'] = 'Failed, Adspot cut'
                p['nb_wach'] = '-'

            elif str(line.airStatuscode) == '1005':
                p['status'] = 'Not aired yet'
                p['nb_wach'] = '-'
                p['color'] = '#c7c7c7b3'


            else :
                p['status'] = 'Failed, Other Reason..'
                p['nb_wach'] = '-'

            data2.append(p)

    campaigns = len(Campaigns.objects.filter(id_user=request.session['id_user']))
    campaigns_active = len(Campaigns.objects.filter(id_user=request.session['id_user'],pacing=True))
    advertiser = most_advertisers(request)
    bookeds = booked_adbreaks(request)
    agences =active_agency(request)
    playlist = playlists(request)
    activites = Activity.objects.all().order_by('-id_activity')[0:5]
    channels = Channels.objects.filter(id_user=request.session['id_user'])
    #activites = []
    print("end of loadchart")
    return JsonResponse({'labels': labels, 'result':result, 'channels_sfr':channel_sfr,'color':color})
    # return render(request, "DAIManagementApp/load_charts.html", {'labels': labels, 'result':result ,
    #                                                         'channels_sfr':channel_sfr,'color':color})

    #this function should verify the object we create

@check_user
def insertion_verifs(request):
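    """Download today's verification (.ver) file from the channel's FTP space and insert or update the matching Verifs rows."""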
    #playlist = Playlists.objects.get(pk=id_playlist)
    channel = Channels.objects.filter(id_user=request.session['id_user']).first()
    #region = ChannelsZone.objects.get(id_zone_channel=playlist.id_zone_channel.id_zone_channel)
    region ='2005'
    ftp_channel_name = channel.ftp_channel_name
    networkname = channel.networkname
    zonename = region
    broadcastdate = datetime.strftime(datetime.now() , '%Y%m%d')
    result = Verifs.objects.filter(networkname=networkname,zonename=zonename,broadcastDate=broadcastdate).last()
    #d = playlist.broadcastdate
    broadcastDate_verif = broadcastdate
    #win = Windows.objects.filter(id_playlist=playlist.id_playlist).values_list('id_window', flat=True)


    print("Wait I'm in ftp")
    filename_in_ftp = broadcastdate+"-"+zonename+"-00001.ver"
    path_in_ftp = "/"+ftp_channel_name+"/verifs/"
    path_in_app = "files/results/"+ftp_channel_name+"/verifs"
    if not os.path.exists(path_in_app):
        os.makedirs(path_in_app)
        # downloadFTP("uk06.tmd.cloud", "testftp@epgsano.com", "I?#=s3FfnSu_", "/2M/schedules/",  "test.txt" , "/var/www/html/DAI-Management/DAIManagement/FTP_files/")
    useraccess = Useraccess.objects.get(id_user=request.session['id_user'])
    print(useraccess)
    downloadFTP(useraccess.ftp_server, useraccess.ftp_user, useraccess.ftp_password, path_in_ftp , filename_in_ftp, path_in_app)
        # def downloadFTP(host, user, password, filepath_inftp, file_inftp,  localpath):
    if Path(path_in_app+'/'+filename_in_ftp).exists():
            doc = xml2.parse(path_in_app+'/'+filename_in_ftp)
            Spots = doc.getElementsByTagName("Spot")
            verComplete = doc.firstChild.getAttribute("verComplete")
            results = []
            for spot in Spots:
                trafficId = spot.getAttribute("trafficId")
                spotId  = spot.getAttribute("spotId")
                airTime = spot.getAttribute("airTime")
                newAirTime = airTime.replace("T", " ")
                newAirTime2 = newAirTime.replace("+02:00", "")
                airLength = spot.getAttribute("airLength")
                airStatusCode = spot.getAttribute("airStatusCode")
                version = spot.getAttribute("revision")
                try:
                    verif_to_update = Verifs.objects.get(networkname=networkname, zonename=zonename, broadcastDate=broadcastdate, trafficId=trafficId, spotId=spotId)
                    if verif_to_update:
                        verif_to_update.airTime = newAirTime2
                        verif_to_update.airLength = airLength
                        verif_to_update.airStatuscode = airStatusCode
                        verif_to_update.revision = version
                        verif_to_update.vercomplete = verComplete
                        verif_to_update.save()
                    else:
                        new_ad_verif = Verifs(networkname=networkname, zonename=zonename, broadcastDate=broadcastdate, trafficId=trafficId, spotId=spotId, airTime=newAirTime2, airLength=airLength, airStatuscode=airStatusCode, revision=version,  vercomplete = verComplete)
                        new_ad_verif.save()
                except Verifs.DoesNotExist:
                    print('oups')
                    new_ad_verif = Verifs(networkname=networkname, zonename=zonename, broadcastDate=broadcastdate, trafficId=trafficId, spotId=spotId, airTime=newAirTime2, airLength=airLength, airStatuscode=airStatusCode, revision=version,  vercomplete = verComplete)
                    new_ad_verif.save()

@check_user
def verifs_timer_insert(request):
    import sched, time
    s = sched.scheduler(time.time, time.sleep)
    s.enter(300, 1, insertion_verifs, (request,))
    s.run()

@check_user
def expande(request):
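    """Expand a playlist into its windows, avails and adspots, attaching each spot's latest verification status, and render the expande template."""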
    id_playlist = request.GET.get('id_playlist')
    playlist = Playlists.objects.get(pk=id_playlist)
    channels = Channels.objects.get(id_channel=playlist.id_channel.id_channel)
    region = ChannelsZone.objects.get(id_zone_channel=playlist.id_zone_channel.id_zone_channel)
    ftp_channel_name = channels.ftp_channel_name
    networkname = channels.networkname
    zonename = region.zonename
    broadcastdate = playlist.broadcastdate.replace("-","")
    result = Verifs.objects.filter(networkname=networkname,zonename=zonename,broadcastDate=broadcastdate).last()
    d = playlist.broadcastdate
    broadcastDate_verif = str(playlist.broadcastdate).replace("-","")
    win = Windows.objects.filter(id_playlist=playlist.id_playlist).values_list('id_window', flat=True)
    data = {}
    data["windows"] = []
    data["number_of_wins"] = 0
    i = 0
    record_inserted = 0
    windows = Windows.objects.filter(id_playlist=playlist)
    for window in windows:
        window_dic = {}
        window_dic['i'] = i
        i = i+1
        window_dic['id_window'] = window.id_window
        window_start_formated = datetime.datetime.strptime(window.window_start, '%Y-%m-%d %H:%M:%S')
        window_start_formated_2 = window_start_formated.strftime("%H:%M")
        window_end_formated = datetime.datetime.strptime(window.window_end, '%Y-%m-%d %H:%M:%S')
        window_end_formated_2 = window_end_formated.strftime("%H:%M")
        window_dic['window_start'] = window_start_formated_2
        window_dic['window_end'] = window_end_formated_2
        window_dic['avails_in_win'] = []
        window_dic['num_of_avails'] = 0
        data["number_of_wins"] = i
        avails = Avails.objects.filter(id_window=window.id_window)
        j = 0
        for avail in avails:
            avail_dic = {}
            avail_dic["j"] = j
            j = j+1
            avail_dic["id_avail"] = avail.id_avail
            avail_start_formated = datetime.datetime.strptime(avail.avail_start, '%Y-%m-%d %H:%M:%S')
            avail_start_formated2 = avail_start_formated.strftime("%H:%M")
            avail_dic["avail_start"] = avail_start_formated2
            avail_dic["adspots_in_avail"] = []
            window_dic["avails_in_win"].append(avail_dic)
            adspots = AdspotsInAvail.objects.filter(id_avail=avail.id_avail)
            window_dic['num_of_avails'] = j
            k = 0
            avail_dic["num_of_adspots"] = 0
            for adspot in adspots:
                adspot_dic = {}
                adspot_dic["k"] = k
                k = k+1
                avail_dic["num_of_adspots"] = k
                adspot_dic["id_adsinavail"] = adspot.id_adsinavail
                adspot_dic["id_adspot"] = adspot.id_adspot
                print("Wait I'm in database")
                try:
                    result = Verifs.objects.filter(broadcastDate = broadcastDate_verif, trafficId = adspot.trafficid, revision__lte = int(playlist.version)).latest('id_verif')
                    adspot_dic["airStatusCode"] = result.airStatuscode
                    adspot_dic["airTime"] = result.airTime
                except Verifs.DoesNotExist:
                    # no verification row for this spot yet
                    print("verification not ready yet")
                avail_dic["adspots_in_avail"].append(adspot_dic)
        data["windows"].append(window_dic)
    data_playlist = {'data':data["windows"],'playlist': playlist, 'channels': channels,'region':region,'d':d,'number_of_wins':data["number_of_wins"],'record_inserted':record_inserted}
    return render(request, "DAIManagementApp/expande.html", data_playlist)

@check_user
def generate_playlist_from_prediction(request):
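    """On POST, create a playlist for the chosen day/channel/zone with one window per DayTime slot and one empty avail per predicted adbreak; otherwise render the form."""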
    if request.method == 'POST':
        # get day + channel + zone + user_id
        useraccess = Useraccess.objects.get(id_user=request.session['id_user'])
        channel_id = request.POST.get('channel_id')
        channeldata = Channels.objects.get(id_channel=channel_id)
        zonename = request.POST.get('zonename')
        daydate = request.POST.get('day')
        channel_zone = ChannelsZone.objects.get(id_channel=channel_id, zonename=zonename)


        # generate playlist with fixed windows

        daydate = datetime.datetime.strptime(str(daydate), '%m/%d/%Y')
        daydate = daydate.strftime('%Y-%m-%d')

        active_campaigns = Campaigns.objects.filter(id_user_id=request.session['id_user'], start_day__lte=str(daydate), end_day__gte=str(daydate))
        print(active_campaigns)

        start_date = str(daydate) + "T00:01:00+00:00"
        end_date = str(daydate) + "T23:59:00+00:00"
        now = datetime.datetime.now()
        Playlist = Playlists(id_channel_id=channel_id ,version="1", broadcastdate=str(daydate), start_date=start_date, end_date=end_date, creation_datetime=now, id_zone_channel_id=channel_zone.id_zone_channel,is_draft='0',draft_version='0')
        Playlist.save()
        traffic = 0
        record_inserted = 0
        day_times = DayTime.objects.exclude(id_time = 13)
        for day_time in day_times:
            # if request.POST.get('numofavails['+str(i)+']'):
            # numofavails = request.POST.get('numofavails['+str(i)+']')

            window_start = day_time.start
            window_start = daydate+' '+window_start+':00'
            window_end = day_time.end
            window_end = daydate+' '+window_end+':00'
            FMT = '%Y-%m-%d %H:%M:%S'
            window_duration = datetime.datetime.strptime(window_end, FMT) - datetime.datetime.strptime(window_start, FMT)
            window_duration = datetime.datetime.strptime(str(window_duration), '%H:%M:%S')
            window_duration = window_duration.strftime('%H%M%S00')
            Window = Windows(id_playlist_id=Playlist.id_playlist, window_start=window_start, window_end=window_end, window_duration=window_duration )
            Window.save()
            # for campaign in active_campaigns:
            #     camp_in_widnow = Placement.objects.filter(id)
            adbreaks_predected_in_window = Adbreak_predict.objects.filter(time__lt=day_time.end, time__gte=day_time.start).filter(day=daydate, id_channel = channel_id ).order_by('time')
            adbreaks_predected_in_window = adbreaks_predected_in_window.exclude(time=day_time.end)

            if not adbreaks_predected_in_window:
                Window.delete()
            j = 0


            for adbreak in adbreaks_predected_in_window:

                av_start = adbreak.time
                av_start = daydate+' '+av_start+':00'
                # number_of_ads = request.POST.get('numofads['+str(i)+']['+str(j)+']')
                Avail = Avails(id_window_id=Window.id_window, avail_start=av_start, availinwindow=str(j+1), datetime=now )
                Avail.save()
                j = j+1

                # for campaign in active_campaigns:
                #     camp_in_widnow = Placement.objects.filter(id_campaign = campaign.id_campaign, id_time = day_time.id_time, id_channel = channel_id )
                #     if camp_in_widnow:

                    # for k in range(int(number_of_ads)):
                    #     if request.POST.get('ad['+str(i)+']['+str(j)+']['+str(k)+']'):
                    #         adspot = request.POST.get('ad['+str(i)+']['+str(j)+']['+str(k)+']')
                    #         traffic +=1
                    #         AdspotsInAv = AdspotsInAvail(id_avail_id=Avail.id_avail, id_adspot_id=adspot, positioninavail=str(k+1), trafficid=traffic)
                    #         AdspotsInAv.save()



                # insert empty avails in those windows from adbreak predict
                # insert adposts from campaigns into those avails
        channels = Channels.objects.filter(id_user=request.session['id_user'])
        return render(request, "DAIManagementApp/generate_playlist_from_prediction.html",{'data': {'channels': channels}})

    channels = Channels.objects.filter(id_user=request.session['id_user'])
    return render(request, "DAIManagementApp/generate_playlist_from_prediction.html",{'data': {'channels': channels}})

@check_user
def add_emissions(request):
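    """On POST, create an Emissions entry from the submitted form, storing its image from the uploaded file or from the given URL; otherwise render the converter form."""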
    if request.method == 'POST':
        emission = request.POST.get('emission_name')
        emission= emission.strip()
        emission_name= emission.replace(' ','_')
        channel_id = request.POST.get('channel')
        emission_start = request.POST.get('emission_start')

        start_date = emission_start.split('T')[0]
        start_time = emission_start.split('T')[1]
        emission_end = request.POST.get('emission_end')

        end_date = emission_end.split('T')[0]
        end_time = emission_end.split('T')[1]
        short_desc = request.POST.get('short_desc')
        genre = request.POST.get('genre')
        category = request.POST.get('category')
        episode_name = request.POST.get('episode_name')
        season_number = request.POST.get('season_number')
        episode_number = request.POST.get('episode_number')
        extended = request.POST.get('extended')
        year= start_date.split('-')[0]

        print(emission,channel_id,start_time,end_time ,short_desc, sep=' ')
        channel = Channels.objects.get(pk=channel_id)
        path_emission = 'emissions/'+channel.channel_name.replace(' ','_')+'/'+start_date.replace('-','_')+'/'+emission_name+'/'

        if not os.path.exists("static/"+path_emission):
            os.makedirs("static/"+path_emission)


        try :
            path_image = path_emission+'image/'
            if not os.path.exists('static/'+path_image):
                os.makedirs("static/"+path_image)
            handle_uploaded_file(request.FILES['image'], "static/"+path_image, emission+'.png')
            path_image += emission+'.png'
        except Exception:
            # no uploaded file: fall back to downloading the image from the submitted URL
            try :
                import requests
                path_image = path_emission+'image/'
                image = request.POST.get('image')
                response = requests.get(image)

                with open('static/'+path_image+emission+'.png', "wb") as image_file:
                    image_file.write(response.content)
                path_image += emission+'.png'

            except Exception:
                path_image = ''



        new_emission=Emissions(emission_name=emission.lower().strip(),id_channel=channel,start_date=start_date,start_time=start_time,
                            end_date=end_date,end_time=end_time,image=path_image)
        new_emission.save()




        return redirect(edit_emissions)
        #os.system(f"ffmpeg -i {video_path}/{file} -ss {start} -to {end} -c copy {output_path}/{file_name}_part2.mp4")


    try :
        channels = Channels.objects.filter(id_user=request.session['id_user'])
        return render(request,'emissions/converter.html',{'channels':channels})
    except Exception:
        return redirect(login)

@check_user
def upload_epg(request):
    if request.method == 'POST':
        filename = str(request.FILES['epg_file']).replace(' ','_')
        print(filename)
        path = "epg/user_"+ request.session['id_user']
        if not os.path.exists(path):
            os.makedirs(path)
        handle_uploaded_file(request.FILES['epg_file'], path, filename)
        insert_epg(path+'/'+filename)
    return render(request , "DAIManagementApp/epg_upload.html" )

def insert_epg(path):
    """Load an EPG Excel export and insert one Epg row per event for channel 1."""
    import pandas as pd
    from datetime import datetime
    channel = Channels.objects.get(pk=1)
    df = pd.read_excel(path)
    epg = df[['Event Name' ,'Start Date' ,'Start Time','End Date' , 'End Time' , 'Genre']]
    for i in epg.values:
        startdate = datetime.strptime(i[1],"%d/%m/%Y")
        enddate = datetime.strptime(i[3],"%d/%m/%Y")
        starttime= str(i[2])[:-3] +'.000000'
        endtime=  str(i[4])[:-3]  +'.000000'
        emission = Epg(id_channel=channel,emission_name=i[0],start_date=startdate,start_time=starttime.strip(),end_date=enddate,end_time=endtime.strip(),genre=i[5])
        emission.save()





@check_user
def insert_product(request):
    product = Product(
        name = request.POST.get('product_name'),
        description = request.POST.get('product_description', None)
    )
    # assumption: the new product should simply be saved and acknowledged
    product.save()

    return JsonResponse({'id': product.pk, 'name': product.name})





def process_adslot(request):
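    """Fetch the VisionR playlist JSON for almatv and insert any ad slots flagged isPub == '1' that are not already stored in AdslotsVisionr."""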
    from datetime import datetime
    from .utils import TelegramLog
    # URL to fetch the playlist JSON
    url = "https://almatv-stor.vizionr.fr/synthesia/almatv/playlist/manager/php/_getPlaylist.php?fields=title,isPub,duration"
    playlists = []
    # Function to fetch and parse the playlist JSON from the URL
    def fetch_playlist_data(url):
        response = requests.get(url)
        if response.status_code == 200:
            return response.json()
        else:
            return None
    def insert_ad_slots(ad_slots_json):
        # skip ads whose (playlistdate, subid) pair is already stored
        date_format = "%Y-%m-%d"  # Adjust as necessary to match the format in your JSON
        for playlist in ad_slots_json:  # `ad_slots_json` is the parsed JSON data
            playlist_date_str = playlist.get("playlistDate", "")

            # Validate and format playlist_date
            try:
                playlist_date = datetime.strptime(playlist_date_str, date_format).date()
            except ValueError:
                # If there's an error parsing the date, use the current date as default
                playlist_date = datetime.now().date()

            for ad in playlist.get('data', []):
                if ad.get('isPub', 0) == '1':
                    try:
                        # Check if an object with the same values already exists
                        print("SUB ID: ", ad.get("subId"))
                        adslot = AdslotsVisionr.objects.get(
                            playlistdate=playlist_date,
                            subid=f"{ad.get('subId')}",
                        )
                        print("Found Object", adslot)

                    except AdslotsVisionr.DoesNotExist:
                        # build and save a fresh row for this ad
                        visioner = AdslotsVisionr()
                        visioner.adid = ad.get('id', '')
                        visioner.subid = ad.get('subId', '')
                        visioner.start = ad.get('start', None)
                        visioner.end = ad.get('end', None)
                        visioner.startts = ad.get('startTs', None)
                        visioner.endts = ad.get('endTs', None)
                        visioner.cut = ad.get('cut', None)
                        visioner.scenario = ad.get('scenario', '')
                        visioner.duration = ad.get('duration', None)
                        visioner.ispub = ad.get('isPub', None)
                        visioner.title = ad.get('title', '')
                        visioner.replacementadid = ""
                        visioner.replacementpath = ""
                        visioner.replacementduration = None
                        visioner.status = 'Pending'
                        visioner.updatesent = 0
                        visioner.playlistdate = playlist_date
                        visioner.save()

                        # telegram = TelegramLog()
                        # telegram.send_telegram_log(f"Error While insert New Playlist {str(e)}")

        return playlists

                        # telegram = TelegramLog()
                        # telegram.send_telegram_log(f"Error While insert New Playlist {str(e)}")

        return playlists

    ad_slots = fetch_playlist_data(url)

    # print(ad_slots)
    if ad_slots:
        insert_ad_slots(ad_slots)
    else:
        print("Failed to fetch playlist data")
    return JsonResponse(playlists,safe=False, status=200)






def process_ad_slots(request):
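    """Fetch the playlist JSON from the tvpitchoun endpoint and insert its ad_break slots into AdSlots_VisionR via a raw INSERT IGNORE."""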
    from django.db import connection
    import requests
    from datetime import datetime
    # URL to fetch the playlist JSON
    url = "https://tvpitchoun-stor.easytools.tv/synthesia/tvpitchoun/playlist/manager/php/_getPlaylist.php?fields=title,internal,duration"

    # Function to fetch and parse the playlist JSON from the URL
    def fetch_playlist_data(url):
        username = "advmanager"
        password = "DFG$_Rv!934"
            
        response = requests.get(url,auth=(username,password))
        if response.status_code == 200:
            return response.json()
        else:
            return None


    def insert_ad_slots(ad_slots_json):
        # check if subid already in database skip it
        #
        cursor = connection.cursor()
        insert_query = """
            INSERT IGNORE INTO AdSlots_VisionR
            (AdId, SubId, Start, End, StartTs, EndTs, Cut, Scenario, Duration, IsPub, Title, ReplacementAdId, Status, UpdateSent, PlaylistDate)
            VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
        """

        # Example format for PlaylistDate in your JSON: "2024-01-22"
        date_format = "%Y-%m-%d"  # Adjust as necessary to match the format in your JSON

        for playlist in ad_slots_json:  # Assuming `ad_slots_json` is the parsed JSON data
            playlist_date_str = playlist.get("playlistDate", "")

            # Validate and format playlist_date
            try:
                playlist_date = datetime.strptime(playlist_date_str, date_format).date()
            except ValueError:
                # If there's an error parsing the date, use the current date as default
                playlist_date = datetime.now().date()

            for ad in playlist.get('data', []):
                data_tuple = (
                    ad.get('id', ''),
                    ad.get('subId', ''),
                    ad.get('start', 0),
                    ad.get('end', 0),
                    ad.get('startTs', 0),
                    ad.get('endTs', 0),
                    ad.get('cut', 0),
                    ad.get('scenario', ''),
                    ad.get('duration', 0),
                    ad.get('isPub', 0),
                    ad.get('title', ''),
                    '',
                    'Pending',
                    False,
                    playlist_date
                )
                if ad.get('internal', 0) == 'ad_break':
                    cursor.execute(insert_query, data_tuple)
            # connection.commit()

        cursor.close()

    # Main logic to fetch playlist data and insert into the database
    ad_slots = fetch_playlist_data(url)

    # print(ad_slots)
    if ad_slots:
        insert_ad_slots(ad_slots)
        print("Adslot Insert SuccessFully")
    else:
        print("Failed to fetch playlist data")

def vast_handling_old(url,headers,params,call_num,spot):
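    """Request a VAST tag and, inside a transaction, store the first Ad's tracking and impression URLs as a VastResponse tied to the given adspot, firing the impression URLs when present."""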
    import requests
    from requests.auth import HTTPProxyAuth
    from concurrent.futures import ThreadPoolExecutor
    from urllib3.util import parse_url
    import random
    import datetime
    import os
    import xml.etree.ElementTree as ET
    
    from bs4 import BeautifulSoup

    # Create a folder with current datetime
    current_datetime = datetime.datetime.now().strftime('%Y-%m-%d_%H%M%S')
    folder_path = os.path.join(os.getcwd(), str(current_datetime)+"_alma")
    # get adpots
    # Ninja GO 2401843p2gl.ts"
    # City  LegoMack.ts
    # Technic LegoMack.ts
    # Star Wars 2403484p2gl.ts

    adspot = Adspots.objects.get(filename=spot)
    # print("Adspot: ",adspot.filename)s
    with transaction.atomic():
        response = requests.get(url, headers=headers, params=params)
        if response.status_code == 200:
            vast_response = None #! need an edit
            
            if vast_response:
                # print("the vast response is :", vast_response)
                root = ET.fromstring(vast_response)
                print(vast_response)
                
                print("the ROOT is :", root)
                print("the ROOT find is :", root.text)# .attrib.get("Ad"))
                current_time = datetime.datetime.now()
                datetime_timestamp = current_time
                datetime_string = current_time.strftime("%Y-%m-%d %H:%M:%S")
                # print(tracking_start)
                # Extract data from the XML
                if root.find(".//Ad"):

                    ad_id = root.find(".//Ad").attrib["id"]
                    tracking_start = root.find(".//Tracking[@event='start']").text
                    tracking_first_quartile = root.find(".//Tracking[@event='firstQuartile']").text
                    tracking_midpoint = root.find(".//Tracking[@event='midpoint']").text
                    tracking_third_quartile = root.find(".//Tracking[@event='thirdQuartile']").text
                    tracking_complete = root.find(".//Tracking[@event='complete']").text
                    # start edit
                    impression_double_click = None
                    impression_sprintserve = None
                    # if root.find(".//Impression[@id='SpringServe_Impression_1']") == None:
                        # Try to get the full element content including CDATA
                        # soup = BeautifulSoup(vast_response)
                    double_click = root.find(".//Impression")
                    print("DOUBLE CLICK: ", double_click)
                    # assumed fix: take the impression URL from the element text (was a self-assignment)
                    impression_double_click = double_click.text if double_click is not None else None
                    print("Impression: ", impression_double_click)

                    # else:
                    #     impression_sprintserve = root.find(".//Impression[@id='SpringServe_Impression_1']")
                    #     impression_double_click = root.find(".//Impression[@id='SpringServe_Impression_3P_1']").text


                    # Create and save a new VASTResponse object

                    vast_obj = VastResponse.objects.create(
                        ad_id=ad_id,
                        tracking_start = tracking_start,
                        tracking_firstquartile=tracking_first_quartile,
                        tracking_midpoint=tracking_midpoint,
                        tracking_thirdquartile=tracking_third_quartile,
                        tracking_complete=tracking_complete,
                        datetime_timestamp=datetime_timestamp,
                        datetime_string=datetime_string,
                        vast_url=url,
                        impression_double_click=impression_double_click,
                        impression_sprintserve=impression_sprintserve,
                        ad_spot=adspot
                    )
                    # save tracking links
                    if vast_obj.impression_sprintserve:
                        sprintserve_response = requests.get(impression_sprintserve, headers=headers)
                        vast_obj.impression_sprintserve_status = sprintserve_response.text or None

                    if impression_double_click:
                        double_click_response = requests.get(impression_double_click, headers=headers)
                        vast_obj.impression_double_click_status = double_click_response.text or None

                    vast_obj.save()

                else:

                    VastResponse.objects.create(
                            datetime_timestamp=datetime_timestamp,
                            datetime_string=datetime_string,
                            vast_url=url,
                            # ad_spot=adspot
                        )





                print(f'sent request num {call_num}')
            else:
                print(f'Error: empty VAST response (HTTP status code: {response.status_code})')

    # str_volume = str(total_volume)
    # telegram_message="""
    # ============== ALMA API ==============
    # URL: """+response.url+"""
    #
    # Method: """+str(response.request.method)+"""
    #
    # Headers: """+str(response.request.headers)+"""
    #
    # Number of requests sent: """+str_volume+"""
    # ====================================
    # """
    # send_msg_telegram3(telegram_message)
    #
    # print(f"URL: {response.url}")
    # print(f"Method: {response.request.method}")
    # print(f"Headers: {response.request.headers}")
    # if response.request.body:
    #     print(f"Body: {response.request.body}")

    ## after 10 minutes call to the new function
        # Initialize data as an empty dictionary
    # =============================== SAVING JSON ================
    # json_file_path = '/var/www/html/DAI27/Adtlas_DAI/sync_validation_dates.json'
    # data = {}

    # # Check if the JSON file exists
    # if os.path.exists(json_file_path):
    #     with open(json_file_path, 'r') as json_file:
    #         # Load existing data from the JSON file
    #         data = json.load(json_file)

    #     # Rename 'last_date' to 'previous_date' if it exists
    #     data['previous_date'] = data.get('last_date', None)

    # # Get the current date and time in the specified format and
    # # Update 'last_date' with the current date and time
    # # data['last_date'] = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    # # Add 10 minutes to the current date and time and format it
    # data['last_date'] = (datetime.datetime.now() + timedelta(minutes=10)).strftime('%Y-%m-%d %H:%M')

    # # Write the updated data back to the JSON file
    # with open(json_file_path, 'w') as json_file:
    #     json.dump(data, json_file, indent=4)
    # ================ END SAVING JSON
    return call_num

def vast_handling(url, headers, params, call_num, spot):
    import datetime
    import requests
    from .models import VastResponseGo

    adspot = Adspots.objects.get(filename=spot)

    with transaction.atomic():
        # Fetch XML from URL
        xml_content = fetch_xml_from_url(url)
        if xml_content is None:
            print("Failed to get content, stopping process")
            return
        
        # Parse XML
        root = parse_xml_content(xml_content)
        if root is None:
            print("Failed to parse XML, stopping process")
            return

        datetime_timestamp = datetime.datetime.now()
        datetime_string = datetime_timestamp.strftime("%Y-%m-%d %H:%M:%S")  

        ad_components, ad_component_list = extract_data_by_attributes(root, "Ad")
        # 
        for i in range(len(ad_component_list)):
            ad_component_data = ad_component_list[i] 
            ad_component  = ad_components[i]
            if "attributes" in ad_component_data and "id" in ad_component_data["attributes"]:
                # 
                ad_id = ad_component_data["attributes"]["id"]
                
                #     
                _, tracking_start_list = extract_data_by_attributes(ad_component, "Tracking", {"event": "start"})
                _, tracking_first_quartile_list = extract_data_by_attributes(ad_component, "Tracking", {"event": "firstQuartile"})
                _, tracking_midpoint_list = extract_data_by_attributes(ad_component, "Tracking", {"event": "midpoint"})
                _, tracking_third_quartile_list = extract_data_by_attributes(ad_component, "Tracking", {"event": "thirdQuartile"})
                _, tracking_complete_list = extract_data_by_attributes(ad_component, "Tracking", {"event": "complete"})
                _, tracking_mute_list = extract_data_by_attributes(ad_component, "Tracking", {"event": "mute"})
                _, tracking_unmute_list = extract_data_by_attributes(ad_component, "Tracking", {"event": "unmute"})
                _, tracking_pause_list = extract_data_by_attributes(ad_component, "Tracking", {"event": "pause"})
                _, tracking_fullscreen_list = extract_data_by_attributes(ad_component, "Tracking", {"event": "fullscreen"})
                #  
                tracking_start = tracking_start_list[0]["text"] if len(tracking_start_list) >= 1 else None
                tracking_first_quartile = tracking_first_quartile_list[0]["text"] if len(tracking_first_quartile_list) >= 1 else None
                tracking_midpoint = tracking_midpoint_list[0]["text"] if len(tracking_midpoint_list) >= 1 else None
                tracking_third_quartile = tracking_third_quartile_list[0]["text"] if len(tracking_third_quartile_list) >= 1 else None
                tracking_complete = tracking_complete_list[0]["text"] if len(tracking_complete_list) >= 1 else None
                tracking_mute = tracking_mute_list[0]["text"] if len(tracking_mute_list) >= 1 else None
                tracking_unmute = tracking_unmute_list[0]["text"] if len(tracking_unmute_list) >= 1 else None
                tracking_pause = tracking_pause_list[0]["text"] if len(tracking_pause_list) >= 1 else None
                tracking_fullscreen = tracking_fullscreen_list[0]["text"] if len(tracking_fullscreen_list) >= 1 else None
                # 
                _, impression_list = extract_data_by_attributes(ad_component, "Impression")

                impression = None
                impression_3p = None
                if len(impression_list) == 1 :
                    impression = impression_list[0]["text"]
                    impression_3p = None
                elif len(impression_list) == 2 :  
                    impression = next((item["text"] for item in impression_list if "attributes" in item and "id" in item["attributes"] and item["attributes"]["id"] == "SpringServe_Impression_1"), None)
                    impression_3p = next((item["text"] for item in impression_list if "attributes" in item and "id" in item["attributes"] and item["attributes"]["id"] == "SpringServe_Impression_3P_1"), None)
                else:
                    print("Need To Be Handel ")

                # Create and save a new VASTResponse object
                vast_obj = VastResponseGo.objects.create(
                    ad_id=ad_id,
                    tracking_start = tracking_start,
                    tracking_firstquartile=tracking_first_quartile,
                    tracking_midpoint=tracking_midpoint,
                    tracking_thirdquartile=tracking_third_quartile,
                    tracking_complete=tracking_complete,
                    datetime_timestamp=datetime_timestamp,
                    datetime_string=datetime_string,
                    vast_url=url,
                    impression_double_click=impression,
                    impression_sprintserve=impression_3p,
                    ad_spot=adspot
                )
                # save tracking links
                if impression_3p:
                    sprintserve_response = requests.get(impression_3p, headers=headers)
                    vast_obj.impression_sprintserve_status = sprintserve_response.text or None
                    # print(sprintserve_response.status_code)

                if impression:
                    double_click_response = requests.get(impression, headers=headers)
                    vast_obj.impression_double_click_status = double_click_response.text 
                    # print(double_click_response.status_code)
                    
                if tracking_complete:
                    completed_click_response = requests.get(tracking_complete, headers=headers)
                    vast_obj.tracking_completed_status = completed_click_response.text 
                    # print(completed_click_response.status_code)
                
                if tracking_first_quartile:
                    first_quartile_response = requests.get(tracking_first_quartile, headers=headers)
                    vast_obj.tracking_firstquartile_status = first_quartile_response.text
                    # print(first_quartile_response.status_code)

                if tracking_midpoint:
                    tracking_midpoint_response = requests.get(tracking_midpoint, headers=headers)
                    vast_obj.tracking_midpoint_status = tracking_midpoint_response.text
                    # print(tracking_midpoint_response.status_code)
                
                if tracking_third_quartile:
                    tracking_third_response = requests.get(tracking_third_quartile, headers=headers)
                    vast_obj.tracking_thirdquartile_status = tracking_third_response.text
                    # print(tracking_third_response.status_code)
                

                vast_obj.save()
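
# Illustrative usage sketch (not invoked here; the spot filename is one of the examples mentioned in the
# comments above, and the URL, headers, and params mirror call_vast_api further down):
#   vast_handling(
#       url="https://tv.springserve.com/vast/850576",
#       headers={"User-Agent": "Mozilla/5.0 ..."},
#       params={"w": "720", "h": "567"},
#       call_num=1,
#       spot="2401843p2gl.ts",
#   )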


# VAST CALL to ALMA
def call_vast_api(request):
    from .tasks import call_vast
    import requests
    from requests.auth import HTTPProxyAuth
    from concurrent.futures import ThreadPoolExecutor
    from urllib3.util import parse_url
    import random
    import datetime
    import os
    import xml.etree.ElementTree as ET

    iab_cat = {
        'Feuilleton': 500001 ,
        'Magazine': 500005,
        'Mini-Série': 500001,
        'Magazine ': 500005,
        'Mini-Serie': 500001,
        'Serie': 500001,
        'Information': 500011,
        'Magazine Sportif': 500013,
        'Dessin Anime': 500004,
        'Magazine Sportif ': 500013,
        'Telefilm': 500001,
        'Documentaire': 500003,
        'Religieux': 500008,
        'Sport': 500013,
        'Long Metrage': 500002,
        'News': 500011,
        'Long-Metrage': 500002,
        'Des Histoires Et Des Hommes': 500001,
        'Série': 500001,
        'Musique': 500006,
        'Theatre': 500007,
        'Spectacle': 500012,
        'Jeux': 500014
    }
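    # e.g. iab_cat["Dessin Anime"] -> 500004; the commented-out lookup below maps an EPG genre
    # to its category code when one is available.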

    # iab_c = iab_cat[genre]

    params = {
        'w': '720',
        'h': '567',
        'content_genre': "Dessin Anime",
        'content_title': "Abtal albihar",
        'language': 'ar-MA',
        'pod_max_dur': 20,
        'channel_name': '2M_TV',
        'country': 'France'

    }

    # num_requests = total_volume
    num_requests = 20
    # *Links to send you have to comment them one by one

    #! Weaber
    # url = "https://tv.springserve.com/vast/769609"

    #! City
    # url = 'https://tv.springserve.com/vast/850575?w=1920&h=1080&cb={{CACHEBUSTER}}&ip={{IP}}&ua={{USER_AGENT}}&pod_max_dur={{POD_MAX_DUR}}&pod_ad_slots={{POD_AD_SLOTS}}&app_bundle={{APP_BUNDLE}}&app_name={{APP_NAME}}&app_store_url={{APP_STORE_URL}}&did={{DEVICE_ID}}&us_privacy={{US_PRIVACY}}'

    #! Ninjago
    url = "https://tv.springserve.com/vast/850576?w=1920&h=1080&cb={{CACHEBUSTER}}&ip={{IP}}&ua={{USER_AGENT}}&pod_max_dur={{POD_MAX_DUR}}&pod_ad_slots={{POD_AD_SLOTS}}&app_bundle={{APP_BUNDLE}}&app_name={{APP_NAME}}&app_store_url={{APP_STORE_URL}}&did={{DEVICE_ID}}&us_privacy={{US_PRIVACY}}"

    #! Technic
    # url = "https://tv.springserve.com/vast/850577?w=1920&h=1080&cb={{CACHEBUSTER}}&ip={{IP}}&ua={{USER_AGENT}}&pod_max_dur={{POD_MAX_DUR}}&pod_ad_slots={{POD_AD_SLOTS}}&app_bundle={{APP_BUNDLE}}&app_name={{APP_NAME}}&app_store_url={{APP_STORE_URL}}&did={{DEVICE_ID}}&us_privacy={{US_PRIVACY}}"

    #! Star Wars
    # url = "https://tv.springserve.com/vast/852369?w=1920&h=1080&cb={{CACHEBUSTER}}&ip={{IP}}&ua={{USER_AGENT}}&pod_max_dur={{POD_MAX_DUR}}&pod_ad_slots={{POD_AD_SLOTS}}&app_bundle={{APP_BUNDLE}}&app_name={{APP_NAME}}&app_store_url={{APP_STORE_URL}}&did={{DEVICE_ID}}&us_privacy={{US_PRIVACY}}"

    headers = {
        'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/103.0.0.0 Safari/537.36'
    }
    vast = call_vast.delay(url=url,num_requests=num_requests,params=params)


    proxy_list = [
        # {"http": "http://91.208.69.128:12323", "https": "https://91.208.69.128:12323", "auth": HTTPProxyAuth("14ae3c1adf82b", "c66b1ace34")},
        # {"http": "http://45.149.167.59:12323", "https": "https://45.149.167.59:12323", "auth": HTTPProxyAuth("14ae3c1adf82b", "c66b1ace34")},
        # {"http": "http://191.101.246.245:12323", "https": "https://191.101.246.245:12323", "auth": HTTPProxyAuth("14ae3c1adf82b", "c66b1ace34")},
        # {"http": "http://191.101.246.108:12323", "https": "https://191.101.246.108:12323", "auth": HTTPProxyAuth("14ae3c1adf82b", "c66b1ace34")},
        # {"http": "http://185.248.48.237:12323", "https": "https://185.248.48.237:12323", "auth": HTTPProxyAuth("14ae3c1adf82b", "c66b1ace34")},


        # {"http": "185.124.241.179:12323", "auth": HTTPProxyAuth("14a2f5dfde475", "51ae51af21")},
        # {"http": "89.116.242.185:12323", "auth": HTTPProxyAuth("14a2f5dfde475", "51ae51af21")},
        # {"http": "45.140.244.198:12323", "auth": HTTPProxyAuth("14a2f5dfde475", "51ae51af21")},
        # {"http": "193.106.198.128:12323", "auth": HTTPProxyAuth("14a2f5dfde475", "51ae51af21")},
        # {"http": "136.175.224.210:12323", "auth": HTTPProxyAuth("14a2f5dfde475", "51ae51af21")},

        {"http": "185.124.241.179:12323",  "auth": HTTPProxyAuth("14a2f5dfde475", "51ae51af21")},
        {"http": "89.116.242.185:12323",   "auth": HTTPProxyAuth("14a2f5dfde475", "51ae51af21")},
        {"http": "45.140.244.198:12323",  "auth": HTTPProxyAuth("14a2f5dfde475", "51ae51af21")},
        {"http": "193.106.198.128:12323", "auth": HTTPProxyAuth("14a2f5dfde475", "51ae51af21")},
        {"http": "136.175.224.210:12323", "auth": HTTPProxyAuth("14a2f5dfde475", "51ae51af21")},







    ]

    proxies0 = [
        {"http": "http://14a2f5dfde475:51ae51af21@185.124.241.179:12323"},
        {"http": "http://14a2f5dfde475:51ae51af21@89.116.242.185:12323"},
        {"http": "http://14a2f5dfde475:51ae51af21@45.140.244.198:12323"},
        {"http": "http://14a2f5dfde475:51ae51af21@193.106.198.128:12323"},
        {"http": "http://14a2f5dfde475:51ae51af21@136.175.224.210:12323"},
    ]

    # Rest of your code here..
    # with ThreadPoolExecutor(max_workers=20) as executor:
    #     futures = [executor.submit(vast_handling, url, headers=headers, params=params,call_num=i) for i in range(1,num_requests+1)]
    #     response = [future.result() for future in futures]
    #     print(response)





def fetch_playlist_data(url):
    from datetime import datetime, timedelta
    response = requests.get(url)
    if response.status_code == 200:
        payload = response.json()
        head_data = payload[0]
        playlist_data = head_data['data']
        # return head_data['id']
        processed_data = [
            {
                **item,
                'head':head_data['id'],
                'date':head_data['id'],
                'id_start_combined': f"{head_data['id']} {item.get('start', '')}",
                'start_minus_one_week': (datetime.strptime(f"{head_data['id']} {item.get('start', '')}", '%Y-%m-%d %H:%M:%S.%f') - timedelta(weeks=1)).strftime('%Y-%m-%d %H:%M:%S.%f')
                }
            for item in playlist_data if "isPub" in item and item["isPub"] == '1'
        ]
        return processed_data
    else:
        return None
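
# Illustrative note (assumption, inferred from the comprehension above): each processed item keeps the
# original playlist fields and adds, e.g.
#   {"head": "2024-05-06", "date": "2024-05-06",
#    "id_start_combined": "2024-05-06 20:15:00.000000",
#    "start_minus_one_week": "2024-04-29 20:15:00.000000"}
# where head_data['id'] is assumed to be a '%Y-%m-%d' date string; the example values are hypothetical.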


def create_playlist_xml(data):
    import xml.etree.ElementTree as ET
    from datetime import datetime
    import uuid
    import random
    import ast
    # print("Playlist Data: ",data)
    # Create the root element
    playlist = ET.Element("playlist")
    # print("PLAY LIST INFORMATIONS: ",data)
    # Add attributes to the root element
    playlist_date = data["date"]
    print("Date: ",playlist_date)
    synthesia_id = data['id'][0]
    synthesia_sub_id = data["subId"][0]

    playlist.set("playlistDate", playlist_date)
    playlist.set("synthesiaId", synthesia_id)
    playlist.set("synthesiaSubId", synthesia_sub_id)

    # Create the "data" element
    data_element = ET.SubElement(playlist, "data")
    # Generate random videos for replacement
    # video_list = ['ISLA_30s_RAMADAN3.mp4','LEGO_minecraft.mp4','LEGO_starwars.mp4','Lego_1house.mp4','out_Zakia_new_v3.mp4']


    # auto fill xml
    # Loop through the provided data and add each item to the XML
    total_duration = 0
    alma_replacement_ads = AlmaAds.objects.filter(name="TGTG_30_16x9.mp4")
    print(alma_replacement_ads.count())
    
    # print(alma_replacement_ads)
    for video in alma_replacement_ads:
        
        # Check whether this ad still fits in the Alma ad interval
        #! This comparison should be between the video duration and the most used video in our API
        # print("Duration type: ", int(data['duration'][0]))
        print(total_duration)
        print(video.duration)
        if total_duration + int(video.duration) <= int(data['duration'][0]):
            print("We have a new ad; avail duration:", int(data['duration'][0]))
            media_element = ET.SubElement(data_element, "media")
            media_element.set("id", str(video.id))  # uniquely generated id
            media_element.set("path", f"{video.ads_path}")  # replacement ad path
            media_element.set("duration", str(video.duration))  # ad duration
            print("Inserting ad:", video.name)
            total_duration += video.duration  # update total duration
            print(f"We still have {int(data['duration'][0]) - total_duration} seconds empty")
        else:
            print(f"Done; {int(data['duration'][0]) - total_duration} seconds remain empty")
            # insert filler
   
    # # Create the XML string
    xml_declaration = '<?xml version="1.0" encoding="UTF-8"?>\n'
    xml_string = xml_declaration + ET.tostring(playlist, encoding="utf-8").decode()

    base_dir = "/var/www/html/DAI27/Adtlas_DAI/files/vast/pitchon"
    # Get current date for folder name
    today = datetime.now()
    date_folder = today.strftime("%Y-%m-%d")
    # Create full path including date folder
    folder_path = os.path.join(base_dir, date_folder)
    os.makedirs(folder_path, exist_ok=True)

    # Generate unique filename with timestamp and UUID
    filename = f"{str(datetime.now()).replace(' ','_').replace(':','_')}_{str(uuid.uuid4())}.xml"
    file_path = os.path.join(folder_path, filename)

    # Write the XML to a file
    # file_path = f"/var/www/html/DAI27/Adtlas_DAI/playlists/new/pitchon/{str(datetime.now()).replace(' ','_').replace(':','_')}_{str(uuid.uuid4())}.xml"
    with open(file_path, "w",encoding='utf-8') as xml_file:
        xml_file.write(xml_string)

    print("Uploading to FTP...")
    # ftp_server = 'almatv-stor.vizionr.fr'
    # username = 'almatv'
    # password = 'fs!z_Df$35-dF'

    remote_dir = '/PLAYLIST'
    # port=10120
    ftp_server = "tvpitchoun-stor.easytools.tv"
    username = "advmanager"
    password = "DFG$_Rv!934"
    port = 21


    # upload XML file to FTP
    ftp_client = FTPClient(ftp_server=ftp_server, port=port, username=username, password=password)
    ftp_client.upload_file(file_path,remote_dir)
    total_duration = 0 #set total duration 0 for next iteration
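
# Illustrative sketch (assumption, based on the elements built above): the generated file looks roughly like
#   <?xml version="1.0" encoding="UTF-8"?>
#   <playlist playlistDate="..." synthesiaId="..." synthesiaSubId="...">
#       <data>
#           <media id="1" path="/path/to/TGTG_30_16x9.mp4" duration="30"/>
#       </data>
#   </playlist>
# (attribute values here are hypothetical; the real ones come from the AlmaAds rows that fit the avail).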


class GetPlayListImpression(APIView):
    from rest_framework.response import Response


    def post(self, request, format=None):
        import datetime
        from collections import Counter
        from .utils import visioner_playlist
        if request.method == 'POST':
            playlist = request.data
            # print("Playlist: ",playlist)
            now=datetime.datetime.now()
            now_minus_week= now-datetime.timedelta(days=7)
            last_week_day = now_minus_week.strftime('%Y-%m-%d')
            last_week_minute = now_minus_week.strftime('%H:%M:00')

            # Assuming there's a field named 'timestamp' in your Epg model
            start_time = now_minus_week - timedelta(minutes=10)
            end_time = now_minus_week + timedelta(minutes=10)

            # Get SFR analytics volume for last week
            # try:
                # analytics_data = Sfr_analytics.objects.get(
                #     day=last_week_day,
                #     minute=last_week_minute,
                #     sfr_channel_name="2M Maroc"
                # )
                # total_volume = round(float(analytics_data.purcent) * 4500000 / 17)
                # print('Analytic SRF: ',analytics_data.minute)

            # except Sfr_analytics.DoesNotExist:
            #     total_volume = 0
            #     print({"message": "SFR Not Found"})
            #     return JsonResponse({'message':'SFR Not Found'})
            # finally:
        try:
            # get epg for last week based on start and interval
            # epg_data = Epg.objects.filter(start_time__lte=now_minus_week,end_time__gte=now_minus_week)
            # serialized_data = serialize('json', epg_data)
            # # Convert serialized data to Python data (list of dictionaries)
            # deserialized_data = json.loads(serialized_data)
            
            # call_vast.delay(
            #     emission_name=emission.emission_name,
            #     genre=emission.genre,
            #     epg_duration_in_seconds = (emission.end_time - emission.start_time).total_seconds(),
            #     total_volume = total_volume
            #     )
            
            # check if that ads is being replaced before
            if AdslotsVisionr.objects.filter(playlistdate=playlist['date'],subid=playlist['subId'])[0].status == "Completed":
                print("This Ad Has Already Been Replaced")
                return JsonResponse({"message":"This Ad Has Already Been Replace"})
            else:
                processed_data = {
                    **playlist,
                    "date": playlist['date']
                }
                # Here we need to create the playlist based on the most frequently replaced ads, but we have to wait for the Alma response
                print("Creating playlist", playlist['date'])
                try:
                    most_common_title = Counter(item['title'] for item in visioner_playlist()).most_common(1)
                    if most_common_title:
                        common_title = most_common_title[0][0]
                        print("The most replaced ads is:",common_title)
                        for item in visioner_playlist():
                            if item['title'] == common_title:
                                # print("Full item details:", item)
                                pass
                    create_playlist_xml(processed_data)
                except Exception as e:
                    raise e
                # # Update Adslots Visioner Status
                adslot = AdslotsVisionr.objects.filter(playlistdate=playlist['date'],subid=playlist['subId'])[0]
                adslot.status = "Completed"
                adslot.save()
                # print([item['title'] for item in playlist])



                return JsonResponse({"message":"Alma Ads Has bees replaces Succesfully","status":"success"})
        except Epg.DoesNotExist:
            print("Update Epg Database")
            return JsonResponse({"message":"EPG Unsynchronized","status":"failed"})


import ast
from django.views import View
from django.db.models import Q
from django.utils import timezone
from django.utils.dateparse import parse_datetime

from .utils import calculate_priority_score, FTPConnector
from .models import Adbreaks, ChannelsZone, Epg, Campaigns, Campaignairtimelog, CampaignPriorityScores, Adspots, CampaignTimeIntervals



class DynamicAdPlaylist(View):

    from .utils import TelegramLog

    # telegram_bot = TelegramLog(BOT_TOKEN="6723497869:AAEKCB3YJUGnUFeMihLlr-FHblTuvtoVUZA",GROUP_ID="-4153299009")
    telegram_bot = TelegramLog(BOT_TOKEN="6599658496:AAFdK7Gsm1LvgYCIw2o8_frcbte1yNh-w44",GROUP_ID="-4175287619")

    def get(self, request):
        import pytz
        from datetime import datetime, timedelta

        try:
            # Assigning identifiers for channel and regional data.
            channel_id = 1
            channel_name = "2M Monde"
            # Retrieving start date and duration parameters from the request.
            start_date_time_str = request.GET.get("start")
            duration_seconds = float(request.GET.get("duration", 20))
            region_test = request.GET.get("region", "France")
            # Parsing the start date and time string into a datetime object.
            start_datetime = datetime.strptime(start_date_time_str, "%Y-%m-%d %H:%M:%S.%f")
            # Define the timezone for France or Netherlands
            local_timezone = "Europe/Paris" # "Europe/Amsterdam"
            france_timezone = pytz.timezone(local_timezone)
            start_datetime_france = france_timezone.localize(start_datetime)
            # Define the timezone for Morocco
            morocco_timezone = pytz.timezone("Africa/Casablanca")
            start_datetime_morocco = morocco_timezone.localize(start_datetime)
            # Adjusting the start datetime by adding 2 hours to align with a specific timezone or scheduling requirement.
            start_datetime = start_datetime + timedelta(hours=2)
            # Ensuring timezone awareness for the start datetime to maintain consistency and prevent ambiguity.
            start_datetime = timezone.make_aware(start_datetime, timezone.get_current_timezone())
            # Extracting the date component from the timezone-aware start datetime for further processing or storage.
            # start_date = start_datetime_france.date()
            start_date = start_datetime.date()
            start_time = start_datetime.time()

            # Step 1: Create and save an Adbreaks object based on the received data about the ad-break
            # Create and save AdBreaks object
            ad_break = Adbreaks.objects.create(
                start_at=start_datetime_morocco,
                duration=duration_seconds,
                id_channel=channel_id,
                channel_name=channel_name
            )

            # Step 2: Retrieve or create Playlist object With Window Based on ad-break duration
            # Retrieving the channel zone information based on the channel ID and region, selecting the first match if available.
            channel_zone = ChannelsZone.objects.filter(id_channel=channel_id, region=region_test).first()

            # Step 3: Retrieve Epg objects for the specified start_date and channel_id
            emissions = Epg.objects.filter(
                id_channel = channel_id,
                start_time__lte = start_datetime_morocco,
                end_time__gte = start_datetime_morocco
            )

            # Step 4: Retrieve campaigns for the date range, depending on whether exactly one emission was retrieved
            if emissions.exists() and emissions.count() == 1:
                # Retrieve campaigns for the date range whose program_category contains the emission's genre
                emissions_genres = emissions.first().genre
                emissions_categories = emissions.first().category
                # print(emissions_categories)
                active_campaigns = Campaigns.objects.filter(
                    start_day__lte=start_date,
                    end_day__gte=start_date,
                    program_category__icontains=emissions_genres,
                    country=region_test.lower()
                )
            else:
                active_campaigns = Campaigns.objects.filter(
                    start_day__lte=start_date,
                    end_day__gte=start_date,
                    country=region_test.lower()
                )
            # Filter out campaigns that have reached their airtime limit
            active_campaigns = [
                # Iterate over active campaigns
                campaign for campaign in active_campaigns
                # Check if the number of airtime logs for the campaign on the start_date is less than the allowed broadcasts per day
                if Campaignairtimelog.objects.filter(
                    campaign=campaign,
                    airtime_start__date=start_date,
                    insertion_status=True
                ).count() < campaign.broadcasts_day
                # Check if there exist time intervals for the campaign where the start_time falls within the interval
                and CampaignTimeIntervals.objects.filter(
                    campaign=campaign,
                    start_time__lte=start_time,
                    end_time__gte=start_time
                ).exists()
            ]

            # Step 5: Calculate Priority Score For Each Active Campaign and save it in CampaignPriorityScores
            # List to store campaign IDs and priority scores
            priority_score_list = []
            # Iterate through active campaigns
            for campaign in active_campaigns:
                # Get the first emission from the list of emissions
                actual_emission = emissions.first()
                # Calculate TSA (Target Specificity Allocation)
                TSA = (
                    2 if campaign.exclusivity else                    # If campaign has exclusivity, TSA is 2
                    (1 if campaign.shows_preference and actual_emission and actual_emission.emission_name in campaign.shows_preference else   # If emission name is in preferred shows, TSA is 1
                    (0.5 if campaign.program_category and actual_emission and actual_emission.genre in campaign.program_category else 0))   # If emission genre is in category programs, TSA is 0.5 else TSA is 0
                )
                # Perform the query to get the count of already aired slots for the campaign
                already_aired_slots = Campaignairtimelog.objects.filter(
                    campaign = campaign,
                    airtime_start__date = start_date,
                    insertion_status = True
                ).count()
                # Calculate Airing Requirement Fulfillment (ARF)
                ARF = already_aired_slots / campaign.broadcasts_day
                # Parse string to date if it's a string, otherwise extract date part
                end_date = (datetime.strptime(campaign.end_day, "%Y-%m-%d").date() if isinstance(campaign.end_day, str) else campaign.end_day.date())
                # Calculate urgency (Trend (U)) as a factor inversely proportional to the number of days left until the campaign's end date
                U = 1 / ((end_date - datetime.now().date()).days + 1)
                # Get position preferences from the campaign table and transform them into a list :::> Convert the string to a list of strings
                position_preferences = ast.literal_eval(campaign.position_preference)
                # Define weights for each position preference, starting from 1
                weights = {"before": 1, "during": 2, "after": 3,}
                # Calculate the total sum of weights
                total_weight_sum = sum(weights.values())
                # Get the maximum weight
                max_weight = max(weights.values())
                # Calculate the normalized weights
                normalized_weights = {k: v / total_weight_sum for k, v in weights.items()}
                # Calculate the weighted sum of position preferences
                weighted_sum = sum(normalized_weights[preference] for preference in position_preferences)
                # Calculate the average position preference score (P) between 0 and 1
                P = weighted_sum / max_weight
                # Calculate priority score for the campaign, or set to None if emissions do not exist
                priority_score = calculate_priority_score(TSA=TSA, ARF=ARF, U=U, P=P )
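                # Illustrative (hypothetical) example of the inputs: a non-exclusive campaign whose
                # program_category matches the emission genre gives TSA=0.5; 1 of 2 daily broadcasts
                # already aired gives ARF=0.5; 3 days left until end_day gives U=1/4; position
                # preferences ["during"] give P = (2/6) / 3 ≈ 0.11. The exact weighting of these
                # factors lives in calculate_priority_score (see utils).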
                # Append campaign ID and priority score to the list
                priority_score_list.append({"campaign": campaign, "priority_score": priority_score })
                # Create CampaignPriorityScores object to store priority score
                try:
                    # Try to retrieve an existing CampaignPriorityScores object
                    priority_scores_obj = CampaignPriorityScores.objects.get(campaign=campaign)
                    # Update the existing object
                    priority_scores_obj.priority_score = priority_score
                    priority_scores_obj.save()
                except CampaignPriorityScores.DoesNotExist:
                    # Create a new CampaignPriorityScores object if it doesn't exist
                    CampaignPriorityScores.objects.create(
                        campaign=campaign,
                        priority_score=priority_score
                    )
            # Order priority_score_list from bigger to smaller priority scores
            priority_score_list = sorted(priority_score_list, key=lambda x: x["priority_score"], reverse=True)

            # Step 6:
            # List to store ad spots to be inserted
            ad_spots_to_insert = []
            # Initialize remaining duration_seconds
            remaining_duration_seconds = duration_seconds -  6 # 10

            # ??? Need To Be Updated To Set Max Ads In The Avail
            # !!! Upgrade 06-05-2024
            min_duration = 8 # 35
            # Check if the duration is at least min_duration seconds and the region is Netherlands, or if the region is France
            if (int(duration_seconds) >= min_duration and  region_test == "Netherlands") or region_test == "France" :

                # Iterate through priority_score_list
                for entry in priority_score_list:
                    # Retrieve ad spots for the current campaign with duration condition and ordered by duration
                    list_adspot = Adspots.objects.filter(
                        # id_channel=channel_id,
                        id_campaign = entry["campaign"].id_campaign,
                        duration__lte = remaining_duration_seconds
                    ).order_by("duration")
                    # Check if ad spots exist for the current campaign
                    if list_adspot.exists():
                        # Get the first ad spot with the shortest duration
                        ad_spot = list_adspot.first()
                        # # Randomly select an ad spot from those with the shortest duration
                        # ad_spot = random.choice(list_adspot)
                        # Check if ad_spots_to_insert is not empty then
                        if ad_spots_to_insert:
                            # from functools import reduce
                            # exclusion_conditions = reduce(lambda x, spot: x | Q(ad_spot_1=spot.id_adpost, ad_spot_2_id=ad_spot.id_adpost) | Q(ad_spot_1_id=ad_spot.id_adpost, ad_spot_2_id=spot.id_adpost), ad_spots_to_insert, Q())

                            # Initialize an empty Q object to store the exclusion conditions
                            exclusion_conditions = Q()
                            # Loop over the ad_spots_to_insert list
                            for spot in ad_spots_to_insert:
                                # Build the exclusion conditions for the current spot
                                exclusion_conditions |= Q(ad_spot_1=spot.id_adpost, ad_spot_2_id=ad_spot.id_adpost) | Q(ad_spot_1_id=ad_spot.id_adpost, ad_spot_2_id=spot.id_adpost)
                            # Check if the current ad_spot has an exclusion relationship with any of the ad_spots_to_insert
                            if not AdSpotExclusion.objects.filter(exclusion_conditions).exists():
                                # If no relation exists, proceed with :
                                # Calculate the remaining duration_seconds for the next campaign
                                remaining_duration_seconds -= ad_spot.duration
                                # Append the ad spot to the list of ad spots to be inserted
                                ad_spots_to_insert.append(ad_spot)
                            else:
                                # If an exclusion relationship exists, skip this ad_spot
                                print(f"Ad spot {ad_spot.id_adpost} has an exclusion relation with the already existing ad spots. Skipping...")
                        else :
                            # Calculate the remaining duration_seconds for the next campaign
                            remaining_duration_seconds -= ad_spot.duration
                            # Append the ad spot to the list of ad spots to be inserted
                            ad_spots_to_insert.append(ad_spot)


            playlist = None
            # Check if ad_spots_to_insert is not empty and its count is greater than or equal to 1
            if ad_spots_to_insert and len(ad_spots_to_insert) >= 1:

                # Retrieve all Adspots containing "Filler" in their adspot_name
                # adspot_fillers = Adspots.objects.filter(adspot_name__contains="Filler")
                # Get one random Adspot object from the queryset
                # random_filler_adspot = adspot_fillers.order_by("?").first()
                # Append the ad-filler spot to the list of ad spots to be inserted
                # ad_spots_to_insert.append(random_filler_adspot)
                # random_filler_adspots = adspot_fillers.order_by("?")[:4]  # Get 4 random adspots from the queryset
                # ad_spots_to_insert += list(random_filler_adspots)

                adspot_fillers = Adspots.objects.filter(adspot_name__contains="Filler_Ma_10m", status="1")
                random_filler_adspots = adspot_fillers
                ad_spots_to_insert += list(random_filler_adspots)  # Append the adspots to the list of ad spots to be inserted


                # !!! Edit Need To Test [Added At 06-05-2024 11:59:59 PM]

                # Querying playlists based on the broadcast date and channel zone ID, ordered by creation datetime in descending order.
                playlists = Playlists.objects.filter(
                    broadcastdate=f"{start_date}",
                    id_zone_channel_id=channel_zone.id_zone_channel
                ).order_by("-creation_datetime")
                # Checking if playlists exist and ensuring there is at least one playlist available for further processing.
                if playlists.exists() and playlists.count() > 0:
                    # Fetching the first playlist object from the queryset, which represents the last inserted playlist, if available.
                    old_playlist = playlists.first()
                    # Fetch All Windows For The Defined Playlist
                    playlist_windows = Windows.objects.filter(id_playlist = old_playlist)
                    # Create a copy of the playlist object
                    playlist = old_playlist
                    playlist.id_playlist = None  # Set primary key to None to create a new object
                    playlist.version = f"{ playlists.count() + 1 }"
                    playlist.creation_datetime = timezone.now()
                    playlist.save()
                    # Iterate over each window
                    for _, window in enumerate(playlist_windows):
                        # Get avails for the current window
                        window_avails = Avails.objects.filter( id_window = window )
                        # Create a new Window object for new Playlist
                        new_window = window
                        new_window.id_window = None  # Set primary key to None to create a new object
                        new_window.id_playlist = playlist
                        new_window.save()
                        # Iterate over each avail for the current window
                        for _, avail in enumerate(window_avails):
                            # Get AdSpotsInAvail for the current avail
                            adspots_in_avail = AdspotsInAvail.objects.filter(id_avail=avail)
                            # Create a new Avail object for new Window
                            new_avail = avail
                            new_avail.id_avail = None  # Set primary key to None to create a new object
                            new_avail.id_window = new_window
                            new_avail.save()
                            # Iterate over each AdSpotInAvail for the current avail
                            for _, adspot_in_avail in enumerate(adspots_in_avail):
                                # Create a new Avail object for new Window
                                old_adspot_in_avail = adspot_in_avail
                                old_adspot_in_avail.id_adsinavail = None  # Set primary key to None to create a new object
                                old_adspot_in_avail.id_avail = new_avail
                                old_adspot_in_avail.save()
                else:
                    # Creating a new playlist object with the provided parameters and the current timezone-aware creation datetime.
                    playlist = Playlists.objects.create(
                        id_channel_id=channel_id ,
                        version="1",
                        broadcastdate=f"{start_date}",
                        start_date=f"{start_date}T00:01:00+00:00",
                        end_date=f"{start_date}T23:59:00+00:00",
                        creation_datetime=timezone.now(),
                        id_zone_channel_id=channel_zone.id_zone_channel,
                        is_draft="0",
                        draft_version="0"
                    )

                # Get All Windows For The Defined Playlist
                playlist_windows = Windows.objects.filter(id_playlist = playlist)
                # Create And Save A New Window For The Playlist Using The Detected Ad-break Information,
                # With A Fixed Interval Of 28 Minutes (28*60 = 1680 seconds) Plus The Ad-break Duration
                window_durations = datetime.strptime(str(timedelta(seconds=(28*60 + int(duration_seconds)))), "%H:%M:%S")
                # window_durations = datetime.strptime(str(timedelta(seconds=(38*60 + int(duration_seconds)))), "%H:%M:%S")
                window_durations = window_durations.strftime("%H%M%S00")
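                # e.g. for a 20-second ad-break: timedelta(seconds=28*60 + 20) -> "0:28:20",
                # which strftime("%H%M%S00") renders as "00282000" (the trailing "00" is a literal suffix in the format string).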

                window_start = (start_datetime - timedelta(seconds=2*60) ).strftime("%Y-%m-%d %H:%M:%S")
                #?? If the window start is earlier than 12:00 AM, we need to use the avail start time instead
                # window_start = (start_datetime - timedelta(seconds=12*60) ).strftime("%Y-%m-%d %H:%M:%S")
                window_end = (start_datetime + timedelta(seconds=(26*60 + int(duration_seconds))) ).strftime("%Y-%m-%d %H:%M:%S")
                # Check If The Playlist Has At least One Window
                # If The Playlist Has Not Been Initialized Yet With A Window Then
                if not playlist_windows.exists():
                    # Create And Save New Window
                    playlist_window = Windows.objects.create(
                        id_playlist = playlist,
                        window_start = window_start,
                        window_end = window_end,
                        window_duration = window_durations
                    )
                # If The Playlist Has Been Initialized With A Window Or A Window's List
                # We Will Get The Last Window
                else:
                    # Get The Last Window From The Windows List
                    playlist_window = playlist_windows.last()
                    # Get The Last Window Start Time And End Time Then Convert Them To DateTime Type
                    windows_start_at = datetime.strptime(playlist_window.window_start, "%Y-%m-%d %H:%M:%S")
                    windows_end_at = datetime.strptime(playlist_window.window_end, "%Y-%m-%d %H:%M:%S")
                    # Assuming windows_start_at and windows_end_at are timezone-naive datetimes
                    windows_start_at = timezone.make_aware(windows_start_at, timezone.get_current_timezone())
                    windows_end_at = timezone.make_aware(windows_end_at, timezone.get_current_timezone())
                    # Now check if the ad-break start time is within the window
                    if windows_start_at <= start_datetime <= windows_end_at:

                        # Convert the window duration string to a datetime object and add the duration_seconds
                        # window_durations = datetime.strptime(playlist_window.window_duration, "%H%M%S00") + datetime.strptime(str(timedelta(seconds=(10*60 + int(duration_seconds)))), "%H:%M:%S")
                        window_durations = datetime.strptime(playlist_window.window_duration, "%H%M%S00") + timedelta(seconds=(26*60 + int(duration_seconds)))
                        # Convert the resulting datetime object back to a string in the format "%H%M%S00"
                        window_durations = window_durations.strftime("%H%M%S00")

                        playlist_window.window_end = (windows_end_at + timedelta(minutes=26, seconds=duration_seconds)).strftime("%Y-%m-%d %H:%M:%S")
                        playlist_window.window_duration =  window_durations
                        playlist_window.save()
                    else:
                        # Create And Save New Window
                        playlist_window = Windows.objects.create(
                            id_playlist = playlist,
                            window_start = window_start,
                            window_end = window_end,
                            window_duration = window_durations
                        )
                # Count Of Window Avail In The Playlist Window
                window_avail_count = Avails.objects.filter( id_window = playlist_window ).count()
                # Create and save Window Avail object
                window_avail = Avails.objects.create(
                    id_window = playlist_window,
                    #?? fix this :: need to check
                    avail_start = start_datetime.strftime("%Y-%m-%d %H:%M:%S"),
                    availinwindow = f"{window_avail_count + 1}",
                    datetime = datetime.now()
                )
                #
                traffic = 0
                # Get the last inserted window in the playlist
                last_window = Windows.objects.filter(id_playlist=playlist).last()
                #
                if last_window:
                    # Get the last inserted avail in the window
                    last_avail = Avails.objects.filter(id_window=last_window).last()
                    #
                    if last_avail:
                        # Get the last inserted available link in the avail
                        last_avail_link = AdspotsInAvail.objects.filter(id_avail=last_avail).last()
                        #
                        if last_avail_link:
                            #
                            traffic = last_avail_link.trafficid
                #
                airtime_start = start_datetime
                #
                for index, ad_spot in enumerate(ad_spots_to_insert):
                    #
                    traffic +=1
                    #
                    AdspotsInAvail.objects.create(
                        id_avail = window_avail,
                        id_adspot = ad_spot,
                        positioninavail = index+1,
                        trafficid = traffic
                    )
                    #
                    # Check if a Campaignairtimelog with the same values already exists
                    existing_Campaignairtimelog = Campaignairtimelog.objects.filter(
                        campaign = ad_spot.id_campaign,
                        spot = ad_spot,
                        ad_break = ad_break
                    ).exists()
                    # If the entry doesn't exist, create it
                    if not existing_Campaignairtimelog:
                        Campaignairtimelog.objects.create(
                            campaign = ad_spot.id_campaign,
                            spot = ad_spot,
                            ad_break = ad_break,
                            airtime_start = airtime_start,
                            airtime_end = airtime_start + timedelta(seconds=ad_spot.duration),
                            insertion_status = True
                        )
                    #
                    airtime_start = airtime_start + timedelta(seconds=ad_spot.duration)
                #
                xml_playlist_res = generateSchedule(playlist)
                # Check the result
                if xml_playlist_res["status"]:
                    ftp_host = "172.16.205.1"
                    ftp_port =  60000
                    ftp_username = "SanoaMedia"
                    ftp_password = "SanoaMedia"
                    local_file_path = xml_playlist_res["file"]
                    if playlist.id_zone_channel.networkname == "REGNL":
                        remote_file_path = "2M/schedules/REG1_NL"
                        # uploadFTP(ftp_server, ftp_port, ftp_user, ftp_password, xml_file_path , ftp_upload_path)
                        ftp = FTPConnector(ftp_host, ftp_port, ftp_username, ftp_password)
                        # Upload a file to the server
                        ftp.upload_file(local_file_path, remote_file_path)
                    elif playlist.id_zone_channel.networkname == "2M":
                        remote_file_path = "2M/schedules/REG1_FR"
                        # # uploadFTP(ftp_server, ftp_port, ftp_user, ftp_password, xml_file_path , ftp_upload_path)
                        # ftp = FTPConnector(ftp_host, ftp_port, ftp_username, ftp_password)
                        # # Upload a file to the server
                        # ftp.upload_file(local_file_path, remote_file_path)
                else:
                    print(f"Failed to generate playlist file. Error: {xml_playlist_res['message']}")

            # send Message to telegram
            message_campaign_score = ' '.join(f" * [{campaign_score['campaign'].id_campaign}][{campaign_score['campaign'].name}][{campaign_score['priority_score']}] \n" for campaign_score in priority_score_list)
            alert_message = (
                "🚨 AD-Break Detected! 🚨\n"
                f"Detected At: {start_date_time_str} \n"
                f"====================\n"
                f"Region : {region_test} \n"
                f"====================\n"
                f"With Duration: {duration_seconds } Seconds \n"
                f"====================\n"
                f"With Duration To Be Replaced: {duration_seconds - 10 } Seconds \n"
                f"==================== \n"
                f"This Will Update Playlist With ID: {playlist.id_playlist if playlist != None else ''} \n"
                f"====================\n"
                f"Based On Emission: {emissions.first().emission_name if emissions.first() else ''} With Genre {emissions.first().genre if emissions.first() else ''} \n"
                f"====================\n"
                f"With Active Campaigns \n {message_campaign_score}"
                f"====================\n"
                f"With This Ad_Spots {[ ad_spot.adspot_name  for ad_spot in ad_spots_to_insert]}"
            )
            self.telegram_bot.send_telegram_log(alert_message)

            return JsonResponse({"message": "Ad playlist created successfully"}, status=200)

        except Exception as e:
            raise e

class StaticVerifsValidation(View):
    """
        A view to handle static verifications validation.

        This view is responsible for handling GET requests related to static verifications validation
        for a playlist identified by its primary key id.
    """
    def get(self, request, playlist_id):
        """
            Handle GET requests.

            Retrieve the playlist object with the specified primary key `playlist_id`.
            Retrieve all adbreaks and windows associated with the playlist.
            For each window, retrieve adbreaks within the window and corresponding avails.

            Args:
                request (HttpRequest): The HTTP request object.
                playlist_id (int): The primary key of the playlist to retrieve.

            Returns:
                HttpResponse: A JSON response containing the adbreaks, windows, and adspots information.
        """
        from datetime import datetime, timedelta
        # Retrieve the playlist object with the specified primary key id_playlist
        playlist = Playlists.objects.filter(id_playlist=playlist_id)
        if playlist:
            playlist = playlist.first()
            # Calculate the broadcast date by replacing dashes in the playlist's broadcast date
            broadcastdate = playlist.broadcastdate.replace("-", "")
            # Retrieve all adbreaks associated with the playlist's start date
            adbreaks = Adbreaks.objects.filter(start_at__date=playlist.broadcastdate)
            # Retrieve all windows associated with the playlist
            windows = Windows.objects.filter(id_playlist=playlist.id_playlist)
            #
            static_verifs = []
            # Iterate through each window
            for window in windows:
                # Retrieve adbreaks within the current window
                window_ad_start =  datetime.strptime(window.window_start, "%Y-%m-%d %H:%M:%S") - timedelta(hours=2)
                window_ad_end =  datetime.strptime(window.window_end, "%Y-%m-%d %H:%M:%S") - timedelta(hours=2)
                adbreaks_in_window = Adbreaks.objects.filter(
                    start_at__gte=window_ad_start,
                    start_at__lte=window_ad_end
                )
                # Retrieve avails associated with the current window
                window_avails = Avails.objects.filter(id_window=window)

                avails_count = window_avails.count()
                adbreaks_count = adbreaks_in_window.count()

                if avails_count <= adbreaks_count:
                    # Iterate through each avail
                    for index, avail in enumerate(window_avails):
                        # Retrieve adspots within the current avail
                        adspots_in_avail = AdspotsInAvail.objects.filter(id_avail=avail)
                        #
                        remaining_duration_seconds = adbreaks_in_window[index].duration
                        #
                        airTime = adbreaks_in_window[index].start_at
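                        # Note on airLength below: time.gmtime(duration) formatted with "%H%M%S00" yields an
                        # HHMMSS string plus a literal "00" suffix, e.g. a 30-second spot -> "00003000".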
                        for adspot_in_avail in adspots_in_avail:
                            verif_spot = {
                                "networkname": playlist.id_zone_channel.networkname,
                                "zonename": playlist.id_zone_channel.zonename,
                                "broadcastDate": broadcastdate,
                                "trafficId": adspot_in_avail.trafficid,
                                "spotId": adspot_in_avail.id_adspot.adspot_name,
                                "airTime": airTime.strftime("%Y-%m-%d %H:%M:%S"),
                                "airLength": time.strftime("%H%M%S00", time.gmtime(int(adspot_in_avail.id_adspot.duration))),
                                "airStatusCode": "0001" if remaining_duration_seconds >= adspot_in_avail.id_adspot.duration else "0008",
                                "revision": playlist.version,
                                "is_active": True
                            }
                            # Or Add It To DataBase
                            new_ad_verif = Verifs(
                                networkname= playlist.id_zone_channel.networkname,
                                zonename= playlist.id_zone_channel.zonename,
                                broadcastDate= broadcastdate,
                                trafficId= adspot_in_avail.trafficid,
                                spotId= adspot_in_avail.id_adspot.adspot_name,
                                airTime= airTime.strftime("%Y-%m-%d %H:%M:%S"),
                                airLength= time.strftime("%H%M%S00", time.gmtime(int(adspot_in_avail.id_adspot.duration))),
                                airStatuscode= "0001" if remaining_duration_seconds >= adspot_in_avail.id_adspot.duration else "0008",
                                revision= playlist.version,
                                vercomplete = True
                            )
                            new_ad_verif.save()
                            remaining_duration_seconds -= adspot_in_avail.id_adspot.duration
                            airTime += timedelta(seconds=adspot_in_avail.id_adspot.duration)
                            # Append verif_spot to static_verifs
                            static_verifs.append(verif_spot)
                else:
                    # Iterate through each adbreak
                    for index, adbreak in enumerate(adbreaks_in_window):
                        # Retrieve adspots within the current avail
                        adspots_in_avail = AdspotsInAvail.objects.filter(id_avail=window_avails[index])
                        #
                        remaining_duration_seconds = adbreak.duration
                        #
                        airTime = adbreak.start_at
                        for adspot_in_avail in adspots_in_avail:
                            verif_spot = {
                                "networkname": playlist.id_zone_channel.networkname,
                                "zonename": playlist.id_zone_channel.zonename,
                                "broadcastDate": broadcastdate,
                                "trafficId": adspot_in_avail.trafficid,
                                "spotId": adspot_in_avail.id_adspot.adspot_name,
                                "airTime": airTime.strftime("%Y-%m-%d %H:%M:%S"),
                                "airLength": time.strftime("%H%M%S00", time.gmtime(int(adspot_in_avail.id_adspot.duration))),
                                "airStatusCode": "0001" if remaining_duration_seconds >= adspot_in_avail.id_adspot.duration else "0008",
                                "revision": playlist.version,
                                "is_active": True
                            }
                            # Or Add It To DataBase
                            new_ad_verif = Verifs(
                                networkname= playlist.id_zone_channel.networkname,
                                zonename= playlist.id_zone_channel.zonename,
                                broadcastDate= broadcastdate,
                                trafficId= adspot_in_avail.trafficid,
                                spotId= adspot_in_avail.id_adspot.adspot_name,
                                airTime= airTime.strftime("%Y-%m-%d %H:%M:%S"),
                                airLength= time.strftime("%H%M%S00", time.gmtime(int(adspot_in_avail.id_adspot.duration))),
                                airStatuscode= "0001" if remaining_duration_seconds >= adspot_in_avail.id_adspot.duration else "0008",
                                revision= playlist.version,
                                vercomplete = True
                            )
                            new_ad_verif.save()
                            remaining_duration_seconds -= adspot_in_avail.id_adspot.duration
                            airTime += timedelta(seconds=adspot_in_avail.id_adspot.duration)
                            # Append verif_spot to static_verifs
                            static_verifs.append(verif_spot)
            return JsonResponse(
                {
                    "playlist":{
                        "id": playlist.id_playlist,
                        "version": playlist.version,
                        "broadcastdate": broadcastdate,
                        "start_date": playlist.start_date,
                        "end_date": playlist.end_date,
                        "is_draft": playlist.is_draft,
                        "draft_version": playlist.draft_version,
                        "creation_datetime": playlist.creation_datetime,
                        "draft_version": playlist.draft_version,
                        "channel": {
                            "id": playlist.id_channel.id_channel,
                            "name": playlist.id_channel.channel_name,
                        },
                        "region": {
                            "id": playlist.id_zone_channel.id_zone_channel,
                            "region": playlist.id_zone_channel.region,
                            "zonename": playlist.id_zone_channel.zonename,
                            "networkname": playlist.id_zone_channel.networkname,
                        },
                    },
                    "verifs": static_verifs,
                    "status": "success",
                    "message": "Static verifications validation successful"
                }
            )
        else:
            return JsonResponse(
                {
                    "status": "Failed",
                    "message": "Playlist Not Found"
                }
            )
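
# A minimal, standalone sketch of how the verif fields built above are derived:
# `airLength` renders a spot duration (in seconds) as HHMMSSFF with a fixed "00"
# frame part, and `airStatusCode` is "0001" while the ad break still has room
# for the spot, "0008" otherwise. Illustration only; the view above is what runs.
def _sketch_air_length_and_status(duration_seconds, remaining_seconds):
    import time
    air_length = time.strftime("%H%M%S00", time.gmtime(int(duration_seconds)))
    air_status_code = "0001" if remaining_seconds >= duration_seconds else "0008"
    return air_length, air_status_code
# e.g. _sketch_air_length_and_status(30, 90) -> ("00003000", "0001")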

class AdsFromConductor(View):
    template_name = "conductor/index.html"

    def get(self, request):
        from datetime import datetime

        # Get the selected date from the request.GET dictionary and convert it to a datetime object
        current_date = datetime.strptime(request.GET.get('date'), '%Y-%m-%d').date() if request.GET.get('date') else datetime.now().date()
        # Query ConductorData objects where ad_break_start_at__date is equal to current_date
        conductor_data_objects = ConductorData.objects.filter(date=current_date)
        # Render the template with the current date included in the context
        return render(
            request,
            self.template_name,
            {
                'current_date': current_date,
                'ads_planning': conductor_data_objects
            }
        )

    def post(self, request):

        import pandas as pd
        from .utils import hhmmssf_to_seconds

        if "conductor_file" in request.FILES:
            conductor_file = request.FILES["conductor_file"]
            channel_name = request.POST["channel"] if ("channel" in request.POST and request.POST["channel"]) else "2M"
            file_date = request.POST["date"]
            if conductor_file.name.endswith(".xlsx") or conductor_file.name.endswith(".csv"):
                # # Define the directory where you want to save the file
                # upload_dir = f"{os.getcwd()}/files/conductor/2M"
                # # Ensure the directory exists, create it if it doesn't
                # os.makedirs(os.path.dirname(upload_dir), exist_ok=True)
                # # Save the file to the specified directory
                # file_path = os.path.join(upload_dir, conductor_file.name)
                # with open(file_path, 'wb+') as destination:
                #     destination.write(conductor_file)
                # Read the uploaded file
                try:
                    # Read the file into a DataFrame (the form accepts .xlsx or .csv)
                    if conductor_file.name.endswith(".csv"):
                        data_conductor = pd.read_csv(conductor_file)
                    else:
                        data_conductor = pd.read_excel(conductor_file)
                    # Extract specific columns
                    data_conductor = data_conductor.iloc[:, [1, 2, 3, 4, 6, 7, 10]]  # Assuming B,C,D,E,G,H,K corresponds to 1,2,3,4,6,7,10 respectively
                    # Replace line breaks in column names with spaces
                    data_conductor.columns = data_conductor.columns.str.replace("\n", " ", regex=True)
                    # Replace '\n' with space in all columns
                    data_conductor = data_conductor.replace("\n", " ", regex=True)
                    # Drop rows where all specified columns contain NaN values
                    data_conductor = data_conductor.dropna(subset=["Source de Diffusion", "Media ID", "Video ID", "Titre", "Durée"], how="all")
                    # Reset the index
                    data_conductor.reset_index(drop=True, inplace=True)
                    # Iterate over each row in the DataFrame
                    for index, row in data_conductor.iterrows():
                        # Check if all specified columns are NaN
                        if (pd.isna(row['Heure de début']) and pd.isna(row['Source de Diffusion']) and pd.isna(row['Media ID']) and pd.isna(row['Durée'])):
                            # Get the "Titre" value and Add it to the previous row's "Titre" value if the current row is not the first row
                            data_conductor.at[index - 1, "Titre"] += " " + str(row['Titre'])
                            # Remove the current row
                            data_conductor.drop(index, inplace=True)
                    # Reset the index
                    data_conductor.reset_index(drop=True, inplace=True)
                    # Replace NaN values in "Video ID" column with an empty string
                    data_conductor["Video ID"] = data_conductor["Video ID"].fillna("")
                    # Reset the index
                    data_conductor.reset_index(drop=True, inplace=True)
                    # Remove rows where the Video ID contains "BA" or "MOSQUE"
                    data_conductor = data_conductor[~data_conductor["Video ID"].astype(str).str.contains("BA")]
                    data_conductor = data_conductor[~data_conductor["Video ID"].astype(str).str.contains("MOSQUE")]
                    # Reset the index
                    data_conductor.reset_index(drop=True, inplace=True)
                    # Remove rows where the "Heure de début" column contains "** BREAK **"
                    data_conductor = data_conductor[data_conductor["Heure de début"] != "** BREAK **"]
                    # Reset the index
                    data_conductor.reset_index(drop=True, inplace=True)
                    # Strip release/version tags such as '(MASTER DIGITAL)', '(XDCAM)',
                    # '(VERSION HABILLEE)', etc. (including truncated variants seen in
                    # the source files) from all columns
                    version_tags = [
                        r"\(MASTER DIGITAL\)", r"\(XDCAM\)", r"\(VERSION HABILLEE\)",
                        r"\(VERSION S/T FRANCAIS\)", r"\(MASTER\)", r"\(VERSION CENSUREE\)",
                        r"\(COPIE DE REMPLACEMENT\)", r"\(COPIE DE REMPLACEMEN\)",
                        r"\(VERSION AVEC SPONSOR\)", r"\(VERSION CORRIGEE\)",
                        r"\( HABILLEE DOUBLEE EN DAR\)", r"\(VERSION HABILLEE DOUBLEE EN DAR\)",
                        # Truncated variants
                        r"\(VERSION AVEC SPONS", r"\(VERSION AVEC SP", r"\(COPIE DE REMPLACEM",
                        r"\(VERSION DOUBLEE EN DARI", r"\(VERSION CORRIGE", r"\(VERSION CORRIG",
                        r"\(VERSIO", r"\(COPIE",
                    ]
                    for tag in version_tags:
                        data_conductor = data_conductor.replace(tag, "", regex=True)
                    # Reset the index
                    data_conductor.reset_index(drop=True, inplace=True)
                    # Remove rows where the Video ID contains "Dans les 7 jours - BA"
                    data_conductor = data_conductor[~data_conductor["Titre"].astype(str).str.contains("Dans les 7 jours - BA")]
                    # Remove rows where the Video ID contains "Dans les 7 jours - SPOT"
                    data_conductor = data_conductor[~data_conductor["Titre"].astype(str).str.contains("Dans les 7 jours - SPOT")]
                    # Remove rows where the Video ID contains "CLIP :"
                    data_conductor = data_conductor[~data_conductor["Titre"].astype(str).str.contains("CLIP :")]
                    # Remove rows where the Video ID contains "TAJNID"
                    data_conductor = data_conductor[~data_conductor["Titre"].astype(str).str.contains("TAJNID")]
                    # Remove rows where the Video ID contains "TAJNID"
                    data_conductor = data_conductor[~data_conductor["Titre"].astype(str).str.contains("VERSION PROMO - SPOT")]
                    data_conductor = data_conductor[~data_conductor["Titre"].astype(str).str.contains("VERSION PROMO - BA")]
                    data_conductor = data_conductor[~data_conductor["Titre"].astype(str).str.contains("VERSION PROMO - CAPSULE")]
                    data_conductor = data_conductor[~data_conductor["Titre"].astype(str).str.contains("SPOT :")]
                    # Applying strip to remove leading and trailing spaces
                    data_conductor["Titre"] = data_conductor["Titre"].astype(str).str.strip()
                    data_conductor["Titre"] = data_conductor["Titre"].astype(str).str.lstrip()
                    #
                    data_conductor["Titre"] = data_conductor["Titre"].str.replace(r'EPI#\d{0,9}:', '', regex=True)
                    data_conductor["Titre"] = data_conductor["Titre"].str.replace(r'EPi#\d{0,9}:', '', regex=True)
                    data_conductor["Titre"] = data_conductor["Titre"].str.replace(r'Epi#\d{0,9}:', '', regex=True)
                    data_conductor["Titre"] = data_conductor["Titre"].str.replace(r'epi#\d{0,9}:', '', regex=True)
                    data_conductor["Titre"] = data_conductor["Titre"].str.replace(r'NOUREDDINE HADDIOUI -HIJAZI- ', '', regex=True)
                    # Reset the index
                    data_conductor.reset_index(drop=True, inplace=True)
                    # Replace values matching the regex pattern with an empty string
                    data_conductor = data_conductor.replace("^S-\d{0,9}\/\d{0,9} :", "", regex=True)
                    # Initialize variables to track start and end markers
                    start_found = False
                    start_index = None
                    # Iterate over each row in the DataFrame
                    for index, row in data_conductor.iterrows():
                        # Video IDs that mark the boundaries of an ad block
                        values_to_check = [
                            'JINCRYSTAL', 'JINOCEAN', 'JINETOILE', 'IDPUB', 'IPM', 'JP20NJ', 'JP20WJ', 'F0283832',
                            'JP20SS', 'JP20NS', 'JINRAMA1', 'JINRAMA2', 'JINRAMA3', 'JINRAMA4', 'IPG',
                            'JP20BS'
                        ]
                        # Check if the current row represents the start marker
                        if not start_found and any(value in str(row['Video ID']) for value in values_to_check):
                            start_found = True
                            start_index = index
                        # If start marker is found, check if the current row represents the end marker
                        elif start_found and any(value in str(row['Video ID']) for value in values_to_check):
                            # Initialize a flag to track if the start marker is found
                            start_found = False
                            # Store the "Titre" value from the row before the current index in show_before
                            before_index = start_index
                            # Fetch the data for the row at index `start_index`
                            row = data_conductor.iloc[before_index]
                            show_before_title = str(row["Titre"])
                            show_before_video_id = str(row["Video ID"])
                            show_before_duration = str(row["Durée"])
                            # show_before_duration = ':'.join(f"{int(p):02d}" for p in show_before_duration.split(':')[:3]) if show_before_duration and len(show_before_duration.split(':')) >= 3 else show_before_duration

                            # Iterate backwards through the DataFrame
                            while before_index >= 0:
                                # Check if the condition for the previous row is met
                                #   and (show_before_episode is not None and show_before_episode != 0 and show_before_episode != "")
                                if (show_before_duration != "nan" and show_before_duration != "" and show_before_duration is not None) and ("BA" not in show_before_video_id):
                                    # Convert duration to seconds and check if it's at least 60
                                    show_before_duration = hhmmssf_to_seconds(show_before_duration)
                                    if show_before_duration >= 60 and "00:07:00" not in show_before_title and "00:09:00" not in show_before_title:
                                        # If condition is met, exit the loop
                                        break
                                # Move to the previous row
                                before_index -= 1
                                # If there are more rows to check
                                if before_index >= 0:
                                    # Fetch the data for the previous row
                                    row = data_conductor.iloc[before_index]
                                    show_before_title = str(row["Titre"])
                                    show_before_video_id = str(row["Video ID"])
                                    show_before_duration = str(row["Durée"])
                                else:
                                    # If no more rows to check, break out of the loop
                                    break

                            # Store the "Titre" value from the row after the current index in show_after
                            after_index = index
                            # Fetch the data for the next row
                            row = data_conductor.iloc[after_index]
                            show_after_title = str(row["Titre"])
                            show_after_video_id = str(row["Video ID"])
                            show_after_duration = str(row["Durée"])
                            # Iterate forwards through the DataFrame
                            while after_index < len(data_conductor):
                                # Check if the condition for the next row is met
                                #  and (show_after_episode is not None and show_after_episode != 0 and show_after_episode != "")
                                if (show_after_duration != "nan" and show_after_duration != "" and show_after_duration is not None) and ("BA" not in show_after_video_id):
                                    # Convert duration to seconds and check if it's at least 60
                                    show_after_duration = hhmmssf_to_seconds(show_after_duration)
                                    if show_after_duration >= 60 and "00:07:00" not in show_after_title and "00:09:00" not in show_after_title:
                                        # If condition is met, exit the loop
                                        break
                                # Move to the next row
                                after_index += 1
                                # If there are more rows to check
                                if after_index < len(data_conductor):
                                    # Fetch the data for the next row
                                    row = data_conductor.iloc[after_index]
                                    show_after_title = str(row["Titre"])
                                    show_after_video_id = str(row["Video ID"])
                                    show_after_duration = str(row["Durée"])
                                else:
                                    # If no more rows to check, break out of the loop
                                    break

                            # Slice the DataFrame from start_index+1 to index to get pub_data
                            pub_data = data_conductor.iloc[(start_index + 1):index]
                            # Filter rows for Début publicité
                            debut_pub_data = pub_data[pub_data['Source de Diffusion'] == 'Début publicité']
                            # Get Heure de début for Début publicité
                            ad_break_start_at = debut_pub_data['Heure de début'].values[0]
                            ad_break_start_at = ':'.join(f"{int(p):02d}" for p in ad_break_start_at.split(':')[:3]) if ad_break_start_at and len(ad_break_start_at.split(':')) >= 3 else ad_break_start_at

                            # Filter rows for Fin Publicité
                            fin_pub_data = pub_data[pub_data['Source de Diffusion'] == 'Fin Publicité']
                            # Get Heure de fin for Fin publicité
                            ad_break_end_at = fin_pub_data['Heure de début'].values[0]
                            ad_break_end_at = ':'.join(f"{int(p):02d}" for p in ad_break_end_at.split(':')[:3]) if ad_break_end_at and len(ad_break_end_at.split(':')) >= 3 else ad_break_end_at

                            # Get Durée for Fin Publicité
                            ad_break_duration = fin_pub_data['Durée'].values[0]

                            ConductorData.objects.create(
                                ad_break_start_at = ad_break_start_at,
                                ad_break_end_at   = ad_break_end_at,
                                duree             = ad_break_duration,
                                show_before       = show_before_title,
                                show_after        = show_after_title,
                                if_show_during    = (show_before_title == show_after_title),
                                channel           = channel_name,
                                date              = file_date
                            )
                    return JsonResponse({"status": "Success", "message": "Ads extracted successfully."})
                except Exception as e:
                    
                    import traceback
                    tb = traceback.extract_tb(e.__traceback__)
                    filename, line_num, func_name, text = tb[-1]  # Get the last frame (where error occurred)
                    
                    error_details = {
                        "status": "Failed",
                        "message": str(e),
                        "error_type": type(e).__name__,
                        "file": filename,
                        "line": line_num,
                        "function": func_name,
                        "code": text
                    }
                    
                    print(f"Error in {filename}:{line_num} in {func_name}(): {e}")
                    return JsonResponse(error_details)
                    # print(e)
                    # return JsonResponse({"status": "Failed", "message": str(e)})
            else:
                return JsonResponse({"status": "Failed", "message": "Please upload a valid Excel file."})
        else:
            return JsonResponse({"status": "Failed", "message": "No file uploaded."})

class VerifsStaticInsert(View):
    template_name = "verifs/index.html"

    def get(self, request):
        # Render the template with the current date included in the context
        return render(request, self.template_name, {})

    def post(self, request):

        if "verifs_file" in request.FILES:

            verifs_file = request.FILES["verifs_file"]

            if verifs_file.name.endswith(".ver"):
                #
                try:
                    import xml.etree.ElementTree as ET

                    # All operations in one line
                    last_part = os.path.splitext(verifs_file.name)[0].split('-')[-1]
                    #
                    tree = ET.parse(verifs_file)
                    root = tree.getroot()

                    results = []
                    namespace = {"ns": "http://www.scte.org/schemas/118-3/201X"}

                    for cue_message in root.findall("ns:CueMessage", namespace):
                        network_name = root.attrib.get("networkName")
                        zone_name = root.attrib.get("zoneName")

                        # Retrieve the channel zone information associated with the playlist's verifs file
                        channel_zone = ChannelsZone.objects.filter(
                            networkname=network_name,
                            verifs_number=last_part
                        ).first()

                        print(channel_zone)

                        broadcast_date = root.attrib.get("broadcastDate")

                        for spot in cue_message.findall("ns:Spot", namespace):
                            traffic_id = spot.attrib.get("trafficId")
                            spot_id = spot.attrib.get("spotId")
                            air_time = spot.attrib.get("airTime")
                            air_length = spot.attrib.get("airLength")
                            air_status_code = spot.attrib.get("airStatusCode")
                            revision = spot.attrib.get("revision")

                            results.append({
                                "networkname": channel_zone.id_channel.channel_name,
                                "zonename": channel_zone.id_zone_channel,
                                "broadcastDate": broadcast_date,
                                "trafficId": traffic_id,
                                "spotId": spot_id,
                                "airTime": air_time.replace('T', ' ').split('+')[0],
                                "airLength": air_length,
                                "airStatusCode": air_status_code,
                                "revision": revision
                            })

                            Verifs.objects.update_or_create(
                                networkname = channel_zone.id_channel.channel_name,
                                zonename = channel_zone.id_zone_channel,
                                broadcastDate = broadcast_date,
                                trafficId = traffic_id,
                                spotId = spot_id,
                                airTime = air_time.replace('T', ' ').split('+')[0],
                                airLength = air_length,
                                airStatuscode = air_status_code,
                                revision = revision,
                                vercomplete = "false"
                            )


                    return JsonResponse({"status": "Success", "message": "Verifs Collected successfully.", "result": results})
                except Exception as e:
                    print(e)
                    return JsonResponse({"status": "Failed", "message": str(e)})
            else:
                return JsonResponse({"status": "Failed", "message": "Please upload a valid Verifs file."})
        else:
            return JsonResponse({"status": "Failed", "message": "No file uploaded."})



# Generating XML file from the Database
def generateSchedule(playlist):
    from datetime import datetime
    try:
        # Convert the broadcast date string to a datetime object, then format it as "YYYYMMDD"
        broadcastDate = datetime.strptime(playlist.broadcastdate, "%Y-%m-%d").strftime("%Y%m%d")
        # Get the current working directory
        current_directory = os.getcwd()

        print(broadcastDate)
        print(playlist.id_channel.channel_name)
        print(playlist.id_zone_channel.region)
        print(playlist.broadcastdate)
        print(broadcastDate)
        print(playlist.id_zone_channel.zonename)
        print(playlist.id_zone_channel.networkname)
        print(playlist.version)
        # Construct the full file path
        # file_path = f"/var/www/html/DAI27/Adtlas_DAI/files/schedules/{playlist.id_channel.channel_name}/{playlist.id_zone_channel.region}/{playlist.broadcastdate}/{broadcastDate}-{playlist.id_zone_channel.zonename}-{playlist.id_zone_channel.networkname}-{playlist.version}.sch"
        file_path = f"{current_directory}/files/schedules/{playlist.id_channel.channel_name}/{playlist.id_zone_channel.region}/{playlist.broadcastdate}/{broadcastDate}-{playlist.id_zone_channel.zonename}-{playlist.id_zone_channel.networkname}-{str(playlist.version)}.sch"

        print(file_path)
        # ==============================================================================
        # insertion of Schedule tag data in the xml file
        root = xml.Element("Schedule")
        root.set("broadcastDate", str(broadcastDate))
        root.set("begDateTime", f"{str(playlist.broadcastdate)}T00:01:00+00:00")
        root.set("endDateTime", f"{str(playlist.broadcastdate)}T23:59:59+00:00")
        root.set("networkName", str(playlist.id_zone_channel.networkname))
        root.set("zoneName", str(playlist.id_zone_channel.zonename))
        root.set("revision", str(playlist.version))
        root.set("level", "0")
        root.set("xmlns", "http://www.scte.org/schemas/118-3/201X")
        root.set("schemaVersion", "http://www.w3.org/2001/XMLSchema")
        # ==============================================================================
        trafficid = 0
        # ==============================================================================
        # Fetch all windows for the given playlist
        playlist_windows = Windows.objects.filter(id_playlist = playlist)
        # ==============================================================================
        # Iterate over each window
        for index, window in enumerate(playlist_windows):
            # Create a new XML element for the window
            window_elem = xml.SubElement(root, "Window")
            window_elem.set("windowStart", f"{str(window.window_start).replace(' ', 'T')}+00:00")
            window_elem.set("windowDuration", window.window_duration)
        # ==============================================================================
            # Get avails for the current window
            window_avails = Avails.objects.filter( id_window = window )
            # Iterate over each avail for the current window
            for avail_index, avail in enumerate(window_avails):
                # Create a new XML element for the avail
                avail_elem = xml.SubElement(window_elem, "Avail")
                avail_elem.set("availStart", f"{str(avail.avail_start).replace(' ', 'T')}+00:00")
                avail_elem.set("availInWindow", avail.availinwindow)
                avail_elem.set("availNum", '0')
        # ==============================================================================
                # Get AdSpotsInAvail for the current avail
                adspots_in_avail = AdspotsInAvail.objects.filter(id_avail=avail)
                # Iterate over each AdSpotInAvail for the current avail
                for adspot_index, adspot in enumerate(adspots_in_avail):
                    trafficid+=1
                    adspot_duration = time.strftime("%H%M%S00", time.gmtime(int(adspot.id_adspot.duration)))
                    # Create a new XML element for the AdSpotInAvail
                    adspot_elem = xml.SubElement(avail_elem, "Spot")
                    adspot_elem.set("adId", f"{adspot.id_adspot.duration}sec")
                    adspot_elem.set("eventType", "LOI")
                    adspot_elem.set('length', adspot_duration)
                    adspot_elem.set("positionInAvail", str(adspot.positioninavail))
                    adspot_elem.set("schedSource", "Local")
                    adspot_elem.set("spotId", adspot.id_adspot.filename)
                    # adspots_array[ads_i].set('trafficId', str(adspot.trafficid))
                    adspot_elem.set("trafficId", str(trafficid))
        # ==============================================================================
        tree = xml.ElementTree(root)
        # ==============================================================================
        # Create directories if they don't exist
        os.makedirs(os.path.dirname(file_path), exist_ok=True)
        # Create the file
        with open(file_path, "wb") as file:
            tree.write(file, encoding="utf-8", xml_declaration=True)

        return {
            "status": True,
            "file": file_path,
            "message": "Playlist File Created Succesfully"
        }

    except Exception as e:
        return {
            "status": False,
            "file": None,
            "message": f"Exeption {e}"
        }
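
# A compact sketch of the .sch document shape that generateSchedule() writes:
# Schedule > Window > Avail > Spot, with all data carried in attributes. The
# values below are placeholders; the real ones come from the Playlists, Windows,
# Avails and AdspotsInAvail rows handled above.
def _sketch_schedule_xml():
    import xml.etree.ElementTree as ET
    root = ET.Element("Schedule", broadcastDate="20240417", networkName="EX_NET",
                      zoneName="EX_ZONE", revision="1")
    window = ET.SubElement(root, "Window", windowStart="2024-04-17T12:00:00+00:00",
                           windowDuration="010000")
    avail = ET.SubElement(window, "Avail", availStart="2024-04-17T12:10:00+00:00",
                          availInWindow="1", availNum="0")
    ET.SubElement(avail, "Spot", adId="30sec", eventType="LOI", length="00003000",
                  positionInAvail="1", schedSource="Local", spotId="EX_FILE.mxf",
                  trafficId="1")
    return ET.tostring(root, encoding="unicode")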


def send_vast(request):
    from threading import Thread
    thread = Thread(target=call_vast_api, args=("Leila", "Feuilleton", 62, 47399))
    thread.start()
    thread.join()

def get_last_week(date):
    from datetime import datetime, timedelta

    # Parse the string into a datetime object
    timestamp = datetime.strptime(date, "%Y-%m-%d %H:%M:%S")

    # Subtract one week
    one_week_ago = timestamp - timedelta(weeks=1)

    # Return the datetime one week earlier
    return one_week_ago
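# Example of the helper above (output shown as a comment, not executed):
# >>> get_last_week("2024-04-17 14:30:00")
# datetime.datetime(2024, 4, 10, 14, 30)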

class PlaylistVast(APIView):
    def post(self,request):
        from django.core import serializers

        last_week = get_last_week(request.POST.get('ads_schedule'))
        last_week_day = last_week.strftime('%Y-%m-%d')
        last_week_minut = last_week.strftime('%H:%M:00')
        # print(last_week_minut)
        # Get SFR analytics volume for last week
        try:
            analytics_data = Sfr_analytics.objects.get(
                day=last_week_day,
                minute=last_week_minut,
                sfr_channel_name="2M Maroc"
            )
            total_volume = round(float(analytics_data.purcent) * 4500000 / 17)
            print('Analytic SFR: ', analytics_data.minute)

        except Sfr_analytics.DoesNotExist:
            total_volume = 0
            print({"message": "SFR Not Found"})
        finally:
            try:
                # get epg for last week based on start and interval
                print({"EPG":{"last_week":last_week_minut}})
                last_week_epg = f"{last_week_day} {last_week_minut}"
                epg_data = Epg.objects.filter(start_time__lte=last_week_epg, end_time__gte=last_week_epg).first()
                if epg_data:
                    print("EPG: ", epg_data.emission_name)
                # serialized_data = serializers.serialize('json', epg_data)
                # # Convert serialized data to Python data (list of dictionaries)
                # deserialized_data = json.loads(serialized_data)
                # call ALMA API
                # thread = Thread(target=call_vast_api,args=("Leila","Feuilleton",(emission.end_time - emission.start_time).total_seconds(),total_volume))
                # thread.start()
                #
                # return JsonResponse(deserialized_data)
            except Epg.DoesNotExist:
                print({"message": "EPG Not Found"})
        return JsonResponse({"data":last_week})

def alma_callback(url,headers,reply):

    response = requests.get(url, headers=headers)
    # Update Vast Response Status
    if response.status_code == 200:
        reply.status = response.text
        reply.save()
        return response


def answer_Alma(request):

    updated_replies = VastResponse.objects.filter(datetime_timestamp__icontains='2024-04-17',status=None)
    verify_replies = VastResponse.objects.filter(datetime_timestamp__icontains='2024-04-17')
    # for reply in updated_replies:
    #     reply.delete()
    #     reply.save()

    headers = {
        'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/103.0.0.0 Safari/537.36'
    }
    if updated_replies:
        # Creating Threading Pool for Tracking Completed links
        # Send requests to tracking completed links
        with ThreadPoolExecutor(max_workers=20) as executor:
            futures = [executor.submit(alma_callback, reply.tracking_complete, headers=headers,reply=reply) for reply in updated_replies]
            result = [future.result() for future in futures]


def update_vast_response_v2(request):
    from .tasks import update_vast

    headers = {
        'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/103.0.0.0 Safari/537.36'
    }
    print("called")
    updater = update_vast.delay()

def update_vast_response(request):
    import csv
    # from DAIManagementApp.models import Trackinglink

    # verify_replies = VastResponse.objects.filter(
    #     datetime_timestamp__icontains='2024-10-17',
    #     tracking_start_status=None,
    #     tracking_firstquartile_status=None,
    #     tracking_midpoint_status=None,
    #     tracking_thirdquartile_status=None,
    #     #
    #     impression_sprintserve_status=None,
    #     impression_double_click_status=None)
    verify_replies = 10

    headers = {
        'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/103.0.0.0 Safari/537.36'
    }

    # Function to make requests asynchronously
    # def make_requests(url,reply):
    #     from urllib.parse import urlparse, parse_qs
    #     parsed_url = urlparse(url)
    #     print(reply)
    #     # Get the query parameters
    #     query_params = parse_qs(parsed_url.query)
    #     response = requests.get(url=url,headers=headers)
    #     print(response.status_code)
    #     if response.text != None:
    #         # Get the value of the 'event' parameter
    #         event_value = query_params.get('event')

    #         if "js_start" in url :
    #             # print(reply)
    #             reply.tracking_start_status = response.text
    #             reply.save()
    #         elif "js_first_quartile" in url:
    #             reply.tracking_firstquartile_status = response.text
    #             reply.save()
    #         elif "js_midpoint" in url:
    #             reply.tracking_midpoint_status = response.text
    #             reply.save()
    #         elif "js_third_quartile" in url:
    #             reply.tracking_thirdquartile_status = response.text
    #             reply.save()
    #         elif "vast_impression" in url:
    #             reply.impression_sprintserve_status = response.text
    #             reply.save()
    #         elif "doubleclick" in url:
    #             reply.impression_double_click_status = response.text
    #             reply.save()
    #         elif "js_complete" in url:
    #             print("Check: ",response.text)
    #             reply.tracking_completed_status = response.text
    #             reply.save()

    #         else:

    #             reply.impression_double_click_status = response.text
    #             reply.save()
    def make_request_lego(url):
        from urllib.parse import urlparse, parse_qs

        parsed_url = urlparse(url)
        # Get the query parameters
        query_params = parse_qs(parsed_url.query)
        response = requests.get(url=url,headers=headers)

        print(response.status_code)
        return response


    verify_replies = 1
    # request_counts = 0
    # tracking_links = [
    #     "https://example.com",
    #     "https://example.com",
    #     "https://example.com",
    #     "http://examp.com/test"
    # ]
    tracking_links = {
        "City":[
            "https://ad.doubleclick.net/ddm/trackimp/N2359742.5267928STAMP0/B32770836.406082389;dc_trk_aid=598249260;dc_trk_cid=223908355;ord=[timestamp];dc_lat=;dc_rdid=;tag_for_child_directed_treatment=;tfua=;gdpr=$%7BGDPR%7D;gdpr_consent=$%7BGDPR_CONSENT_755%7D;ltd=;dc_tdv=1",
            # "https://tv.springserve.com/vast/850575?w=1920&h=1080&cb={{CACHEBUSTER}}&ip={{IP}}&ua={{USER_AGENT}}&pod_max_dur={{POD_MAX_DUR}}&pod_ad_slots={{POD_AD_SLOTS}}&app_bundle={{APP_BUNDLE}}&app_name={{APP_NAME}}&app_store_url={{APP_STORE_URL}}&did={{DEVICE_ID}}&us_privacy={{US_PRIVACY}}"
            ],
        "Ninja-GO":[
            "https://ad.doubleclick.net/ddm/trackimp/N2359742.5267928STAMP0/B32609683.406115909;dc_trk_aid=598248921;dc_trk_cid=223962908;ord=[timestamp];dc_lat=;dc_rdid=;tag_for_child_directed_treatment=;tfua=;gdpr=$%7BGDPR%7D;gdpr_consent=$%7BGDPR_CONSENT_755%7D;ltd=;dc_tdv=1",
            # "https://tv.springserve.com/vast/850576?w=1920&h=1080&cb={{CACHEBUSTER}}&ip={{IP}}&ua={{USER_AGENT}}&pod_max_dur={{POD_MAX_DUR}}&pod_ad_slots={{POD_AD_SLOTS}}&app_bundle={{APP_BUNDLE}}&app_name={{APP_NAME}}&app_store_url={{APP_STORE_URL}}&did={{DEVICE_ID}}&us_privacy={{US_PRIVACY}}",
        ],
        "Technic":[
            "https://ad.doubleclick.net/ddm/trackimp/N2359742.5267928STAMP0/B32623870.406579509;dc_trk_aid=598397817;dc_trk_cid=224003872;ord=[timestamp];dc_lat=;dc_rdid=;tag_for_child_directed_treatment=;tfua=;ltd=;dc_tdv=1"
            ],
            # "https://tv.springserve.com/vast/850577?w=1920&h=1080&cb={{CACHEBUSTER}}&ip={{IP}}&ua={{USER_AGENT}}&pod_max_dur={{POD_MAX_DUR}}&pod_ad_slots={{POD_AD_SLOTS}}&app_bundle={{APP_BUNDLE}}&app_name={{APP_NAME}}&app_store_url={{APP_STORE_URL}}&did={{DEVICE_ID}}&us_privacy={{US_PRIVACY}}",
            # "https://tv.springserve.com/vast/850577?w=1920&h=1080&cb={{CACHEBUSTER}}&ip={{IP}}&ua={{USER_AGENT}}&pod_max_dur={{POD_MAX_DUR}}&pod_ad_slots={{POD_AD_SLOTS}}&app_bundle={{APP_BUNDLE}}&app_name={{APP_NAME}}&app_store_url={{APP_STORE_URL}}&did={{DEVICE_ID}}&us_privacy={{US_PRIVACY}}"]
    }
    # tracking_links = {
    #     "ninjago": [
    #         "https://example.com",
    #         "https://example.com"
    #     ],
    #     "lego": [
    #         "https://example.com"
    #     ]
    # }
    csv_filename = "/var/www/html/DAI27/media/Vast-3.csv"
    # Loop through groups and track each link
    with ThreadPoolExecutor(max_workers=100) as executor:
        with open(csv_filename, 'w', newline='') as csvfile:
            writer = csv.writer(csvfile)
            writer.writerow(['Link', 'Status Code', 'Number of Requests'])
            for spot, urls in tracking_links.items():
                for url in urls:
                    # Submit `verify_replies` requests for this link
                    futures = [executor.submit(make_request_lego, url) for _ in range(verify_replies)]

                    # Track request count and status codes
                    request_count = 0
                    # Process each future result
                    for future in futures:
                        try:
                            res = future.result()
                            request_count += 1

                            # Save the result in the database
                            Trackinglink.objects.create(
                                spot=spot,
                                url=url,
                                status_code=res.status_code,
                                request_count=request_count
                            )

                            writer.writerow([url, res.status_code, request_count])
                        except Exception as e:
                            raise e
                            # print(f"Error processing request: {e}")

                        # Update or create the link in the database
                        # tracking_link, created = TrackingLink.objects.update_or_create(
                        #     spot=spot,
                        #     url=url,
                        #     defaults={
                        #         'status_code': status_code,
                        #         'request_count': request_counts
                        #     }
                        # )

                        # Write to the CSV file
                        # writer.writerow([url, status_code, request_counts])

    # request_counts = 0
    # with ThreadPoolExecutor(max_workers=100) as executor:
    #     futures = {
    #         executor.submit(make_request_lego, url): url for url in tracking_links for _ in range(verify_replies)
    #     }
    #     csv_filename = "/var/www/html/DAI27/media/Vast.csv"
    #     with open(csv_filename, 'a', newline='') as csvfile:
    #         writer = csv.writer(csvfile)
    #         for future in futures:
    #             try:
    #                 request_counts+=1
    #                 response = future.result()
    #                 writer.writerow([future.result(), response.status_code,request_counts],)
    #             except Exception as e:
    #                 writer.writerow([future.result(), 'Error'])
    #                 print(f"Error: {e}")


    # Create a ThreadPoolExecutor with max_workers=20
    # with ThreadPoolExecutor(max_workers=100) as executor:
    #     # Submit requests for various URLs
    #     futures = {
    #         executor.submit(make_requests, url,reply): url for reply in range(verify_replies) for url in [
    #             reply.tracking_start,
    #             reply.tracking_firstquartile,
    #             reply.tracking_midpoint,
    #             reply.tracking_thirdquartile,
    #             reply.impression_sprintserve,
    #             reply.impression_double_click,
    #             reply.tracking_complete


    #         ]
    #     }

        # Collect results
        # results = {futures[future]: future.result() for future in futures}


def run_task_view(request):
    from .tasks import sample_task
    sample_task.delay()  # Schedule the task to run in the background
    return HttpResponse("Task is running in the background!")




def update_vast_response_v3(request):


    from datetime import datetime
    import pytz
    from django.db.models import Q

    # Get the current date and time
    current_datetime = datetime.now()

    # Format the date in 'YYYY-MM-DD' format
    report_date = current_datetime.strftime('%Y-%m-%d')
    # verify_replies = VastResponse.objects.filter(
    #     datetime_timestamp__icontains=report_date,
    #     (Q(impression_sprintserve_status=None) | Q(impression_double_click_status=None))
    # )
    verify_replies = VastResponse.objects.filter(
        datetime_timestamp__icontains=report_date,
        tracking_start_status=None,
        tracking_firstquartile_status=None,
        tracking_midpoint_status=None,
        tracking_thirdquartile_status=None,
        #
        impression_sprintserve_status=None,
        impression_double_click_status=None)

    headers = {
        'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/103.0.0.0 Safari/537.36'
    }

    # Function to make requests asynchronously
    def make_requests(url,reply):
        from urllib.parse import urlparse, parse_qs
        parsed_url = urlparse(url)

        # Get the query parameters
        query_params = parse_qs(parsed_url.query)
        response = requests.get(url=url,headers=headers)
        if response.text is not None:
            # Get the value of the 'event' parameter
            event_value = query_params.get('event')

            if "js_start" in url :
                # print(reply)
                reply.tracking_start_status = response.text
                reply.save()
            elif "js_first_quartile" in url:
                reply.tracking_firstquartile_status = response.text
                reply.save()
            elif "js_midpoint" in url:
                reply.tracking_midpoint_status = response.text
                reply.save()
            elif "js_third_quartile" in url:
                reply.tracking_thirdquartile_status = response.text
                reply.save()
            elif "vast_impression" in url:
                reply.impression_sprintserve_status = response.text
                reply.save()
            elif "doubleclick" in url:
                reply.impression_double_click_status = response.text
                reply.save()
            elif "js_complete" in url:
                print("Check: ",response.text)
                reply.tracking_completed_status = response.text
                reply.save()

            else:

                reply.impression_double_click_status = response.text
                reply.save()





    # Create a ThreadPoolExecutor with max_workers=20
    with ThreadPoolExecutor() as executor:
        # Submit requests for various URLs
        futures = {
            executor.submit(make_requests, url,reply): url for reply in verify_replies for url in [
                # reply.tracking_start,
                # reply.tracking_firstquartile,
                # reply.tracking_midpoint,
                # reply.tracking_thirdquartile,
                reply.impression_sprintserve,
                reply.impression_double_click,
                # reply.tracking_complete

            ]
        }
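
# The if/elif chain in make_requests() above keys the VastResponse field to
# update off a substring of the tracking URL. The same mapping written as a
# lookup table, as a sketch of an alternative (not what runs above); dict order
# mirrors the original chain.
_SKETCH_EVENT_FIELDS = {
    "js_start": "tracking_start_status",
    "js_first_quartile": "tracking_firstquartile_status",
    "js_midpoint": "tracking_midpoint_status",
    "js_third_quartile": "tracking_thirdquartile_status",
    "vast_impression": "impression_sprintserve_status",
    "doubleclick": "impression_double_click_status",
    "js_complete": "tracking_completed_status",
}

def _sketch_field_for_url(url, default="impression_double_click_status"):
    for marker, field in _SKETCH_EVENT_FIELDS.items():
        if marker in url:
            return field
    return default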

class VastReport(APIView):

    def get(self,request):
        from datetime import datetime
        from .models import VastResponseGo

        today = datetime.now().strftime("%Y-%m-%d")

        campaigns_list = ["Too Good To Go 30s","Too Good To Go 15s"]
        campaigns = []
        for campaign_name in campaigns_list:
            campaign_obj = Campaigns.objects.get(name=campaign_name)
            campaigns.append(campaign_obj)
        vast_total_impressions = VastResponseGo.objects.filter(
            datetime_timestamp__contains=today,
            impression_sprintserve_status__isnull=False,
            ).count()

        # get yesterday impression to compare it with today
        prev_vast_total_impression = VastResponseGo.objects.filter(
            datetime_timestamp__contains=(datetime.now() - timedelta(days=1)).strftime("%Y-%m-%d"),
            impression_sprintserve_status__isnull=False,
            ).count()


        return render(request,"DAIManagementApp/vast_report.html",{
            "campaigns":campaigns,
            "vast_impression":vast_total_impressions,
            "prev_impression":prev_vast_total_impression})

    def post(self, request, format=None):

        from datetime import datetime
        from django.db.models import Count
        from django.db.models.functions import TruncDate


        start = request.POST.get("start", datetime.now().strftime("%Y-%m-%d"))
        end = request.POST.get("end", datetime.now().strftime("%Y-%m-%d"))
        # Get 'start' and 'end' dates from request or use current date as default
        if start and end:

            print("start: ",start)
            print("end: ",end)
            # Filter VastResponse objects by date range

            result = self.get_campaigns(start=start,end=end)
            return JsonResponse(result, safe=False)
        else:
            today = datetime.now().strftime("%Y-%m-%d")
            last_week = (datetime.now() - timedelta(days=7)).strftime("%Y-%m-%d")
            print(f"we will get campaigns from {last_week} until {today}")
            results = self.get_campaigns(start=last_week, end=today)
            return JsonResponse(results, safe=False)

    def get_campaigns(self,start,end):
        from django.db.models import Count
        from django.db.models.functions import TruncDate

        vast_queryset = VastResponse.objects.filter(
                datetime_timestamp__date__gte=start,
                datetime_timestamp__date__lte=end
            )

        # Group data by date
        vast_data_grouped = vast_queryset.annotate(date=TruncDate('datetime_timestamp')) \
                                        .values('date') \
                                        .annotate(date_count=Count('date')) \
                                        .order_by('date')

        result = []

        # Iterate over each date group
        for date_group in vast_data_grouped:
            date = date_group['date']
            date_count = date_group['date_count']
            campaigns_data = Adspots().get_spots_impressions(start_date=date)

            # Add data for the current date to the result list
            result.append({
                'date': date,
                'count': date_count,
                'campaigns': campaigns_data
                # [
                #     {'campaign_name': campaign, 'ad_spots': ad_spots}
                #     for campaign, ad_spots in campaigns_data.items()
                # ]
            })
        return result
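
    # Shape note: each row of `vast_data_grouped` above is a dict like
    # {'date': datetime.date(2024, 4, 17), 'date_count': 42}; `result` pairs that
    # daily count with Adspots().get_spots_impressions() for the same day.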


class VastChart(APIView):
    def get(self,request):
        campaigns_list = ["Too Good To Go 30s","Too Good To Go 15s"]
        campaigns = []
        for campaign_name in campaigns_list:
            campaign_obj = Campaigns.objects.get(name=campaign_name)
            campaigns.append(campaign_obj)
        print(campaigns)
        return render(request,"DAIManagementApp/vast_call.html",{"campaigns":campaigns})


class LoadCampaign(APIView):
    def get(self, request):
        campaign_name = request.GET.get("campaign_name")

        try:
            # Fetch campaign object
            campaign_obj = Campaigns.objects.get(name=campaign_name)

            # Retrieve spots and convert to list of dictionaries
            spots = Adspots.objects.filter(id_campaign=campaign_obj).values(
                'filename'  # Add other fields as needed
            )

            # Convert to list and return as JSON response
            return JsonResponse(list(spots), safe=False)

        except Campaigns.DoesNotExist:
            return JsonResponse({"error": "Campaign not found"}, status=404)

# def vastresponse(request):
#     # query = Adspots().get_impressions(start_date="2024-10-29")
#     query = Adspots().get_spots_impressions(start_date="2024-11-01")
#     return JsonResponse(query,safe=False)

class CallVast(APIView):


    def get(self,request):
        return render(request,"DAIManagementApp/vast_call.html",{"campaigns":self.get_campaigns()})

    def post(self,request):
        from .tasks import call_vast
        import requests
        from concurrent.futures import ThreadPoolExecutor

        campaign_name = request.POST.get("campaign_id")
        # campaign_name = "Friends"
        adspot = request.POST.get("adspots")
        num_request = int(request.POST.get("num_request") or 0)
        if campaign_name and adspot and num_request > 0:
            campaign_obj = Campaigns.objects.get(name=campaign_name)
            # end_date = datetime.strptime(campaign_obj.end_day, "%Y-%m-%d").date()
            # if end_date < datetime.today().date():
            #     print(f"Campaign: {campaign_name} is Expired!")
            # else:
            print("Campaign name: ", campaign_name)
            print("Campaign END DATE: ", campaign_obj.end_day)
            params = {
                'w': '720',
                'h': '567',
                'content_genre': "Dessin Anime",
                'content_title': "Abtal albihar",
                'language': 'ar-MA',
                'pod_max_dur': 20,
                'channel_name': '2M_TV',
                'country': 'France'
            }
            link_mapping = {
                "Too Good To Go 30s": "https://tv.springserve.com/vast/881543?w=1920&h=1080&cb={{CACHEBUSTER}}&ip={{IP}}&ua={{USER_AGENT}}&pod_max_dur={{POD_MAX_DUR}}&pod_ad_slots={{POD_AD_SLOTS}}&app_bundle={{APP_BUNDLE}}&app_name={{APP_NAME}}&app_store_url={{APP_STORE_URL}}&did={{DEVICE_ID}}&us_privacy={{US_PRIVACY}}",
                "Too Good To Go 15s": "https://tv.springserve.com/vast/881550?w=1920&h=1080&cb={{CACHEBUSTER}}&ip={{IP}}&ua={{USER_AGENT}}&pod_max_dur={{POD_MAX_DUR}}&pod_ad_slots={{POD_AD_SLOTS}}&app_bundle={{APP_BUNDLE}}&app_name={{APP_NAME}}&app_store_url={{APP_STORE_URL}}&did={{DEVICE_ID}}&us_privacy={{US_PRIVACY}}"
            }

            # * Alternative VAST links: uncomment the one you want to test, one at a time.

            #! Weaber
            # url = "https://tv.springserve.com/vast/769609"

            #! City
            # url = 'https://tv.springserve.com/vast/850575?w=1920&h=1080&cb={{CACHEBUSTER}}&ip={{IP}}&ua={{USER_AGENT}}&pod_max_dur={{POD_MAX_DUR}}&pod_ad_slots={{POD_AD_SLOTS}}&app_bundle={{APP_BUNDLE}}&app_name={{APP_NAME}}&app_store_url={{APP_STORE_URL}}&did={{DEVICE_ID}}&us_privacy={{US_PRIVACY}}'

            #! Ninjago
            # url = "https://tv.springserve.com/vast/850576?w=1920&h=1080&cb={{CACHEBUSTER}}&ip={{IP}}&ua={{USER_AGENT}}&pod_max_dur={{POD_MAX_DUR}}&pod_ad_slots={{POD_AD_SLOTS}}&app_bundle={{APP_BUNDLE}}&app_name={{APP_NAME}}&app_store_url={{APP_STORE_URL}}&did={{DEVICE_ID}}&us_privacy={{US_PRIVACY}}"

            #! Technic
            # url = "https://tv.springserve.com/vast/850577?w=1920&h=1080&cb={{CACHEBUSTER}}&ip={{IP}}&ua={{USER_AGENT}}&pod_max_dur={{POD_MAX_DUR}}&pod_ad_slots={{POD_AD_SLOTS}}&app_bundle={{APP_BUNDLE}}&app_name={{APP_NAME}}&app_store_url={{APP_STORE_URL}}&did={{DEVICE_ID}}&us_privacy={{US_PRIVACY}}"

            #! Star Wars
            # url = "https://tv.springserve.com/vast/852369?w=1920&h=1080&cb={{CACHEBUSTER}}&ip={{IP}}&ua={{USER_AGENT}}&pod_max_dur={{POD_MAX_DUR}}&pod_ad_slots={{POD_AD_SLOTS}}&app_bundle={{APP_BUNDLE}}&app_name={{APP_NAME}}&app_store_url={{APP_STORE_URL}}&did={{DEVICE_ID}}&us_privacy={{US_PRIVACY}}"

            # NOTE: headers is defined but not passed to call_vast below; it is
            # only needed when firing the commented-out links directly with requests.get().
            headers = {
                'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/103.0.0.0 Safari/537.36'
            }
            print("Link: ",link_mapping[campaign_name])
            vast = call_vast.delay(url=link_mapping[campaign_name],num_requests=num_request,params=params,spot=adspot)

        return render(request,"DAIManagementApp/vast_call.html",{'campaigns':self.get_campaigns()})

    def get_campaigns(self):
        campaigns_list = ["City", "Harry_Potter", "Technic", "StarWars"]
        campaigns = []
        for campaign_name in campaigns_list:
            try:
                campaigns.append(Campaigns.objects.get(name=campaign_name))
            except Campaigns.DoesNotExist:
                logger.warning("Campaign %s not found, skipping", campaign_name)
        return campaigns

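# Illustrative sketch only -- NOT the real .tasks.call_vast Celery task. It
# shows one plausible way to fill the SpringServe-style {{MACRO}} placeholders
# used in link_mapping above and fire the VAST tag several times. The default
# ip / user_agent / pod_max_dur values are made-up examples.
def call_vast_sketch(url, num_requests, params=None,
                     ip="203.0.113.10",
                     user_agent="Mozilla/5.0 (X11; Linux x86_64)",
                     pod_max_dur=20):
    import requests  # local import so the sketch stays self-contained

    statuses = []
    for _ in range(num_requests):
        filled = (url
                  .replace("{{CACHEBUSTER}}", str(random.randint(10**8, 10**9 - 1)))
                  .replace("{{IP}}", ip)
                  .replace("{{USER_AGENT}}", user_agent)
                  .replace("{{POD_MAX_DUR}}", str(pod_max_dur)))
        # Macros left unfilled ({{APP_BUNDLE}}, {{US_PRIVACY}}, ...) are sent
        # as-is here; a real integration would resolve or strip them.
        resp = requests.get(filled, params=params, timeout=10)
        statuses.append(resp.status_code)
    return statuses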

def update_report(request):
    # Sanity check: count the VAST responses recorded for one hard-coded spot.
    spot = Adspots.objects.get(filename="LegoPorsche.ts")
    vasts = VastResponse.objects.filter(ad_spot=spot)
    print(vasts.count())
    return JsonResponse({"filename": spot.filename, "vast_count": vasts.count()})


