# import os
# import sys


# sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '../..')))

# from services.notifications.sms_service import SMSService

# SMSService.send_sms("+16784162650", """
# Hey Andrew, our team has been trying to verify your post by attempting to follow you on Instagram from @alixtheverifier.
# To receive your reward, please accept the follow request 😄
# """)

import os
import sys
import time


sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '../..')))
import datetime

from environment import Environment

from services.firebase.firestore_service import FirestoreService

from google.cloud import storage as google_storage

# Firestore client used for every user query below.
firebase_service = FirestoreService()

# Stream every user document in the database so their accepted offers can be scanned.
users = firebase_service.firestore_client.collection("users").stream()

# Accumulates dicts with dateAccepted, phoneNumber, instagramHandle, offerID,
# clientID, finalValidation, firstName, ugcURLsFound for recent offers that
# have UGC uploaded but were never validated.
potentially_missed_offers = []

# Storage prefixes that hold user-generated-content uploads.
# Plain string literals: the original f-prefixes had no placeholders (F541).
paths = ["Instagram-UGC/", "UGC-Offer-UGC/"]

def get_all_ugc_file_names_and_urls():
    """Return all UGC files stored in the project's GCS bucket.

    Lists every blob under each prefix in ``paths``, drops directory
    placeholder entries, and returns a list of ``{"name": ..., "url": ...}``
    dicts ordered newest-first by blob creation time.
    """
    bucket_name = f"{Environment.GCLOUD_PROJECT_ID}.appspot.com"
    cred_path = os.path.join(
            os.path.dirname(__file__), "..", "..", Environment.firebase_creds_filename
        )
    storage_client = google_storage.Client.from_service_account_json(cred_path)

    # Gather (public_url, time_created) pairs across every prefix.
    url_time_pairs = []
    for prefix in paths:
        for blob in storage_client.list_blobs(bucket_name, prefix=prefix):
            # Names ending in "/" are directory placeholders, not real files.
            if blob.name.endswith("/"):
                continue
            url_time_pairs.append((blob.public_url, blob.time_created))

    # Newest uploads first.
    url_time_pairs.sort(key=lambda pair: pair[1], reverse=True)

    # Expose each file as its basename plus its public download URL.
    return [
        {"name": url.rsplit("/", 1)[-1], "url": url}
        for url, _created in url_time_pairs
    ]

ugc_files = get_all_ugc_file_names_and_urls()

# Internal/test Instagram handles that must never be reported.
EXCLUDED_HANDLES = {"wil.geller", "xplenture", "yuvansundrani", "bransonnotbrandon", "storyittesting"}

# Hoisted loop invariant: the 30-day cutoff was previously recomputed for
# every offer of every user.
cutoff = datetime.datetime.now() - datetime.timedelta(days=30)

# Iterate through all users and collect offers accepted within the past month
# that have not passed final validation but already have UGC files uploaded.
for user in users:
    user_data = user.to_dict()
    for offer in user_data.get("acceptedOffers", []):
        if "UTCTimeAccepted" not in offer:
            continue
        # UTCTimeAccepted is milliseconds since the epoch (e.g. 1708324855352).
        date_accepted = datetime.datetime.fromtimestamp(offer["UTCTimeAccepted"] / 1000)
        # Only offers within the past month, not yet validated, and not from an
        # excluded handle. `.get(..., True)` skips offers missing the
        # finalValidation key instead of raising KeyError as before.
        if (date_accepted > cutoff
                and offer.get("finalValidation", True) == False
                and user_data["instagramHandle"] not in EXCLUDED_HANDLES):
            # Invariant per user: strip the +1 country prefix once, not once
            # per UGC file as the original inner loop did.
            formatted_phone_number = user_data["phoneNumber"].replace("+1", "")
            # A UGC file belongs to this offer when its name carries the
            # user's phone number, the client ID, and the offer ID.
            # (Per-file debug prints of name/clientID/phone removed.)
            ugcUrlsFound = [
                ugc_file["url"]
                for ugc_file in ugc_files
                if formatted_phone_number in ugc_file["name"]
                and offer["clientID"] in ugc_file["name"]
                and offer["offerID"] in ugc_file["name"]
            ]
            print(ugcUrlsFound)

            if ugcUrlsFound:
                potentially_missed_offers.append({
                    # Human-readable timestamp for the exported report.
                    "dateAccepted": date_accepted.strftime("%m/%d/%Y, %H:%M:%S"),
                    "clientID": offer["clientID"],
                    "phoneNumber": user_data["phoneNumber"],
                    "instagramHandle": user_data["instagramHandle"],
                    "firstName": user_data["firstName"],
                    "offerID": offer["offerID"],
                    "finalValidation": offer["finalValidation"],
                    "ugcURLsFound": ugcUrlsFound,
                })

# BUG FIX: the original sorted by the formatted "%m/%d/%Y, ..." string, which
# orders lexicographically (month first) and is wrong across year boundaries
# (e.g. "01/05/2024" sorts before "12/31/2023" chronologically but not as a
# string). Parse the string back to a datetime so the sort is chronological.
potentially_missed_offers.sort(
    key=lambda o: datetime.datetime.strptime(o["dateAccepted"], "%m/%d/%Y, %H:%M:%S")
)

print(potentially_missed_offers)
                
# export list of potentially missed offers to a text file
# Export the potentially missed offers to a text file: one numbered entry per
# offer, separated by a blank line. enumerate() replaces the manual counter.
with open("potentially_missed_offers.txt", "w") as file:
    for i, offer in enumerate(potentially_missed_offers):
        file.write(f"{i}). {offer} \n\n")

