import requests
import ssl
from requests.packages.urllib3.poolmanager import PoolManager
from requests.packages.urllib3.util import ssl_
from requests.adapters import HTTPAdapter
from requests.packages.urllib3.util.retry import Retry
import time
import json
import random
from LoggingFormatter import logger

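# Restricted OpenSSL cipher string applied to the custom TLS context built below.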
CIPHERS = "ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-SHA384:ECDHE-ECDSA-AES256-SHA384:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-SHA256:AES256-SHA"


class TlsAdapter(HTTPAdapter):
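    """HTTPAdapter that builds its connection pools with a custom SSL context (restricted ciphers, caller-supplied SSL options)."""
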
    def __init__(self, ssl_options=0, **kwargs):
        self.ssl_options = ssl_options
        super().__init__(
            pool_maxsize=5, max_retries=3, pool_block=False, **kwargs
        )

    def init_poolmanager(self, *pool_args, **pool_kwargs):
        ctx = ssl_.create_urllib3_context(
            ciphers=CIPHERS, cert_reqs=ssl.CERT_REQUIRED, options=self.ssl_options
        )
        self.poolmanager = PoolManager(*pool_args, ssl_context=ctx, **pool_kwargs)


class RequestManager(object):
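    """Routes GET/POST requests through the supplied sessions and several proxy providers, rotating User-Agent headers for the proxied requests."""
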
    def __init__(self, all_sessions, webshare_ips):
        self.all_sessions = all_sessions
        self.ua_dict = self.prepareUserAgents()
        self.webshare_ips = webshare_ips
        self.adapter = TlsAdapter(ssl.OP_NO_TLSv1 | ssl.OP_NO_TLSv1_1)

    def getAWSSession(self):
        return random.choice(self.all_sessions)

    def prepareUserAgents(self):
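        # Load the user-agent pool from user_agents.json (path is relative to the working directory).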
        with open("user_agents.json") as f:
            uas = json.load(f)
        return uas

    def getRandomUAEntry(self):
        return random.choice(list(self.ua_dict.values()))

    def getNewSession(self):
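        # Fresh session with the TLS adapter mounted, so every HTTPS request uses the restricted cipher/option set.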
        s = requests.Session()
        s.mount("https://", self.adapter)
        return s

    def getRequest(self, url, without_aws=False):
        # Steam community URLs go through the AWS sessions first, then fall back to the proxy providers.
        if str(url).startswith("https://steamcommunity.com") and without_aws is False:
            logger.warning("Trying to get: " + str(url) + " with AWS")
            for i in range(2):
                try:
                    aws_response = self.getAWSSession().get(url, timeout=15)
                except Exception as e:
                    logger.critical("Exception in getRequest (AWS): " + str(e))
                    aws_response = None
                if aws_response is not None and aws_response.status_code == 200:
                    return aws_response
                time.sleep(i + 0.5)

        without_aws_response = self.getRequestWithoutAWS(url)
        if without_aws_response is not None and without_aws_response.status_code == 200:
            return without_aws_response

        logger.critical("All requests failed with url: " + str(url) + " - repeating...")
        time.sleep(3)
        return None

    def getRequestOnlyAWS(self, url, repeats):
        logger.warning("Trying to get: " + str(url) + " with AWS")
        for i in range(1, repeats + 1):
            try:
                aws_response = self.getAWSSession().get(url, timeout=15)
            except Exception as e:
                logger.critical(
                    "getRequestOnlyAWS() request failed with " + str(url) + " - " + str(e)
                )
                aws_response = None
            if aws_response is None:
                if i == repeats:
                    return None
                continue
            if aws_response.status_code != 200:
                continue
            try:
                json_page = aws_response.json()
            except Exception as e:
                logger.critical(
                    "getRequestOnlyAWS() to json failed with "
                    + str(url)
                    + " - "
                    + str(e)
                )
                continue
            if len(json_page["listinginfo"]) == 0:
                # Empty listing page: back off and retry; 1337 is the sentinel for "reachable but empty".
                time.sleep(i)
                if i == repeats:
                    time.sleep(1)
                    return 1337
                time.sleep(2)
                continue
            return aws_response
        return None

    def getRequestWithoutAWS(self, url):
        return self.getRequestIPRoyal(url)

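    # Each provider helper below builds a fresh TLS session, sets a random User-Agent and the
    # provider's proxy, and returns the response only when the status code is 200.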
    def getRequestIPRoyal(self, url):
        logger.warning("Trying to get: " + str(url) + " with IPRoyal")
        s = self.getNewSession()
        iproyal_proxy = "http://alex133769:mArgare1he_region-europe_streaming-1@geo.iproyal.com:12321"
        random_ua_entry = self.getRandomUAEntry()
        s.headers.update({"User-Agent": random_ua_entry[1]})
        s.proxies.update({"https": iproyal_proxy, "http": iproyal_proxy})
        try:
            response = s.get(url, timeout=10)
            if response.status_code != 200:
                return None
            return response
        except Exception as e:
            logger.critical("Exception in getRequestIPRoyal: " + str(e))
        return None

    def getRequestWebshare(self, url):
        logger.warning("Trying to get: " + str(url) + " with Webshare")
        s = self.getNewSession()
        random_proxy = random.choice(self.webshare_ips)
        random_ua_entry = self.getRandomUAEntry()
        s.headers.update({"User-Agent": random_ua_entry[1]})
        s.proxies.update({"https": random_proxy, "http": random_proxy})
        try:
            response = s.get(url, timeout=10)
            if response.status_code != 200:
                return None
            return response
        except Exception as e:
            logger.critical("Exception in getRequestWebshare: " + str(e))
        return None

    def getRequestProxyCheap(self, url):
        logger.warning("Trying to get: " + str(url) + " with ProxyCheap")
        s = self.getNewSession()
        proxycheap_proxy = "http://bkxwrzbp:25vLo8CZESG3alvb@209.38.175.10:31112"
        random_ua_entry = self.getRandomUAEntry()
        s.headers.update({"User-Agent": random_ua_entry[1]})
        s.proxies.update({"https": proxycheap_proxy, "http": proxycheap_proxy})
        try:
            response = s.get(url, timeout=10)
            if response.status_code != 200:
                return None
            return response
        except Exception as e:
            logger.critical("Exception in getRequestIPRoyal: " + str(e))
        return None

    def getRequestProxyScrape(self, url):
        logger.warning("Trying to get: " + str(url) + " with Proxyscrape")
        s = self.getNewSession()
        proxyscrape_proxy = "http://che4pvco7u1h1db:bvt6yy7wrahb637@rp.proxyscrape.com:6060"
        random_ua_entry = self.getRandomUAEntry()
        s.proxies.update({"https": proxyscrape_proxy, "http": proxyscrape_proxy})
        s.headers.update({"User-Agent": random_ua_entry[1]})
        try:
            response = s.get(url, timeout=10)
            if response.status_code != 200:
                return None
            return response
        except Exception as e:
            logger.critical("Exception in getRequestIPRoyal: " + str(e))
        return None

    def getRequestNaked(self, url) -> requests.Response:
        s = self.getNewSession()
        r = s.get(url)
        return r

    def postRequestNaked(self, url, _json) -> requests.Response:
        s = self.getNewSession()
        r = s.post(url, json=_json)
        return r
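

# Minimal usage sketch, not part of the original flow: the sessions and Webshare proxy list are
# normally prepared by the caller, and "user_agents.json" must exist in the working directory.
if __name__ == "__main__":
    sessions = [requests.Session()]  # placeholder for the sessions (the "AWS" sessions) passed in by callers
    webshare_ips = []  # placeholder list of Webshare proxy URLs
    manager = RequestManager(sessions, webshare_ips)
    response = manager.getRequestNaked("https://example.com")
    logger.warning("Status: " + str(response.status_code))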
