from newMarketBot import NewMarketBot
from requests_ip_rotator import ApiGateway, ALL_REGIONS
import time
import random
import requests
import queue
import psycopg2
import threading
import socket

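# References to every thread started by this script (kept for bookkeeping/inspection).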
all_threads = []

class MyThread(threading.Thread):
    """Worker thread that repeatedly calls target_func(*args) until stop() is called."""

    def __init__(self, name, target_func, *args):
        super().__init__(name=name)
        self.target_func = target_func
        self.args = args
        self.stop_flag = threading.Event()

    def run(self):
        # Re-invoke the target until the stop flag is set; long-running targets
        # (functions with their own while-True loop) simply block here.
        while not self.stop_flag.is_set():
            print(f"Thread {self.name} is running")
            self.target_func(*self.args)
            time.sleep(1)
        print(f"Thread {self.name} stopped")

    def stop(self):
        self.stop_flag.set()

def stop_thread(thread_name):
    # Find the named thread and signal it to stop; only MyThread instances expose stop().
    for thread in threading.enumerate():
        if thread.name == thread_name and isinstance(thread, MyThread):
            thread.stop()
            print(f"Stop requested for thread {thread_name}")
            break


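# Heartbeat loop: periodically report the names of all live threads over the monitoring socket.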
def send_thread_names(client_socket):
    while True:
        # Get the names of active threads
        active_thread_names = [thread.name for thread in threading.enumerate()]
        # Send the names of active threads to the monitoring program
        client_socket.send(",".join(active_thread_names).encode())
        time.sleep(5)  # Adjust the interval as needed

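# Monitor server: accepts a single local connection and prints the thread-name reports it receives.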
def startSocket():
    # Communication setup
    server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    server_socket.bind(('localhost', 5002))
    server_socket.listen(1)
    c_socket, address = server_socket.accept()

    print("Monitoring program running...")

    try:
        while True:
            # Receive the names of active threads from the scraper
            active_thread_names = c_socket.recv(1024).decode()
            if len(active_thread_names) == 0:
                # An empty read means the peer disconnected
                break
            print(f"Active threads: {active_thread_names}")
    except Exception as e:
        print(str(e))
    finally:
        c_socket.close()
        server_socket.close()

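# Run the monitoring server in the background; daemon=True lets the process exit without waiting for it.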
socket_thread = MyThread("socket_thread", startSocket)
#socket_thread.args = (startSocket,)
socket_thread.daemon = True
socket_thread.start()
all_threads.append(socket_thread)

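# One requests.Session per AWS region: each mounts a requests_ip_rotator ApiGateway adapter for
# steamcommunity.com, so matching requests are routed through regional API Gateway endpoints
# and rotate their source IPs.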
NEW_ALL_REGIONS = ALL_REGIONS + ["ap-south-2", "ap-southeast-3", "ap-southeast-4", "ca-west-1", "eu-central-2", "eu-south-2", "il-central-1", "me-central-1"]
all_sessions = []
for region in NEW_ALL_REGIONS:
  gateway = ApiGateway("https://steamcommunity.com", access_key_id="AKIAVRYQ3XK3I75DLJEH", access_key_secret="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", regions=[str(region)])
  gateway.start()
  aws_session = requests.Session()
  aws_session.mount("https://steamcommunity.com", gateway)
  all_sessions.append(aws_session)

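# Queue worker: takes single listings off csgofloat_queue, skips skins already present in the
# floats table, otherwise re-checks the float rank and evaluates the listing for a potential buy.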
def handleCsgofloatQueue(csgofloat_queue: queue.Queue, webshare_ips: list):
  while True:
    if csgofloat_queue.qsize() <= 0:
      time.sleep(random.uniform(0.5, 1))
      continue

    skin_in_db = True
    time.sleep(random.uniform(0.5, 0.7))
    single_elem = csgofloat_queue.get()
    time.sleep(random.uniform(0.5, 0.7))
    # Unpack the listing tuple; the trailing fields are currently unused but document its layout.
    price = single_elem[0]
    market_link = single_elem[1]
    single_full_item_name = single_elem[2]
    single_inspect_link = single_elem[3]
    m = single_elem[4]
    s = single_elem[5]
    high_rank_in_lowfloat = single_elem[6]
    low_rank_in_highfloat = single_elem[7]
    high_low = single_elem[8]

    # Skip skins whose float rank is already stored in the floats table.
    postgresql_conn = None
    try:
      postgresql_conn = psycopg2.connect(database="postgres", user="postgres", password="Berufsorientierung1!", host="23.88.122.57", port="5432")
      postgresql_cur = postgresql_conn.cursor()
      postgresql_cur.execute('SELECT rank FROM floats WHERE inspect_link = %s', (single_inspect_link,))
      rank_db = postgresql_cur.fetchone()
      if rank_db is None:
        print("SKIN NOT IN DB, CONTINUE")
        skin_in_db = False
      else:
        print("SKIN IN DB, SKIP")
      postgresql_cur.close()
    except (Exception, psycopg2.DatabaseError) as error:
      print(error)
    finally:
      try:
        if postgresql_conn is not None:
          postgresql_conn.close()
      except Exception as e:
        print(str(e))

    if skin_in_db is False:
      nm = NewMarketBot("highlow", "Queue Thread", webshare_ips, all_sessions, False, queue.Queue(), queue.Queue())
      final_listing = nm.singleCheckCsgofloatRank(market_link, single_full_item_name, single_inspect_link, m, price)
      # None and the sentinel string both mean the listing should be skipped.
      if final_listing is None or final_listing == "neger":
        continue
      nm.singleCheckForPotentialBuy(final_listing, single_inspect_link, 1337)

def handleListingsQueue(listings_queue: queue.Queue, webshare_ips, csgofloat_queue):
  while True:
    if listings_queue.qsize() <= 0:
      time.sleep(random.uniform(0.5, 1))
      continue

    # Each queue item wraps one bulk list of listing dicts; the first entry's high_low flag
    # is used for the whole batch.
    bulk_list = listings_queue.get()[0]
    high_low = list(bulk_list[0].values())[0]["high_low"]
    nm = NewMarketBot(str(high_low), "Best Thread", webshare_ips, all_sessions, False, queue.Queue(), csgofloat_queue)
    nm.getBestOrWorstSkinsBulk(bulk_list)

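# Fetch the Webshare proxy list ("ip:port:username:password" per line) and build
# requests-compatible "http://username:password@ip:port" proxy URLs.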
def getWebshareProxies() -> list:
    ip_addresses = []

    response = requests.get(
        "https://proxy.webshare.io/api/v2/proxy/list/download/hdaovifqwgapnzijunmiptygnyrtyqaeyvvvqgdo/-/any/username/direct/"
    )
    for line in response.text.splitlines():
        parts = line.strip().split(":")
        # Each line is "ip:port:username:password"; skip blank or malformed lines.
        if len(parts) == 4 and parts[0] != "":
            full_ip = "http://" + parts[2] + ":" + parts[3] + "@" + parts[0] + ":" + parts[1]
            ip_addresses.append(full_ip)
    return ip_addresses

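# Main driver: connect to the monitor, start the queue worker pools, load the weapon list,
# then alternate "high" and "low" float scans indefinitely.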
def startScraper(filename: str, bs_and_fn: bool):
    client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    client_socket.connect(('localhost', 5002))
    send_thread_names_thread = MyThread("send_thread_names_thread", send_thread_names, client_socket)
    #send_thread_names_thread = threading.Thread(target=send_thread_names, args=(client_socket,))
    send_thread_names_thread.daemon = True
    send_thread_names_thread.start()
    all_threads.append(send_thread_names_thread)


    webshare_ips = getWebshareProxies()

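    # Workers that drain csgofloat_queue: each checks a candidate against the floats table and
    # evaluates it for a potential buy.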
    csgofloat_queue = queue.Queue()
    for i in range(3):
        t = threading.Thread(target=handleCsgofloatQueue, args=(csgofloat_queue, webshare_ips,))
        t.name = "csfloat_queue_thread" + str(i)
        t.daemon = True
        t.start()


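    # Workers that drain listings_queue: bulk batches are scanned by NewMarketBot, which is
    # given csgofloat_queue so candidates can be passed along.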
    listings_queue = queue.Queue()
    for i in range(3):
      t_l = threading.Thread(target=handleListingsQueue, args=(listings_queue, webshare_ips,csgofloat_queue,))
      t_l.name = "listings_queue_thread" + str(i)
      t_l.daemon = True
      t_l.start()

    with open(filename, "r") as f:
        lines = f.readlines()

    lines = [x.strip() for x in lines]

    extended_scrape = False

    while True:
        if bs_and_fn:
            floatval = "high"
            runThreads(weapon_list=lines, high_or_low=floatval, webshare_ips=webshare_ips, all_sessions=all_sessions, extended_scrape=extended_scrape, listings_queue=listings_queue, csgofloat_queue=csgofloat_queue)

            floatval = "low"
            runThreads(weapon_list=lines, high_or_low=floatval, webshare_ips=webshare_ips, all_sessions=all_sessions, extended_scrape=extended_scrape, listings_queue=listings_queue, csgofloat_queue=csgofloat_queue)
            time.sleep(120)
        else:
            # No scan configured for this mode; sleep instead of busy-looping.
            time.sleep(120)


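# Spawn one MyThread per weapon, staggering starts by 20 seconds, then join them.
# MyThread.run() only returns after stop() is called, so the joins block until the threads are stopped externally.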
def runThreads(weapon_list: list, high_or_low: str, webshare_ips: list, all_sessions: list, extended_scrape: bool, listings_queue: queue.Queue, csgofloat_queue: queue.Queue):
  threads = []
  for line in weapon_list:
    m = NewMarketBot(high_or_low, str(line.rstrip()), webshare_ips, all_sessions, extended_scrape, listings_queue, csgofloat_queue)
    t = MyThread(str(line), m.startBot)
    all_threads.append(t)
    #t = threading.Thread(target=m.startBot)
    #t.name = str(line)
    threads.append(t)


  for thread in threads:
    thread.start()
    time.sleep(20)

  for thread in threads:
    thread.join()

if __name__ == "__main__":
  #Runs the scraper for all weapons
  startScraper(filename="list/tag_weapon_list.txt", bs_and_fn=True)

    
