import csv
import re
import time

import cloudscraper
import requests
from lxml import html


def simple_cloud_scraper(link):
    """GET *link* through a Cloudflare-aware scraper session and return the response.

    Spoofs a desktop Firefox-on-Linux profile so Cloudflare challenge pages
    are solved transparently by cloudscraper.
    """
    browser_profile = {
        'browser': 'firefox',
        'platform': 'linux',
        'mobile': False,
    }
    session = cloudscraper.create_scraper(browser=browser_profile)
    return session.get(link)


# Full FIFA localisation data lives at: 'https://www.ea.com/fifa/ultimate-team/web-app/loc/en-US.json?_=22096'
def get_nations_list():
    """Scrape FUTBIN's search-filter page and return the nation filters.

    Returns a list of ``{'name': ..., 'id': ...}`` dicts. The first two
    matched anchors are skipped (placeholder entries, per the original
    implementation).
    """
    response = simple_cloud_scraper('https://www.futbin.com/24/sFilters?mobile=0&platform=ps4')
    tree = html.fromstring(response.text)
    anchors = tree.xpath('//ul[*[@id="resultNation"]]//ul//a')[2:]
    return [
        {'name': anchor.xpath('./span/text()')[0], 'id': anchor.xpath('./@data-value')[0]}
        for anchor in anchors
    ]


def get_leagues_list():
    """Scrape FUTBIN's search-filter page and return the league filters.

    Returns a list of ``{'name': ..., 'id': ...}`` dicts; the first two
    matched anchors are skipped. The display name comes from the anchor's
    <span> when present, otherwise from the anchor's own text nodes.
    """
    response = simple_cloud_scraper('https://www.futbin.com/24/sFilters?mobile=0&platform=ps4')
    tree = html.fromstring(response.text)
    leagues = []
    for anchor in tree.xpath('//a[@data-filter-type="league"]')[2:]:
        span_text = anchor.xpath('./span/text()')
        name = span_text[0] if span_text else ''.join(anchor.xpath('./text()'))
        leagues.append({'name': name, 'id': anchor.xpath('./@data-value')[0]})
    return leagues


def get_clubs_list():
    """Scrape FUTBIN's search-filter page and return the club filters.

    Returns a list of ``{'name': ..., 'id': ...}`` dicts. The display name
    comes from the anchor's <span> when present, otherwise from the anchor's
    own text nodes.
    """
    response = simple_cloud_scraper('https://www.futbin.com/24/sFilters?mobile=0&platform=ps4')
    tree = html.fromstring(response.text)
    clubs = []
    for anchor in tree.xpath('//a[@data-filter-type="club"]'):
        span_text = anchor.xpath('./span/text()')
        name = span_text[0] if span_text else ''.join(anchor.xpath('./text()'))
        clubs.append({'name': name, 'id': anchor.xpath('./@data-value')[0]})
    return clubs


def get_clubs_list_2():
    """Fetch club names from EA's localisation JSON.

    Reads every key of the form ``global.teamabbr15.2024.team<id>`` and
    returns a list of ``{'id': int, 'name': str}`` dicts sorted by id.
    When a name repeats (men's and women's teams share an abbreviation),
    the later occurrence gets a ' ( women )' suffix — same output as the
    original implementation.
    """
    prefix = 'global.teamabbr15.2024'
    result = requests.get('https://www.ea.com/ea-sports-fc/ultimate-team/web-app/loc/en-US.json')
    # (id, name) tuples replace the original's single-key-dict + popitem dance.
    # NOTE(review): assumes every key starting with the prefix continues with
    # '.team<digits>' — otherwise int() raises, exactly as the original did.
    teams = [
        (int(key.replace(prefix + '.team', '')), value)
        for key, value in result.json().items()
        if key.startswith(prefix)
    ]
    club_list = []
    seen_names = set()  # O(1) duplicate check instead of scanning a growing list
    for team_id, item_name in sorted(teams):
        if item_name in seen_names:
            item_name = str(item_name) + ' ( women )'
        club_list.append({'id': team_id, 'name': item_name})
        seen_names.add(item_name)

    return club_list


def get_rarities_list():
    """Fetch player-rarity names from EA's localisation JSON.

    Returns a list of ``{'id': ..., 'name': ...}`` dicts built from every
    key starting with 'FUT_POA_PLAYER_RARITY'.
    """
    result = requests.get('https://www.ea.com/ea-sports-fc/ultimate-team/web-app/loc/en-US.json?_=22153')
    return [
        {'id': key.replace('FUT_POA_PLAYER_RARITY_', ''), 'name': value}
        for key, value in result.json().items()
        if key.startswith('FUT_POA_PLAYER_RARITY')
    ]


# Compiled once at module scope: ids are extracted from image URLs such as
# '.../players/<id>.png'. Raw strings fix the invalid '\.' escape warnings.
_ASSET_ID_RE = re.compile(r'/players/(.+)\.png')
_CLUB_ID_RE = re.compile(r'/clubs/(.+)\.png')
_NATION_ID_RE = re.compile(r'/nation/(.+)\.png')
_LEAGUE_ID_RE = re.compile(r'/league/(.+)\.png')

_CSV_HEADER = ["Name", "Rating", "Rarity", "PreferredPosition", "Nation", "League", "Team",
               "LastSalePrice", "DiscardValue", "Untradeable", "Loans", "DefinitionId",
               "IsDuplicate", "IsInActive11", "AlternatePositions", "ExternalPrice", "IsExists"]


def _parse_player_row(tr_item):
    """Turn one <tr> element of the FUTBIN players table into a CSV row (list)."""
    asset_id = _ASSET_ID_RE.search(tr_item.xpath("./td[2]/div[1]/img/@data-original")[0]).group(1)
    rare_flag = str(tr_item.xpath("./td[3]/span/@class")[0]).replace('form rating ut24', '')
    # 'non-rare' must be tested first: 'rare' is a substring of 'non-rare'.
    if 'non-rare' in rare_flag:
        rare_flag = 0
    elif 'rare' in rare_flag:
        rare_flag = 1
    rating = tr_item.xpath("./td[3]//text()")[0]
    preferred_position = tr_item.xpath("./td[4]/div[1]/text()")[0]
    possible_positions = tr_item.xpath("./td[4]/div[2]/text()")
    possible_positions = str(possible_positions[0]).split(',') if possible_positions else []
    team = _CLUB_ID_RE.search(tr_item.xpath(".//span[@class='players_club_nation']/a[1]/img/@src")[0]).group(1)
    nation = _NATION_ID_RE.search(tr_item.xpath(".//span[@class='players_club_nation']/a[2]/img/@src")[0]).group(1)
    league = _LEAGUE_ID_RE.search(tr_item.xpath(".//span[@class='players_club_nation']/a[3]/img/@src")[0]).group(1)
    cost = str(tr_item.xpath("./td[6]/span/text()")[0]).strip()
    if 'K' in cost:
        # Prices like '2.5K' are abbreviated thousands.
        cost = float(cost.replace('K', '')) * 1000
    cost = int(cost)
    possible_positions.append(preferred_position)
    return [
        asset_id,
        rating,
        rare_flag,
        preferred_position,
        str(nation),
        str(league),
        str(team),
        0,              # LastSalePrice
        0,              # DiscardValue
        False,          # Untradeable
        False,          # Loans
        0,              # DefinitionId
        False,          # IsDuplicate
        False,          # IsInActive11
        ','.join(possible_positions),
        cost,
        0,              # IsExists
    ]


def futbin_players_crowler():
    """Crawl FUTBIN player listing pages and append rows to futbin_input.csv.

    Writes a '|'-delimited header, then walks pages 1..559 of the filtered
    player list (cheap bronze/silver/gold cards, 10-5000 coins on PS),
    parsing each table row into a CSV record. Sleeps 3s between pages to
    stay polite to the server. Network-bound; returns nothing.
    """
    futbin_input_file = 'futbin_input.csv'
    # Explicit encoding: don't depend on the platform's locale default.
    with open(futbin_input_file, 'w', newline='', encoding='utf-8') as output_file:
        csv.writer(output_file, delimiter='|').writerow(_CSV_HEADER)
    scraper = cloudscraper.create_scraper(browser={
        'browser': 'firefox',
        'platform': 'linux',
        'mobile': False
    })
    for page_num in range(1, 560):
        link = f'https://www.futbin.com/players?page={page_num}&pos_type=all&ps_price=10-5000&version=silver_rare,gold_rare,bronze_rare,bronze_nr,gold_nr,silver_nr'
        resp = scraper.get(link)
        tree = html.fromstring(resp.content)
        trs = tree.xpath('//table[@id="repTb"]/tbody/tr[contains(@class,"player_tr")]')
        players_list = [_parse_player_row(tr_item) for tr_item in trs]
        for row in players_list:
            # row layout: see _parse_player_row / _CSV_HEADER.
            print('page : ', page_num, ' --- ', row[0], row[1], row[3],
                  row[14].split(','), row[6], row[4], row[5])
        with open(futbin_input_file, 'a', newline='', encoding='utf-8') as output_file:
            csv.writer(output_file, delimiter='|').writerows(players_list)
        time.sleep(3)


if __name__ == '__main__':
    # Ad-hoc driver: dump (id, name) pairs for every club from the EA JSON.
    pairs = [(club['id'], club['name']) for club in get_clubs_list_2()]
    print(pairs)
