diff --git a/src/ScreepsUserTracker/API/API.csproj b/src/ScreepsUserTracker/API/API.csproj
new file mode 100644
index 0000000..6a82a8f
--- /dev/null
+++ b/src/ScreepsUserTracker/API/API.csproj
@@ -0,0 +1,19 @@
+<Project Sdk="Microsoft.NET.Sdk">
+
+  <PropertyGroup>
+    <TargetFramework>net8.0</TargetFramework>
+    <ImplicitUsings>enable</ImplicitUsings>
+    <Nullable>enable</Nullable>
+  </PropertyGroup>
+
+  <ItemGroup>
+    <ProjectReference Include="..\Shared\Shared.csproj" />
+  </ItemGroup>
+
+  <ItemGroup>
+    <None Update="proxies.txt">
+      <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
+    </None>
+  </ItemGroup>
+
+</Project>
diff --git a/src/ScreepsUserTracker/API/HttpClientManager.cs b/src/ScreepsUserTracker/API/HttpClientManager.cs
new file mode 100644
index 0000000..a296e10
--- /dev/null
+++ b/src/ScreepsUserTracker/API/HttpClientManager.cs
@@ -0,0 +1,48 @@
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Net;
+using System.Text;
+using System.Threading.Tasks;
+
+namespace API
+{
+    /// <summary>
+    /// Factory for <see cref="HttpClient"/> instances, optionally routed through an HTTP proxy.
+    /// </summary>
+    public static class HttpClientManager
+    {
+        /// <summary>
+        /// Creates a new <see cref="HttpClient"/>. When <paramref name="proxyUrl"/> is supplied,
+        /// every request goes through that proxy; local addresses bypass it.
+        /// </summary>
+        /// <param name="proxyUrl">Proxy address (e.g. "host:port"), or null for a direct connection.</param>
+        /// <returns>A configured client; the caller owns it and should dispose it.</returns>
+        public static HttpClient CreateHttpClient(string? proxyUrl = null)
+        {
+            var handler = new HttpClientHandler();
+
+            if (!string.IsNullOrEmpty(proxyUrl))
+            {
+                handler.Proxy = new WebProxy(proxyUrl) { BypassProxyOnLocal = true };
+                handler.UseProxy = true;
+            }
+            else
+            {
+                handler.UseProxy = false;
+            }
+
+            var httpClient = new HttpClient(handler);
+
+            // Advertise compression support to the server.
+            // NOTE(review): HttpClientHandler.AutomaticDecompression is not enabled here, so a
+            // response carrying Content-Encoding gzip/deflate is NOT transparently decompressed;
+            // callers (see HttpHelper) gunzip payloads themselves — confirm this is intended.
+            var acceptEncoding = httpClient.DefaultRequestHeaders.AcceptEncoding;
+            acceptEncoding.Add(new System.Net.Http.Headers.StringWithQualityHeaderValue("gzip"));
+            acceptEncoding.Add(new System.Net.Http.Headers.StringWithQualityHeaderValue("deflate"));
+
+            return httpClient;
+        }
+    }
+}
diff --git a/src/ScreepsUserTracker/API/HttpHelper.cs b/src/ScreepsUserTracker/API/HttpHelper.cs
new file mode 100644
index 0000000..67ff48b
--- /dev/null
+++ b/src/ScreepsUserTracker/API/HttpHelper.cs
@@ -0,0 +1,101 @@
+using Shared.Models;
+using System;
+using System.Collections.Generic;
+using System.IO.Compression;
+using System.Linq;
+using System.Net;
+using System.Text;
+using System.Threading.Tasks;
+
+namespace API
+{
+    /// <summary>
+    /// HTTP helper that performs GET requests (optionally through a proxy) and normalizes
+    /// the outcome into a <see cref="HttpResponseResult"/> instead of throwing.
+    /// </summary>
+    public static class HttpHelper
+    {
+        /// <summary>
+        /// Decodes a base64 string whose payload is gzip-compressed text.
+        /// </summary>
+        /// <param name="encodedString">Base64 representation of the gzip data.</param>
+        /// <returns>The decompressed text.</returns>
+        public static string DecodeGzip(string encodedString)
+        {
+            return DecompressGzip(Convert.FromBase64String(encodedString));
+        }
+
+        // Decompresses raw gzip bytes to text; lets GetAsync skip the base64 round-trip
+        // the public DecodeGzip overload performs.
+        private static string DecompressGzip(byte[] compressedBytes)
+        {
+            using var memoryStream = new MemoryStream(compressedBytes);
+            using var gzipStream = new GZipStream(memoryStream, CompressionMode.Decompress);
+            using var streamReader = new StreamReader(gzipStream);
+            return streamReader.ReadToEnd();
+        }
+
+        /// <summary>
+        /// Issues a GET request and captures status plus body. Transport failures, timeouts
+        /// and unexpected errors never throw; they are reported via the returned result.
+        /// </summary>
+        /// <param name="url">Absolute URL to fetch.</param>
+        /// <param name="proxyUrl">Optional proxy address; null for a direct connection.</param>
+        /// <returns>Normalized response; <c>Content</c> holds the body or an error text.</returns>
+        public static async Task<HttpResponseResult> GetAsync(string url, string? proxyUrl = null)
+        {
+            var result = new HttpResponseResult();
+
+            // "using" ensures the client is disposed even if a handler below throws.
+            using var client = HttpClientManager.CreateHttpClient(proxyUrl);
+            try
+            {
+                var response = await client.GetAsync(url);
+                result.StatusCode = response.StatusCode;
+                result.IsSuccessStatusCode = response.IsSuccessStatusCode;
+
+                if (response.IsSuccessStatusCode)
+                {
+                    // Octet-stream bodies are assumed to be gzip data (as served by the
+                    // Screeps history endpoint) — TODO confirm no Content-Encoding is set.
+                    if (response.Content.Headers.ContentType?.MediaType == "application/octet-stream")
+                    {
+                        var contentBytes = await response.Content.ReadAsByteArrayAsync();
+                        result.Content = DecompressGzip(contentBytes);
+                    }
+                    else
+                    {
+                        result.Content = await response.Content.ReadAsStringAsync();
+                    }
+                }
+                else
+                {
+                    result.Content = $"Error: {response.ReasonPhrase}";
+                }
+            }
+            catch (HttpRequestException e)
+            {
+                // Network-related errors (DNS, refused connections, proxy failures, ...).
+                result.StatusCode = HttpStatusCode.ServiceUnavailable;
+                result.Content = $"Request error: {e.Message}";
+                result.IsSuccessStatusCode = false;
+            }
+            catch (TaskCanceledException e)
+            {
+                // HttpClient reports its timeout as task cancellation.
+                result.StatusCode = HttpStatusCode.RequestTimeout;
+                result.Content = $"Request timeout: {e.Message}";
+                result.IsSuccessStatusCode = false;
+            }
+            catch (Exception e)
+            {
+                // Last-resort guard so callers always receive a result object.
+                result.StatusCode = HttpStatusCode.InternalServerError;
+                result.Content = $"Unexpected error: {e.Message}";
+                result.IsSuccessStatusCode = false;
+            }
+
+            return result;
+        }
+    }
+}
diff --git a/src/ScreepsUserTracker/API/Proxies.cs b/src/ScreepsUserTracker/API/Proxies.cs
new file mode 100644
index 0000000..21ccccc
--- /dev/null
+++ b/src/ScreepsUserTracker/API/Proxies.cs
@@ -0,0 +1,27 @@
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using System.Threading.Tasks;
+
+namespace API
+{
+    /// <summary>
+    /// Loads the proxy address list used to spread requests across many endpoints.
+    /// </summary>
+    public class Proxies
+    {
+        /// <summary>
+        /// Reads proxy addresses from "proxies.txt" (one per line) in the working directory.
+        /// Blank lines are skipped and surrounding whitespace is trimmed, so a trailing
+        /// newline in the file cannot produce an empty proxy entry.
+        /// </summary>
+        /// <returns>The proxy addresses, in file order.</returns>
+        public static List<string> GetProxies()
+        {
+            return File.ReadAllLines("proxies.txt")
+                .Select(line => line.Trim())
+                .Where(line => line.Length > 0)
+                .ToList();
+        }
+    }
+}
diff --git a/src/ScreepsUserTracker/API/ScreepsAPI.cs b/src/ScreepsUserTracker/API/ScreepsAPI.cs
new file mode 100644
index 0000000..bf06810
--- /dev/null
+++ b/src/ScreepsUserTracker/API/ScreepsAPI.cs
@@ -0,0 +1,42 @@
+using Shared.Models;
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Net;
+using System.Net.Http;
+using System.Text;
+using System.Threading;
+using System.Threading.Tasks;
+
+namespace API
+{
+    /// <summary>
+    /// Entry point for Screeps API calls; counts completed requests for progress logging.
+    /// </summary>
+    public class ScreepsAPI
+    {
+        // Backing field so the counter can be advanced atomically from concurrent requests.
+        private static int _y;
+
+        /// <summary>Number of history requests completed so far.</summary>
+        public static int Y { get => _y; set => _y = value; }
+
+        /// <summary>
+        /// Fetches one room-history document and logs whether the request succeeded.
+        /// </summary>
+        /// <param name="i">Caller-supplied index, used only for logging.</param>
+        /// <param name="url">Room-history URL to fetch.</param>
+        /// <param name="proxyUrl">Optional proxy to route the request through.</param>
+        /// <returns>The normalized HTTP response.</returns>
+        public async static Task<HttpResponseResult> GetScreepsHistory(int i, string url, string? proxyUrl = null)
+        {
+            var responseResult = await HttpHelper.GetAsync(url, proxyUrl);
+
+            // Interlocked: this method runs concurrently (Task.WhenAll in the caller), so a
+            // plain read-modify-write on the shared counter would lose increments.
+            var y = Interlocked.Increment(ref _y) - 1;
+            Console.WriteLine($"{y}-{i}: Is Success: {responseResult.IsSuccessStatusCode}");
+            return responseResult;
+        }
+    }
+}
diff --git a/src/ScreepsUserTracker/ScreepsUserTracker.sln b/src/ScreepsUserTracker/ScreepsUserTracker.sln
new file mode 100644
index 0000000..910607e
--- /dev/null
+++ b/src/ScreepsUserTracker/ScreepsUserTracker.sln
@@ -0,0 +1,37 @@
+
+Microsoft Visual Studio Solution File, Format Version 12.00
+# Visual Studio Version 17
+VisualStudioVersion = 17.9.34607.119
+MinimumVisualStudioVersion = 10.0.40219.1
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ScreepsUserTracker", "ScreepsUserTracker\ScreepsUserTracker.csproj", "{46426535-024D-4EF0-A08C-24F7B05F0DA0}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "API", "API\API.csproj", "{242E638D-7C01-4B41-8912-EF2D2A7A4E1D}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Shared", "Shared\Shared.csproj", "{4B6246B2-819F-40B8-BA26-654CB12032C5}"
+EndProject
+Global
+ GlobalSection(SolutionConfigurationPlatforms) = preSolution
+ Debug|Any CPU = Debug|Any CPU
+ Release|Any CPU = Release|Any CPU
+ EndGlobalSection
+ GlobalSection(ProjectConfigurationPlatforms) = postSolution
+ {46426535-024D-4EF0-A08C-24F7B05F0DA0}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {46426535-024D-4EF0-A08C-24F7B05F0DA0}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {46426535-024D-4EF0-A08C-24F7B05F0DA0}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {46426535-024D-4EF0-A08C-24F7B05F0DA0}.Release|Any CPU.Build.0 = Release|Any CPU
+ {242E638D-7C01-4B41-8912-EF2D2A7A4E1D}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {242E638D-7C01-4B41-8912-EF2D2A7A4E1D}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {242E638D-7C01-4B41-8912-EF2D2A7A4E1D}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {242E638D-7C01-4B41-8912-EF2D2A7A4E1D}.Release|Any CPU.Build.0 = Release|Any CPU
+ {4B6246B2-819F-40B8-BA26-654CB12032C5}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {4B6246B2-819F-40B8-BA26-654CB12032C5}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {4B6246B2-819F-40B8-BA26-654CB12032C5}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {4B6246B2-819F-40B8-BA26-654CB12032C5}.Release|Any CPU.Build.0 = Release|Any CPU
+ EndGlobalSection
+ GlobalSection(SolutionProperties) = preSolution
+ HideSolutionNode = FALSE
+ EndGlobalSection
+ GlobalSection(ExtensibilityGlobals) = postSolution
+ SolutionGuid = {2CAB7E7C-EF4C-4774-8A6E-11CC73044ED9}
+ EndGlobalSection
+EndGlobal
diff --git a/src/ScreepsUserTracker/ScreepsUserTracker/Program.cs b/src/ScreepsUserTracker/ScreepsUserTracker/Program.cs
new file mode 100644
index 0000000..119928d
--- /dev/null
+++ b/src/ScreepsUserTracker/ScreepsUserTracker/Program.cs
@@ -0,0 +1,31 @@
+using API;
+using Shared.Models;
+using System.Diagnostics;
+
+// Ad-hoc benchmark: fetch the same ~119KB room-history document through many proxies
+// in parallel and report how long the batch takes and how many requests succeeded.
+
+var proxies = Proxies.GetProxies();
+
+var url = "https://screeps.com/room-history/shard0/E67N17/61400100.json";
+
+// Single warm-up request through a fixed proxy.
+var proxyUrl2 = "178.208.183.112:3128";
+var result = await ScreepsAPI.GetScreepsHistory(0, url, proxyUrl2);
+Console.WriteLine($"Status Code: {result.StatusCode}");
+Console.WriteLine($"Is Success: {result.IsSuccessStatusCode}");
+
+Stopwatch stopwatch = Stopwatch.StartNew();
+
+// Fire one request per proxy, capped at 240 and guarded against a shorter proxy list.
+var requestCount = Math.Min(240, proxies.Count);
+List<Task<HttpResponseResult>> tasks = new List<Task<HttpResponseResult>>();
+for (int i = 0; i < requestCount; i++)
+{
+    tasks.Add(ScreepsAPI.GetScreepsHistory(i, url, proxies[i]));
+}
+var responses = await Task.WhenAll(tasks);
+
+stopwatch.Stop();
+Console.WriteLine($"Elapsed Time: {stopwatch.Elapsed}");
+Console.WriteLine($"Success: {responses.Count(r => r.IsSuccessStatusCode)}");
diff --git a/src/ScreepsUserTracker/ScreepsUserTracker/ScreepsUserTracker.csproj b/src/ScreepsUserTracker/ScreepsUserTracker/ScreepsUserTracker.csproj
new file mode 100644
index 0000000..864ccf6
--- /dev/null
+++ b/src/ScreepsUserTracker/ScreepsUserTracker/ScreepsUserTracker.csproj
@@ -0,0 +1,15 @@
+<Project Sdk="Microsoft.NET.Sdk">
+
+  <PropertyGroup>
+    <OutputType>Exe</OutputType>
+    <TargetFramework>net8.0</TargetFramework>
+    <ImplicitUsings>enable</ImplicitUsings>
+    <Nullable>enable</Nullable>
+  </PropertyGroup>
+
+  <ItemGroup>
+    <ProjectReference Include="..\API\API.csproj" />
+    <ProjectReference Include="..\Shared\Shared.csproj" />
+  </ItemGroup>
+
+</Project>
diff --git a/src/ScreepsUserTracker/Shared/Models/HttpResponseResult.cs b/src/ScreepsUserTracker/Shared/Models/HttpResponseResult.cs
new file mode 100644
index 0000000..c4464a0
--- /dev/null
+++ b/src/ScreepsUserTracker/Shared/Models/HttpResponseResult.cs
@@ -0,0 +1,25 @@
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Net;
+using System.Text;
+using System.Threading.Tasks;
+
+namespace Shared.Models
+{
+    /// <summary>
+    /// Normalized outcome of an HTTP request: status, success flag and body (or error text).
+    /// Lets callers inspect failures without handling transport exceptions themselves.
+    /// </summary>
+    public class HttpResponseResult
+    {
+        // Response body on success; a human-readable error description on failure.
+        public string Content { get; set; } = "";
+
+        // HTTP status code, or one synthesized by the caller for transport failures.
+        public HttpStatusCode StatusCode { get; set; }
+
+        // True when the underlying response reported a success status code.
+        public bool IsSuccessStatusCode { get; set; }
+    }
+}
diff --git a/src/ScreepsUserTracker/Shared/Models/ScreepsHistoryModel.cs b/src/ScreepsUserTracker/Shared/Models/ScreepsHistoryModel.cs
new file mode 100644
index 0000000..9e7cf0d
--- /dev/null
+++ b/src/ScreepsUserTracker/Shared/Models/ScreepsHistoryModel.cs
@@ -0,0 +1,12 @@
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using System.Threading.Tasks;
+
+namespace Shared.Models
+{
+ public class ScreepsHistoryModel
+ {
+ }
+}
diff --git a/src/ScreepsUserTracker/Shared/Shared.csproj b/src/ScreepsUserTracker/Shared/Shared.csproj
new file mode 100644
index 0000000..fa71b7a
--- /dev/null
+++ b/src/ScreepsUserTracker/Shared/Shared.csproj
@@ -0,0 +1,9 @@
+<Project Sdk="Microsoft.NET.Sdk">
+
+  <PropertyGroup>
+    <TargetFramework>net8.0</TargetFramework>
+    <ImplicitUsings>enable</ImplicitUsings>
+    <Nullable>enable</Nullable>
+  </PropertyGroup>
+
+</Project>
diff --git a/src/logger.py b/src/logger.py
deleted file mode 100644
index 40cb31c..0000000
--- a/src/logger.py
+++ /dev/null
@@ -1,49 +0,0 @@
-import logging
-import os
-
-def create_file_if_doesnt_exist(file_path):
- # Create the directory if it doesn't exist
- os.makedirs(os.path.dirname(file_path), exist_ok=True)
-
- # Create the file if it doesn't exist
- with open(file_path, 'a'):
- pass
-
-def create_logger(log_file):
- create_file_if_doesnt_exist(log_file)
-
- logger = logging.getLogger(log_file)
- logger.setLevel(logging.DEBUG)
-
- file_handler = logging.FileHandler(log_file)
- file_handler.setLevel(logging.DEBUG)
-
- formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s', datefmt='%Y-%m-%d %H:%M:%S')
- file_handler.setFormatter(formatter)
-
- logger.addHandler(file_handler)
-
- return logger
-
-class Logger:
- def __init__(self, log_file_title):
- self.logger = create_logger(f"logs/{log_file_title}.log")
-
- def log(self, message, level=logging.INFO):
- if level == logging.INFO:
- self.info(message)
- elif level == logging.ERROR:
- self.error(message)
- elif level == logging.DEBUG:
- self.debug(message)
- else:
- self.logger.log(level, message)
-
- def info(self, message):
- self.logger.info(message)
-
- def error(self, message):
- self.logger.error(message,exc_info=True)
-
- def debug(self, message):
- self.logger.debug(message)
\ No newline at end of file
diff --git a/src/main.py b/src/main.py
deleted file mode 100644
index 9557aba..0000000
--- a/src/main.py
+++ /dev/null
@@ -1,76 +0,0 @@
-
-
-from dotenv import load_dotenv
-load_dotenv()
-from proxy_handler import get_proxies, make_screeps_history_request
-from multiprocessing.dummy import Pool
-import time
-import asyncio
-
-
-import os
-MAX_THREADS = os.getenv('MAX_THREADS')
-if not MAX_THREADS:
- raise ValueError("No MAX_THREADS provided")
-MAX_THREADS = int(MAX_THREADS)
-
-# 119KB
-proxies = get_proxies()
-urls = []
-url = "https://screeps.com/room-history/shard0/E67N17/61400100.json"
-for i in range(0, 1000):
- urls.append(url)
-
-results = []
-proxies_per_thread = {}
-urls_per_thread = {}
-
-
-for i, proxy in enumerate(proxies):
- thread = i % MAX_THREADS
- proxies_per_thread[thread] = proxies_per_thread.get(thread, []) + [proxy]
-for i, url in enumerate(urls):
- thread = i % MAX_THREADS
- urls_per_thread[thread] = urls_per_thread.get(thread, []) + [url]
-
-async def proxy_worker(proxy, urls, results):
- for index, url in enumerate(urls):
- result = await make_screeps_history_request(url, proxy)
- results.append(result)
- print(f"{index}: Processed {url} via {proxy} with result {result['status_code']}")
- return results
-
-def worker(data):
- thread_proxies = data['thread_proxies']
- thread_urls = data['thread_urls']
-
- urls_per_proxy = {}
- proxy_count = len(thread_proxies)
- for i, url in enumerate(thread_urls):
- proxy = i % proxy_count
- urls_per_proxy[proxy] = urls_per_proxy.get(proxy, []) + [url]
-
- loop = asyncio.new_event_loop()
- asyncio.set_event_loop(loop)
-
- results = []
- tasks = [loop.create_task(proxy_worker(proxy, urls_per_proxy[index], results)) for index, proxy in enumerate(thread_proxies)]
- loop.run_until_complete(asyncio.wait(tasks))
- loop.close()
-
- return results
-
-pool = Pool(MAX_THREADS)
-
-
-start_time = time.time()
-
-thread_results = pool.map(worker, [{'thread_proxies': proxies_per_thread[i], 'thread_urls': urls_per_thread[i]} for i in range(MAX_THREADS)])
-
-results = [result for thread_result in thread_results for result in thread_result]
-
-end_time = time.time()
-
-total_execution_time = end_time - start_time
-
-print(f"Total execution time: {total_execution_time} seconds")
\ No newline at end of file
diff --git a/src/proxy_handler.py b/src/proxy_handler.py
deleted file mode 100644
index 4e48ce4..0000000
--- a/src/proxy_handler.py
+++ /dev/null
@@ -1,90 +0,0 @@
-import http.client
-import httpx
-import os
-import requests
-import json
-import aiohttp
-from urllib.parse import urlparse
-
-PROXYSCRAPE_TOKEN = os.getenv('PROXYSCRAPE_TOKEN')
-
-from logger import Logger
-proxy_logger = Logger("proxy_handler")
-
-def fetch_proxies():
- if not PROXYSCRAPE_TOKEN:
- raise ValueError("No PROXYSCRAPE_TOKEN provided")
-
- api_url = f"https://api.proxyscrape.com/v2/account/datacenter_shared/proxy-list?protocol=http&auth={PROXYSCRAPE_TOKEN}&type=getproxies&country[]=de&format=normal&status=all"
-
- parsed_url = urlparse(api_url)
- host = parsed_url.netloc
- path = parsed_url.path + "?" + parsed_url.query
-
- conn = http.client.HTTPSConnection(host)
- conn.request("GET", path)
- response = conn.getresponse()
- response_data = response.read().decode()
- conn.close()
-
- proxies = response_data.split('\r\n')
- proxies = [f"http://{proxy}" for proxy in proxies if proxy]
-
- return proxies
-
-def get_proxies():
- proxies = fetch_proxies()
- proxy_logger.info(f"Fetched {len(proxies)} proxies")
- return proxies
-
-# async def make_screeps_history_request(url, proxy):
- # proxies = {
- # "http": proxy,
- # "https": proxy
- # }
-
- # try:
- # response = requests.get(url, proxies=proxies)
- # json_data = response.json()
- # return {'status_code': response.status_code, 'data':json_data}
-
- # except Exception as e:
- # if 'response' in locals():
- # status_code = response.status_code
- # if status_code == 200:
- # status_code = 500
- # else:
- # status_code = None
-
- # proxy_logger.error(f"Error making request to {url} via proxy {proxy}: {e}")
- # return {'status_code': status_code, 'data':None}
-
-
-# async def make_screeps_history_request(url, proxy):
-# proxies = {
-# "http://": proxy,
-# "https://": proxy
-# }
-
-# async with httpx.AsyncClient(proxies=proxies) as client:
-# try:
-# response = await client.get(url)
-# json_data = response.json()
-# return {'status_code': response.status_code, 'data': json_data}
-
-# except Exception as e:
-# status_code = response.status_code if 'response' in locals() else None
-# proxy_logger.error(f"Error making request to {url} via proxy {proxy}: {e}")
-# return {'status_code': status_code, 'data': None}
-
-async def make_screeps_history_request(url, proxy):
- async with aiohttp.ClientSession() as session:
- try:
- async with session.get(url, proxy=proxy) as response:
- json_data = await response.json()
- return {'status_code': response.status, 'data': json_data}
-
- except Exception as e:
- status_code = response.status if 'response' in locals() else None
- proxy_logger.error(f"Error making request to {url} via proxy {proxy}: {e}")
- return {'status_code': status_code, 'data': None}
\ No newline at end of file