"""
Proxy Handler Class
"""
import json
from queue import Queue
import time
import urllib.parse
import threading
import requests

class ThreadSafeDict(dict):
    """
    A dict whose individual operations are guarded by a lock.

    Note: only single operations are atomic; compound read-modify-write
    sequences (e.g. ``d[k] += 1``) still need external synchronization.
    """
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.lock = threading.Lock()
    def __getitem__(self, key):
        with self.lock:
            return super().__getitem__(key)
    def __setitem__(self, key, value):
        with self.lock:
            return super().__setitem__(key, value)
    def __delitem__(self, key):
        with self.lock:
            return super().__delitem__(key)
    def __contains__(self, key):
        with self.lock:
            return super().__contains__(key)
    def __len__(self):
        with self.lock:
            return super().__len__()
    def __iter__(self):
        # Iterate over a snapshot: the lock is released as soon as this method
        # returns, so handing out a live iterator would not actually be safe.
        with self.lock:
            return iter(list(super().keys()))
    def __repr__(self):
        with self.lock:
            return super().__repr__()
    def __str__(self):
        with self.lock:
            return super().__str__()

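# Note on the expected proxy-side API (inferred from the request URLs below,
# not from a formal spec): each proxy server is assumed to expose, behind
# HTTP basic auth,
#   get_response?url=...            -> {"success": bool, "response": str},
#                                      where "response" holds a JSON-encoded
#                                      payload on success, else an error text
#   get_response_raw?url=...        -> the raw upstream body
#   file_size?url=...               -> the size in bytes, as plain text
#   filepart?url=...&start=N&end=M  -> the requested byte range
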
class ProxyHandler:
    """
    Round-robin client for a pool of proxy servers that expose
    http://{ip}:{port}/get_response_raw?url={url} (and related endpoints)
    behind HTTP basic auth.
    """
    def __init__(self, proxy_list_file, proxy_auth="user:pass", port=80, wait_time=0.1, timeouts=10):
        self.proxy_auth = proxy_auth
        # Basic-auth credentials; split only on the first ":" so passwords
        # containing ":" survive intact.
        self.auth = tuple(proxy_auth.split(":", 1))
        self.port = port
        self.proxy_list = []
        self.commit_time = ThreadSafeDict()
        self.timeouts = timeouts
        self.wait_time = wait_time
        self.lock = threading.Lock()
        self.last_logged_activities = Queue(maxsize=100)
        with open(proxy_list_file, 'r', encoding='utf-8') as f:
            for line in f:
                line = line.strip()
                if line:  # skip blank lines in the proxy list
                    self.proxy_list.append(line)
        for i, proxy in enumerate(self.proxy_list):
            if not proxy.startswith("http"):
                proxy = "http://" + proxy
            # Look for a port on the host part only; the scheme's "://" would
            # otherwise make ":" always present and the default port unused.
            if ":" not in proxy.split("://", 1)[-1]:
                proxy += f":{self.port}"
            if not proxy.endswith("/"):
                proxy += "/"
            self.proxy_list[i] = proxy
        self.proxy_index = -1
    def log_time(self):
        """
        Records the current time in the rolling activity log.
        """
        # Drop the oldest entry before putting, so put() on the bounded queue
        # (maxsize 100) cannot block the caller.
        if self.last_logged_activities.full():
            self.last_logged_activities.get()
        self.last_logged_activities.put(time.time())
    def get_average_time(self):
        """
        Returns the average interval between logged activities, in seconds.
        """
        # Span between newest and oldest timestamps, divided by the number of
        # intervals between them (n entries bound n - 1 intervals).
        if self.last_logged_activities.qsize() > 1:
            return (self.last_logged_activities.queue[-1] - self.last_logged_activities.queue[0]) / (self.last_logged_activities.qsize() - 1)
        return 0
    def wait_until_commit(self, proxy_index=None):
        """
        Blocks until at least wait_time seconds have passed since the given
        proxy was last used, then stamps it as used now.
        """
        if proxy_index is None:
            proxy_index = self.proxy_index
        if proxy_index not in self.commit_time:
            self.commit_time[proxy_index] = 0
        while time.time() < self.commit_time[proxy_index] + self.wait_time:
            time.sleep(0.01)
        self.commit_time[proxy_index] = time.time()
    def _update_proxy_index(self):
        """
        Advances to the next proxy in round-robin order and returns its index.
        """
        with self.lock:
            self.proxy_index = (self.proxy_index + 1) % len(self.proxy_list)
            return self.proxy_index
    def get_response(self, url):
        """
        Fetches url through the next proxy's get_response endpoint and
        returns the decoded JSON payload, or None on failure.
        """
        url = urllib.parse.quote(url, safe='')
        try:
            index = self._update_proxy_index()
            self.wait_until_commit(index)
            self.log_time()
            response = requests.get(self.proxy_list[index] + f"get_response?url={url}", timeout=self.timeouts, auth=self.auth)
            if response.status_code == 200:
                json_response = response.json()
                if json_response["success"]:
                    return json.loads(json_response["response"])
                if "429" in json_response["response"]:
                    # Upstream rate limit reported by the proxy: bench this
                    # proxy for a while before reusing it.
                    self.commit_time[index] = time.time() + self.timeouts
                    print(f"Error: {json_response['response']}, waiting {self.timeouts} seconds")
                else:
                    print(f"Failed in proxy side: {json_response['response']}")
                return None
            elif response.status_code == 429:
                # Rate-limited at the proxy itself: bench it and give up on
                # this request.
                self.commit_time[index] = time.time() + self.timeouts
                print(f"Error: {response.status_code}, waiting {self.timeouts} seconds")
                return None
            else:
                print(f"Failed in proxy side: {response.status_code}")
                return None
        except Exception as e:
            print(f"Error while processing response from proxy: {e}")
            return None
    def get(self, url):
        """
        Fetches url through the next proxy's get_response_raw endpoint and
        returns the raw requests.Response, or None on failure.
        """
        url = urllib.parse.quote(url, safe='')
        try:
            index = self._update_proxy_index()
            self.wait_until_commit(index)
            response = requests.get(self.proxy_list[index] + f"get_response_raw?url={url}", timeout=self.timeouts, auth=self.auth)
            if response.status_code == 200:
                return response
            print(f"Error: {response.status_code}")
            return None
        except Exception as e:
            print(f"Exception: {e}")
            return None
    def filesize(self, url):
        """
        Returns the file size of the url in bytes, or None on failure.
        """
        url = urllib.parse.quote(url, safe='')
        try:
            index = self._update_proxy_index()
            self.wait_until_commit(index)
            response = requests.get(self.proxy_list[index] + f"file_size?url={url}", timeout=self.timeouts, auth=self.auth)
            if response.status_code == 200:
                return int(response.text)
            print(f"Error: {response.status_code} when getting filesize from {url}")
            return None
        except Exception as e:
            print(f"Exception: {e}")
            return None
    def get_filepart(self, url, start, end):
        """
        Fetches the byte range [start, end] of url through the proxy's
        filepart endpoint and returns the raw response, or None on failure.
        """
        url = urllib.parse.quote(url, safe='')
        try:
            index = self._update_proxy_index()
            self.wait_until_commit(index)
            response = requests.get(self.proxy_list[index] + f"filepart?url={url}&start={start}&end={end}", timeout=self.timeouts, auth=self.auth)
            if response.status_code == 200:
                return response
            print(f"Error: {response.status_code}")
            return None
        except Exception as e:
            print(f"Exception: {e}")
            return None
    def check(self, raise_exception=False):
        """
        Probes every proxy and removes (or raises on) the ones that fail.
        """
        failed_proxies = []
        for i, proxy in enumerate(self.proxy_list):
            try:
                response = requests.get(proxy, auth=self.auth, timeout=2)
                if response.status_code != 200:
                    print(f"Proxy {proxy} is not working")
                    failed_proxies.append(i)
            except Exception as e:
                print(f"Proxy {proxy} is not working: {e}")
                failed_proxies.append(i)
        if failed_proxies:
            if raise_exception:
                raise RuntimeError(f"Proxies {failed_proxies} are not working")
            print(f"{len(failed_proxies)} of {len(self.proxy_list)} proxies are not working: {failed_proxies}")
            # Remove failed proxies from the end so earlier indices stay valid.
            for i in failed_proxies[::-1]:
                del self.proxy_list[i]
            if len(self.proxy_list) == 0:
                raise RuntimeError("No proxies available")
        else:
            print(f"All {len(self.proxy_list)} proxies are working")

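# Illustrative sketch, not part of the original interface: one way to combine
# filesize() and get_filepart() into a chunked download. The helper name and
# chunk_size are hypothetical, and start/end are assumed to be an inclusive
# byte range (matching the filepart endpoint's parameters above).
def _download_in_chunks(handler, url, chunk_size=1024 * 1024):
    """
    Fetches url through handler chunk by chunk and returns the assembled
    bytes, or None if the size lookup or any chunk fails.
    """
    total = handler.filesize(url)
    if total is None:
        return None
    parts = []
    for start in range(0, total, chunk_size):
        end = min(start + chunk_size - 1, total - 1)
        part = handler.get_filepart(url, start, end)
        if part is None:
            return None
        parts.append(part.content)
    return b"".join(parts)
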
class SingleProxyHandler(ProxyHandler):
    """
    Same interface as ProxyHandler, but backed by a single proxy URL instead
    of a list file.
    """
    def __init__(self, proxy_url, proxy_auth="user:pass", port=80, wait_time=0.1, timeouts=10):
        self.proxy_auth = proxy_auth
        self.auth = tuple(proxy_auth.split(":", 1))
        self.port = port
        # Normalize the URL the same way the parent class does.
        if not proxy_url.startswith("http"):
            proxy_url = "http://" + proxy_url
        if ":" not in proxy_url.split("://", 1)[-1]:
            proxy_url += f":{self.port}"
        if not proxy_url.endswith("/"):
            proxy_url += "/"
        self.proxy_list = [proxy_url]
        self.proxy_index = -1
        self.commit_time = ThreadSafeDict()
        self.timeouts = timeouts
        self.wait_time = wait_time
        self.lock = threading.Lock()
        # Required by log_time()/get_average_time(), which the parent's
        # get_response() calls; without it those calls would raise.
        self.last_logged_activities = Queue(maxsize=100)
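
if __name__ == "__main__":
    # Minimal usage sketch. "proxies.txt" (one host[:port] per line) and the
    # user:pass credentials are placeholders for an actual deployment.
    handler = ProxyHandler("proxies.txt", proxy_auth="user:pass", port=80)
    handler.check()  # drop any proxies that fail a quick probe
    payload = handler.get_response("https://example.com/api/data")
    print("Payload:", payload)
    print(f"Average request interval: {handler.get_average_time():.3f}s")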