Skip to content

Commit 993557e

Browse files
committed
refactor(utils): centralize HTTP requests and update public IP caching in get_public_ip
- Replace internal low-level curl implementation with shared `send_query` (returns QueryResult) to unify proxy/TLS/timeout handling. - Prefer `FileCache` for IP caching (use system temp dir). Fall back to legacy JSON cache file on error to preserve previous behavior. - Convert proxy handling to build proxy URLs and call `send_query` for each proxy type. - Rename `debug` parameter to `verbose` and wire verbose logging through new calls. - Use `QueryResult.response` when available and skip on `QueryResult.error`. - Persist cache via `FileCache.write_cache` when available, otherwise write legacy JSON. - Add `tempfile` import and minor cleanup/comments. Notes: - No behavioral logic changes to IP extraction, only networking and caching plumbing refactor.
1 parent a080c17 commit 993557e

1 file changed

Lines changed: 56 additions & 70 deletions

File tree

‎proxy_checker/utils/get_public_ip.py‎

Lines changed: 56 additions & 70 deletions
Original file line number | Diff line number | Diff line change
@@ -3,10 +3,9 @@
33
import time
44
import hashlib
55
import re
6+
import tempfile
67
from typing import Optional, Dict, Any, List
7-
import pycurl
8-
from io import BytesIO
9-
import certifi
8+
from .curl import send_query, QueryResult
109

1110

1211
IP_REGEX = re.compile(
@@ -25,64 +24,14 @@
2524
]
2625

2726

28-
def _curl_request(
29-
url: str,
30-
proxy: Optional[str],
31-
proxy_type: Optional[str],
32-
username: Optional[str],
33-
password: Optional[str],
34-
timeout: int = 5,
35-
) -> Optional[str]:
36-
37-
buffer = BytesIO()
38-
curl = pycurl.Curl()
39-
curl.setopt(pycurl.URL, url)
40-
curl.setopt(pycurl.USERAGENT, "Mozilla/5.0 (Python Proxy IP Checker)")
41-
curl.setopt(pycurl.TIMEOUT, timeout)
42-
curl.setopt(pycurl.CONNECTTIMEOUT, timeout)
43-
curl.setopt(pycurl.WRITEDATA, buffer)
44-
curl.setopt(pycurl.CAINFO, certifi.where())
45-
46-
# Proxy setup
47-
if proxy:
48-
curl.setopt(pycurl.PROXY, proxy)
49-
50-
if proxy_type:
51-
proxy_type = proxy_type.lower()
52-
if proxy_type == "http":
53-
curl.setopt(pycurl.PROXYTYPE, pycurl.PROXYTYPE_HTTP)
54-
elif proxy_type == "socks4":
55-
curl.setopt(pycurl.PROXYTYPE, pycurl.PROXYTYPE_SOCKS4)
56-
elif proxy_type == "socks5":
57-
curl.setopt(pycurl.PROXYTYPE, pycurl.PROXYTYPE_SOCKS5)
58-
elif proxy_type == "socks4a":
59-
curl.setopt(pycurl.PROXYTYPE, pycurl.PROXYTYPE_SOCKS4A)
60-
elif proxy_type == "socks5h":
61-
curl.setopt(pycurl.PROXYTYPE, pycurl.PROXYTYPE_SOCKS5_HOSTNAME)
62-
63-
if username and password:
64-
curl.setopt(pycurl.PROXYUSERPWD, f"{username}:{password}")
65-
66-
try:
67-
curl.perform()
68-
http_code = curl.getinfo(pycurl.RESPONSE_CODE)
69-
except Exception:
70-
curl.close()
71-
return None
72-
73-
curl.close()
74-
75-
if http_code != 200:
76-
return None
77-
78-
return buffer.getvalue().decode(errors="ignore").strip()
27+
# Replaced _curl_request with calls to shared send_query (returns QueryResult)
7928

8029

8130
def get_public_ip(
8231
cache: bool = False,
8332
cache_timeout: int = 300,
8433
proxy_info: Dict[str, Any] = {},
85-
debug: bool = False,
34+
verbose: bool = False,
8635
) -> str:
8736

8837
# ------------ CACHE HANDLING ------------
@@ -99,13 +48,29 @@ def get_public_ip(
9948
cache_key = hashlib.md5(proxy_key.encode()).hexdigest() if proxy_key else ""
10049
cache_file = os.path.join(cache_dir, f"{cache_key}.cache")
10150

102-
if cache and cache_key and os.path.exists(cache_file):
51+
# Prefer FileCache when available to centralize cache logic; fall back to JSON
52+
fc = None
53+
if cache and cache_key:
10354
try:
104-
data = json.load(open(cache_file))
105-
if "ip" in data and "expires" in data and data["expires"] > time.time():
106-
return data["ip"]
55+
from ..FileCache import FileCache
56+
57+
fc = FileCache(cache_file)
58+
cached = fc.read_cache()
59+
if cached:
60+
return cached
10761
except Exception:
108-
pass
62+
# fallback to legacy JSON file check
63+
try:
64+
if os.path.exists(cache_file):
65+
data = json.load(open(cache_file))
66+
if (
67+
"ip" in data
68+
and "expires" in data
69+
and data["expires"] > time.time()
70+
):
71+
return data["ip"]
72+
except Exception:
73+
pass
10974

11075
# ------------ PROXY HANDLING ------------
11176
proxy_types = proxy_info.get("type", "")
@@ -124,27 +89,38 @@ def get_public_ip(
12489
for url in IP_SERVICES:
12590
if proxy:
12691
for ptype in proxy_types:
127-
if debug:
92+
if verbose:
12893
print(f"Trying {url} using proxy {proxy} type={ptype}")
12994

130-
response = _curl_request(url, proxy, ptype, username, password)
95+
# convert proxy and type to proxy URL for send_query
96+
proxy_url = f"{ptype}://{proxy}"
97+
qr: QueryResult = send_query(
98+
url=url,
99+
proxy=proxy_url,
100+
user=username,
101+
password=password,
102+
timeout=5000,
103+
verbose=verbose,
104+
)
105+
response = qr.response if (qr and not qr.error) else None
131106

132107
if response:
133-
if debug:
108+
if verbose:
134109
print(f"Response (proxy {ptype}): {response[:80]}")
135110
break
136111

137112
if response:
138113
break
139114

140115
else:
141-
if debug:
116+
if verbose:
142117
print(f"Trying {url} without proxy")
143118

144-
response = _curl_request(url, None, None, None, None)
119+
qr: QueryResult = send_query(url=url, timeout=5000, verbose=verbose)
120+
response = qr.response if (qr and not qr.error) else None
145121

146122
if response:
147-
if debug:
123+
if verbose:
148124
print(f"Response: {response[:80]}")
149125
break
150126

@@ -161,10 +137,20 @@ def get_public_ip(
161137
# ------------ SAVE CACHE ------------
162138
if cache and cache_key:
163139
try:
164-
json.dump(
165-
{"ip": ip, "expires": time.time() + cache_timeout},
166-
open(cache_file, "w"),
167-
)
140+
if fc:
141+
try:
142+
fc.write_cache(ip, expires_in=cache_timeout)
143+
except Exception:
144+
# fallback to JSON write
145+
json.dump(
146+
{"ip": ip, "expires": time.time() + cache_timeout},
147+
open(cache_file, "w"),
148+
)
149+
else:
150+
json.dump(
151+
{"ip": ip, "expires": time.time() + cache_timeout},
152+
open(cache_file, "w"),
153+
)
168154
except Exception:
169155
pass
170156

0 commit comments

Comments (0)