- Purchase plans
- API documentation
- Java code example
- C# code example
- PHP code example
- Python code example
Plan | Usable period | IPs per API call | Estimated available IPs | IP lifetime | List price | Discounted price |
---|---|---|---|---|---|---|
Plan 0 · ¥0.1 trial | 2 hours | 1 (adjustable) | No cap; at least 27,000 | 60 s | ¥2 | ¥0.1 |
Plan 1 · 6 hours | 6 hours | 1 (adjustable) | ~15,000 | 60 s | ¥12.0 | ¥5.0 |
Plan 2 · 1 day | 24 hours | 1 (adjustable) | ~15,000 | 60 s | ¥24.0 | ¥10.0 |
Plan 3 · 1 week | 7 days | 1 (adjustable) | ~15,000 | 60 s | ¥134.4 | ¥56.0 |
Plan 4 · 1 month | 31 days | 1 (adjustable) | ~15,000 | 60 s | ¥384.0 | ¥160.0 |
Plan 5 · 1 quarter | 93 days | 1 (adjustable) | ~15,000 | 60 s | ¥960.0 | ¥400.0 |
Plan 6 · 6 months | 180 days | 1 (adjustable) | ~15,000 | 60 s | ¥1728.0 | ¥720.0 |
Plan 7 · 1 year | 365 days | 1 (adjustable) | ~15,000 | 60 s | ¥3000.0 | ¥1250.0 |
1. How many IPs does each API call return?
One by default. Adjust "IPs per API call" to have each call return more; prices differ by return count. Set it to match your project, and you can also pay the difference to adjust it after purchase.
2. How exactly do I use this?
We provide short-lived proxy IPs for data-collection work, exposed through an API; just wire the API into your collection program.
3. What does "IP lifetime" mean? Can it be changed after purchase?
IP lifetime is the longest an IP stays usable; on this site the maximum is 60 seconds.
4. How do I get an invoice?
All prices quoted on this site exclude tax. Once your cumulative spending reaches ¥1,000 you can request a general VAT invoice; a special VAT invoice requires an extra 3% tax. Contact customer service to have one issued.
API notes
1. Endpoint: http://dynamic.goubanjia.com/dynamic/get/xxx.html
2. Developers can call the API from their programs to fetch IP lists in bulk and in real time. Once generated, the endpoint never changes, so it can be hard-coded into your program.
3. Call it with HTTP GET; the response is a plain-text list, one IP per line. When multiple IPs are returned they are separated by \n by default (see the sketch after this list).
4. Keep calls at or below 10 per second, otherwise the API returns a "too many request" message.
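For reference, here is a minimal Python sketch of calling the extraction endpoint while staying under the 10-calls-per-second limit. It assumes the `requests` package is installed; the order number is a placeholder you must replace, and the one-second pause is simply a conservative choice:

```python
import time
import requests

ORDER = "your-order-number"  # placeholder: get the real one from the user center
API_URL = "http://dynamic.goubanjia.com/dynamic/get/" + ORDER + ".html"

def fetch_ips():
    # Plain HTTP GET; the API returns text, one IP:port per line.
    resp = requests.get(API_URL, timeout=5)
    return [line.strip() for line in resp.text.split("\n") if line.strip()]

if __name__ == "__main__":
    for _ in range(3):
        print(fetch_ips())
        time.sleep(1)  # stay well under the documented 10-calls-per-second limit
```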
Parameter | Required | Description | Value |
---|---|---|---|
order | Yes | Order number | Get it from the user center |
ttl | No | When present, each IP's remaining lifetime in milliseconds is appended after a comma, e.g. 12.12.12.12:38442,55345 | 1 |
random | No | Without this parameter, faster IPs are returned first; with it, IPs with longer remaining lifetimes are returned first | true |
sep | No | Separator between the multiple IPs returned by one call | 0 - CRLF, 1 - LFCR, 2 - CR, 3 - LF, 4 - space, 5 - comma, 6 - semicolon |
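As an illustration, a small hypothetical helper that assembles the endpoint URL from these parameters. The table and the Java demo below only show the bare `?ttl` form; combining `ttl`, `random`, and `sep` with `&` is assumed standard query-string behavior, not something the documentation confirms:

```python
# Hypothetical helper: builds the extraction URL with the optional parameters.
def build_api_url(order, ttl=False, random=False, sep=None):
    url = "http://dynamic.goubanjia.com/dynamic/get/" + order + ".html"
    params = []
    if ttl:
        params.append("ttl")          # the Java demo below appends "?ttl" with no value
    if random:
        params.append("random=true")  # prefer IPs with longer remaining lifetimes
    if sep is not None:
        # 0=CRLF, 1=LFCR, 2=CR, 3=LF, 4=space, 5=comma, 6=semicolon
        params.append("sep=" + str(sep))
    return url + ("?" + "&".join(params) if params else "")

# Example (hypothetical order number):
# build_api_url("abc123", ttl=True, sep=5)
# -> "http://dynamic.goubanjia.com/dynamic/get/abc123.html?ttl&sep=5"
```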
Sample response (text, with the ttl parameter)
221.32.22.54:33084,55034
121.112.13.88:18049,59022
200.113.123.114:55231,35334
Sample response (text, without the ttl parameter)
221.32.22.54:33084
121.112.13.88:18049
200.113.123.114:55231
Sample response (calls too frequent: more than 10 extractions within 1 second)
{msg: "请控制好请求频率,1秒内不要超过10次!",success: false}
(The msg field means: "Please control your request rate; do not exceed 10 calls within 1 second!")
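A sketch of parsing the response shapes shown above: with `ttl`, each line is `ip:port,remaining-ms`; without it, each line is just `ip:port`; and a body starting with `{` signals the rate-limit error. Treating every `{`-prefixed body as an error is an assumption based only on these examples:

```python
def parse_response(text):
    """Parse the extraction API's text response into (ip_port, ttl_ms) pairs."""
    text = text.strip()
    if text.startswith("{"):
        # Rate-limit response, e.g. {msg: "...", success: false}
        raise RuntimeError("API error: " + text)
    results = []
    for line in text.split("\n"):
        line = line.strip()
        if not line:
            continue
        if "," in line:
            # The ttl parameter was set: "ip:port,remaining_ms"
            ip_port, ttl_ms = line.split(",", 1)
            results.append((ip_port, int(ttl_ms)))
        else:
            # No ttl: just "ip:port"
            results.append((line, None))
    return results

# Example:
# parse_response("221.32.22.54:33084,55034\n121.112.13.88:18049,59022")
# -> [('221.32.22.54:33084', 55034), ('121.112.13.88:18049', 59022)]
```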
Download the complete project (Maven version) · Download the complete project (with required jars) · Download the required jars
package com.goubanjia.test;
import java.io.BufferedInputStream;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.util.ArrayList;
import java.util.List;
import com.gargoylesoftware.htmlunit.BrowserVersion;
import com.gargoylesoftware.htmlunit.ProxyConfig;
import com.gargoylesoftware.htmlunit.WebClient;
import com.gargoylesoftware.htmlunit.html.HtmlPage;
/**
 * This DEMO mainly tests the stability of the dynamic proxy IPs.
 * It can also serve as a reference crawler project; if you reuse it, adapt the webParseHtml method yourself.
 */
public class TestDynamicIp {
public static List<String> ipList = new ArrayList<>();
public static boolean gameOver = false;
public static void main(String[] args) {
long fetchIpSeconds = 5;
int threadNum = 10;
int testTime = 3;
// Fill in your 全网代理IP order number here; IPs can only be extracted once it is set
String order = "replace-this-with-your-order-number";
// The target URL you want to crawl
String targetUrl = "http://pv.sohu.com/cityjson?ie=utf-8";
// Whether to execute JavaScript; enabling it slows crawling down
boolean useJS = false;
// Request timeout in milliseconds; the default is 5 seconds
int timeOut = 5000;
if (order == null || "".equals(order)) {
System.err.println("请输入全网代理IP动态代理订单号");
return;
}
System.out.println(">>>>>>>>>>>>>>全网代理动态IP测试开始<<<<<<<<<<<<<<");
System.out.println("***************");
System.out.println("接口返回IP为国内各地区,每次最多返回10个");
System.out.println("提取IP间隔 " + fetchIpSeconds + " 秒 ");
System.out.println("开启爬虫线程 " + threadNum);
System.out.println("爬虫目标网址 " + targetUrl);
System.out.println("测试次数 3 ");
System.out.println("***************\n");
/**
 * Trust all certificates. This is needed when requesting HTTPS URLs; otherwise you get:
 * sun.security.validator.ValidatorException: PKIX path building failed:
 * sun.security.provider.certpath.SunCertPathBuilderException: unable to find valid certification path to requested target
 **/
TrustCertsUtil.trustAllHttpsCertificates(); // for skipping certificate checks with HttpClient instead, see http://www.goubanjia.com/help/article-67.html
TestDynamicIp tester = new TestDynamicIp();
new Thread(tester.new GetIP(fetchIpSeconds * 1000, testTime, order)).start();
for (int i = 0; i < threadNum; i++) {
tester.new Crawler(100, targetUrl, useJS, timeOut).start();
}
while(!gameOver){
try {
Thread.sleep(100);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
System.out.println(">>>>>>>>>>>>>>全网代理动态IP测试结束<<<<<<<<<<<<<<");
System.exit(0);
}
// Trust all certificates
public static class TrustCertsUtil { // must be static so it can hold static methods and the nested MITM class
public static void trustAllHttpsCertificates() {
// These two lines are required if the crawler requests HTTPS URLs through a proxy
System.setProperty("jdk.http.auth.proxying.disabledSchemes", "");
System.setProperty("jdk.http.auth.tunneling.disabledSchemes", "");
try {
javax.net.ssl.TrustManager[] trustAllCerts = new javax.net.ssl.TrustManager[1];
javax.net.ssl.TrustManager tm = new MITM();
trustAllCerts[0] = tm;
javax.net.ssl.SSLContext sc = javax.net.ssl.SSLContext.getInstance("TLS");
sc.init(null, trustAllCerts, null);
javax.net.ssl.HttpsURLConnection.setDefaultSSLSocketFactory(sc.getSocketFactory());
} catch (Exception e) {
System.err.println("【DATA5U】设置证书出错,原因:" + e.getMessage());
}
}
static class MITM implements javax.net.ssl.TrustManager, javax.net.ssl.X509TrustManager {
public java.security.cert.X509Certificate[] getAcceptedIssuers() {
return null;
}
public boolean isServerTrusted(java.security.cert.X509Certificate[] certs) {
return true;
}
public boolean isClientTrusted(java.security.cert.X509Certificate[] certs) {
return true;
}
public void checkServerTrusted(java.security.cert.X509Certificate[] certs, String authType)
throws java.security.cert.CertificateException {
return;
}
public void checkClientTrusted(java.security.cert.X509Certificate[] certs, String authType)
throws java.security.cert.CertificateException {
return;
}
}
}
// Crawl the target site to test the IPs
public class Crawler extends Thread{
@Override
public void run() {
while(!gameOver){
webParseHtml(targetUrl);
try {
Thread.sleep(sleepMs);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}
long sleepMs = 200;
boolean useJs = false;
String targetUrl = "";
int timeOut = 5000;
public Crawler(long sleepMs, String targetUrl, boolean useJs, int timeOut) {
this.sleepMs = sleepMs;
this.targetUrl = targetUrl;
this.useJs = useJs;
this.timeOut = timeOut;
}
public String webParseHtml(String url) {
String html = "";
BrowserVersion[] versions = {BrowserVersion.INTERNET_EXPLORER_11, BrowserVersion.CHROME, BrowserVersion.FIREFOX_38, BrowserVersion.INTERNET_EXPLORER_8};
WebClient client = new WebClient(versions[(int)(versions.length * Math.random())]);
try {
client.getOptions().setThrowExceptionOnFailingStatusCode(false);
client.getOptions().setJavaScriptEnabled(useJs);
client.getOptions().setCssEnabled(false);
client.getOptions().setThrowExceptionOnScriptError(false);
client.getOptions().setTimeout(timeOut);
client.getOptions().setAppletEnabled(true);
client.getOptions().setGeolocationEnabled(true);
client.getOptions().setRedirectEnabled(true);
// Allow HTTPS sites; prevents SSL certificate errors
client.getOptions().setUseInsecureSSL(true);
String ipport = getAProxy();
if (ipport != null) {
ProxyConfig proxyConfig = new ProxyConfig(ipport.split(":")[0], Integer.parseInt(ipport.split(":")[1]));
client.getOptions().setProxyConfig(proxyConfig);
}else {
System.out.print(".");
return "";
}
HtmlPage page = client.getPage(url);
html = page.asXml();
System.out.println(getName() + " used proxy " + ipport + " and got HTML from the target URL: " + html);
} catch (Exception e) {
return webParseHtml(url);
} finally {
client.close();
}
return html;
}
private String getAProxy() {
if (ipList.size() > 0) {
String ip = ipList.get((int)(Math.random() * ipList.size()));
return ip ;
}
return null;
}
}
// Fetch dynamic IPs on a schedule
public class GetIP implements Runnable{
long sleepMs = 1000;
int maxTime = 3;
String order = "";
public GetIP(long sleepMs, int maxTime, String order) {
this.sleepMs = sleepMs;
this.maxTime = maxTime;
this.order = order;
}
@Override
public void run() {
long getIpTime = 0;
int time = 1;
while(!gameOver){
if (time > maxTime) {
gameOver = true;
break;
}
try {
java.net.URL url = new java.net.URL("http://dynamic.goubanjia.com/dynamic/get/" + order + ".html?ttl");
HttpURLConnection connection = (HttpURLConnection) url.openConnection();
connection.setConnectTimeout(3000);
// Read the whole response; in.available() cannot be trusted to report the full length
InputStream in = new BufferedInputStream(connection.getInputStream());
java.io.ByteArrayOutputStream buffer = new java.io.ByteArrayOutputStream();
byte[] chunk = new byte[4096];
int bytesRead;
while ((bytesRead = in.read(chunk)) != -1) {
buffer.write(chunk, 0, bytesRead);
}
in.close();
String[] res = buffer.toString("UTF-8").split("\n");
List<String> ipList = new ArrayList<>(); // fresh local list, published to TestDynamicIp.ipList below
for (String ip : res) {
try {
String[] parts = ip.split(",");
if (Integer.parseInt(parts[1]) > 0) {
ipList.add(parts[0]);
}
} catch (Exception e) {
// skip lines without a valid TTL
}
}
if (ipList.size() > 0) {
TestDynamicIp.ipList = ipList;
System.out.println("第" + ++getIpTime + "次获取动态IP " + ipList.size() + " 个");
time += 1;
}
} catch (Exception e) {
e.printStackTrace();
System.err.println(">>>>>>>>>>>>>>获取IP出错");
}
try {
Thread.sleep(sleepMs);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}
}
}
using System;
namespace TestDynamicIp
{
class Program
{
static string order = "replace-this-with-your-own-IP-extraction-code";
static bool run = true;
static string ipApi = "http://dynamic.goubanjia.com/dynamic/get/" + order + ".html";
static string targetUrl = "http://pv.sohu.com/cityjson?ie=utf-8";
public static void Main(string[] args)
{
// When crawling HTTPS URLs you must accept the server certificate, otherwise you get: "The underlying connection was closed: Could not establish trust relationship for the SSL/TLS secure channel"
System.Net.ServicePointManager.ServerCertificateValidationCallback = new System.Net.Security.RemoteCertificateValidationCallback((object sender, System.Security.Cryptography.X509Certificates.X509Certificate certificate, System.Security.Cryptography.X509Certificates.X509Chain chain, System.Net.Security.SslPolicyErrors errors) =>
{
return true; // always accept the certificate
});
System.Net.ServicePointManager.SecurityProtocol = System.Net.SecurityProtocolType.Ssl3 | System.Net.SecurityProtocolType.Tls;
Console.WriteLine("\n********************\nC#测试爬虫(动态)代理IP\n********************\n");
Console.WriteLine("请求网址为:" + targetUrl);
StartCrawl();
Console.WriteLine("\n********************\n测试结束,按任意键退出程序\n********************\n");
Console.ReadKey(true);
}
public static void StartCrawl()
{
for (int i = 0; i < 10; i++)
{
Console.WriteLine("\n>>>>>>>>>>>>>>>>>>>>>>>第" + i + "次请求测试");
// 获取动态IP
Uri uri = new Uri(ipApi);
System.Net.HttpWebRequest request = (System.Net.HttpWebRequest)System.Net.WebRequest.Create(ipApi);
// 设置超时时间为20秒
request.Timeout = 20000;
System.Net.HttpWebResponse response = (System.Net.HttpWebResponse)request.GetResponse();
System.IO.Stream stream = response.GetResponseStream();
System.IO.StreamReader reader = new System.IO.StreamReader(stream);
String iptxt = reader.ReadToEnd();
reader.Close(); // Close() disposes the reader as well
string[] ipports = iptxt.Split(new String[] { "\n" }, StringSplitOptions.RemoveEmptyEntries);
System.Threading.Thread.Sleep(1000);
try
{
// Pick a random proxy from the returned list
string ipport = ipports[new Random().Next(0, ipports.Length)];
System.Net.WebClient client = new System.Net.WebClient();
client.Encoding = System.Text.Encoding.GetEncoding("GB2312");
// Configure the proxy
System.Net.WebProxy proxy = new System.Net.WebProxy();
proxy.Address = new Uri("http://" + ipport + "/");
client.Proxy = proxy;
// Fetch the page content
byte[] byResponse = client.DownloadData(targetUrl);
String txt = System.Text.Encoding.GetEncoding("GB2312").GetString(byResponse);
Console.WriteLine("Using proxy " + ipport + " got the following content:\n" + txt);
client.Dispose();
}
catch (Exception e) {
Console.WriteLine("出错了:" + e.Message);
}
}
}
}
}
<?php
echo "
PHP test of dynamic proxy IPs; open this PHP page in a browser to see the result
";
// 全网代理IP API order number
$order = "fill-in-your-goubanjia-API-order-number-here";
$ipApi = "http://dynamic.goubanjia.com/dynamic/get/$order.html";
echo "
API URL: $ipApi
";
// Target URL
$targetUrl = "http://pv.sohu.com/cityjson?ie=utf-8";
// Pretend to be a browser
$user_agent = "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.36 SE 2.X MetaSr 1.0";
// Start the test
echo "
*******************************
";
for($i = 0; $i < 5; $i++){
try{
// Fetch proxies
$ch = curl_init();
$timeout = 5;
curl_setopt($ch, CURLOPT_URL, $ipApi);
curl_setopt($ch, CURLOPT_RETURNTRANSFER, 1);
curl_setopt($ch, CURLOPT_CONNECTTIMEOUT, $timeout);
$ipports = explode("\n", curl_exec($ch));
curl_close($ch);
echo "
>>>>> Fetched and using proxy IP: " . $ipports[0];
// Crawl the target URL through the proxy (all options below belong on the $cs handle; the original mistakenly set some on the already-closed $ch)
$ipport = $ipports[0];
$cs = curl_init();
curl_setopt($cs, CURLOPT_SSL_VERIFYPEER, 0); // skip verification of the certificate's origin
curl_setopt($cs, CURLOPT_SSL_VERIFYHOST, 0); // skip checking the SSL algorithm in the certificate
curl_setopt($cs, CURLOPT_SSLVERSION, 2); // SSL protocol version: 1, 2 or 3; see http://www.goubanjia.com/help/article-69.html
curl_setopt($cs, CURLOPT_PROXY, $ipport);
curl_setopt($cs, CURLOPT_URL, $targetUrl);
curl_setopt($cs, CURLOPT_RETURNTRANSFER, 1);
curl_setopt($cs, CURLOPT_CONNECTTIMEOUT, $timeout);
// Persist cookies; comment this line out if you do not need them
curl_setopt($cs, CURLOPT_COOKIEJAR, "c:\cookie.txt");
// Set the User-Agent
curl_setopt($cs, CURLOPT_USERAGENT, $user_agent);
// Header options
curl_setopt($cs, CURLOPT_HEADER, 0);
curl_setopt($cs, CURLOPT_HTTPHEADER, array('accept-encoding:gzip, deflate, sdch, br', 'origin:http://www.baidu.com'));
// Set the Referer
curl_setopt($cs, CURLOPT_REFERER, "http://www.baidu.com/");
// Follow redirects
curl_setopt($cs, CURLOPT_FOLLOWLOCATION, true);
// Automatically set the Referer across multi-level redirects
curl_setopt($cs, CURLOPT_AUTOREFERER, true);
// Use a GET request
curl_setopt($cs, CURLOPT_CUSTOMREQUEST, 'GET');
$html = curl_exec($cs);
echo ", got the following HTML: " . $html . "
";
curl_close($cs);
}catch(Exception $e){
echo "
>>>>> Request failed!!
";
}
sleep(1);
}
?>
Python 2.X dynamic proxy IP · Python 3.X dynamic proxy IP · Python 3.X SOCKS5 proxy IP
# -*- coding: UTF-8 -*-
'''
Python 2.X
无忧代理IP (data5u) Created on 2017-08-21
Description: this DEMO shows how to request a web page through crawler (dynamic) proxy IPs, using multiple threads
Logic: fetch IPs from the API every 5 seconds, and start one thread per IP to fetch the page source
@author: www.data5u.com
'''
import time
import threading
import requests
import urllib
import sys

# Work around encoding errors under Python 2
reload(sys)
sys.setdefaultencoding('utf8')

ips = []

# Thread class that crawls the data
class CrawlThread(threading.Thread):
    def __init__(self, proxyip):
        super(CrawlThread, self).__init__()
        self.proxyip = proxyip
    def run(self):
        # Start timing
        start = time.time()
        # Request headers
        headers = {
            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/60.0.3112.113 Safari/537.36'
        }
        # Request the URL through the proxy IP; verify=False skips SSL verification (prevents SSL errors)
        html = requests.get(
            url=targetUrl,
            proxies={"http": "http://" + self.proxyip, "https": "https://" + self.proxyip},
            verify=False,
            timeout=15,
            headers=headers
        ).content.decode()
        # Stop timing
        end = time.time()
        # Print the result (time.time() differences are in seconds, not milliseconds)
        print(threading.current_thread().getName() + " took " + str(end - start) + " seconds; proxy " + self.proxyip + " returned the following HTML:\n" + html + "\n*************")

# Thread class that fetches proxy IPs
class GetIpThread(threading.Thread):
    def __init__(self, fetchSecond):
        super(GetIpThread, self).__init__()
        self.fetchSecond = fetchSecond
    def run(self):
        global ips
        while True:
            # Fetch the IP list
            res = urllib.urlopen(apiUrl).read().strip("\n")
            # Split the result on \n
            ips = res.split("\n")
            # Use each IP
            for proxyip in ips:
                if proxyip.strip():
                    # Start one thread per IP
                    CrawlThread(proxyip).start()
            # Sleep
            time.sleep(self.fetchSecond)

if __name__ == '__main__':
    # Fill in the API order number from 无忧代理IP (get it from the user center)
    order = "please-input-your-order-here"
    # API endpoint for fetching IPs
    apiUrl = "http://dynamic.goubanjia.com/dynamic/get/" + order + ".html"
    # Target URL to crawl
    targetUrl = "http://pv.sohu.com/cityjson?ie=utf-8"
    # Interval between IP fetches; 5 seconds is recommended
    fetchSecond = 5
    # Start fetching IPs automatically
    GetIpThread(fetchSecond).start()
# -*- coding: UTF-8 -*-
'''
Python 3.x
无忧代理IP (data5u) Created on 2018-05-11
Description: this DEMO shows how to request a web page through crawler (dynamic) proxy IPs, using multiple threads
Logic: fetch IPs from the API every 5 seconds, and start one thread per IP to fetch the page source
@author: www.data5u.com
'''
import requests
import time
import threading
import urllib3

ips = []

# Thread class that crawls the data
class CrawlThread(threading.Thread):
    def __init__(self, proxyip):
        super(CrawlThread, self).__init__()
        self.proxyip = proxyip
    def run(self):
        # Start timing
        start = time.time()
        # Silence the warning about disabled certificate verification
        urllib3.disable_warnings()
        # Request the URL through the proxy IP; verify=False skips SSL verification (prevents SSL errors)
        html = requests.get(url=targetUrl, proxies={"http": 'http://' + self.proxyip, "https": 'https://' + self.proxyip}, verify=False, timeout=15).content.decode()
        # Stop timing
        end = time.time()
        # Print the result (time.time() differences are in seconds, not milliseconds)
        print(threading.current_thread().getName() + " took " + str(end - start) + " seconds; proxy " + self.proxyip + " returned the following HTML:\n" + html + "\n*************")

# Thread class that fetches proxy IPs
class GetIpThread(threading.Thread):
    def __init__(self, fetchSecond):
        super(GetIpThread, self).__init__()
        self.fetchSecond = fetchSecond
    def run(self):
        global ips
        while True:
            # Fetch the IP list
            res = requests.get(apiUrl).content.decode()
            # Split the result on \n
            ips = res.split('\n')
            # Use each IP
            for proxyip in ips:
                if proxyip.strip():
                    # Start one thread per IP
                    CrawlThread(proxyip).start()
            # Sleep
            time.sleep(self.fetchSecond)

if __name__ == '__main__':
    # Fill in the API order number from 无忧代理IP (get it from the user center)
    order = "replace-this-with-your-IP-extraction-code"
    # API endpoint for fetching IPs
    apiUrl = "http://dynamic.goubanjia.com/dynamic/get/" + order + ".html"
    # Target URL to crawl
    targetUrl = "http://pv.sohu.com/cityjson?ie=utf-8"
    # Interval between IP fetches; 5 seconds is recommended
    fetchSecond = 5
    # Start fetching IPs automatically
    GetIpThread(fetchSecond).start()
# -*- coding: UTF-8 -*-
'''
Python 3.x
无忧代理IP (data5u) Created on 2018-05-11
Description: this DEMO shows how to request a web page through crawler (dynamic) SOCKS5 proxy IPs, using multiple threads
Logic: fetch IPs from the API every 5 seconds, and start one thread per IP to fetch the page source
Note: install the socks module first: pip3 install 'requests[socks]'
@author: www.data5u.com
'''
import requests
import time
import threading
import urllib3

ips = []

# Thread class that crawls the data
class CrawlThread(threading.Thread):
    def __init__(self, proxyip):
        super(CrawlThread, self).__init__()
        self.proxyip = proxyip
    def run(self):
        # Start timing
        start = time.time()
        # Silence the warning about disabled certificate verification
        urllib3.disable_warnings()
        # Request the URL through the SOCKS5 proxy; verify=False skips SSL verification (prevents SSL errors)
        html = requests.get(url=targetUrl, proxies={"http": 'socks5://user:password@' + self.proxyip, "https": 'socks5://user:password@' + self.proxyip}, verify=False, timeout=15).content.decode()
        # Stop timing
        end = time.time()
        # Print the result (time.time() differences are in seconds, not milliseconds)
        print(threading.current_thread().getName() + " took " + str(end - start) + " seconds; proxy " + self.proxyip + " returned the following HTML:\n" + html + "\n*************")

# Thread class that fetches proxy IPs
class GetIpThread(threading.Thread):
    def __init__(self, fetchSecond):
        super(GetIpThread, self).__init__()
        self.fetchSecond = fetchSecond
    def run(self):
        global ips
        while True:
            # Fetch the IP list
            res = requests.get(apiUrl).content.decode()
            # Split the result on \n
            ips = res.split('\n')
            # Use each IP
            for proxyip in ips:
                if proxyip.strip():
                    # Start one thread per IP
                    CrawlThread(proxyip).start()
            # Sleep
            time.sleep(self.fetchSecond)

if __name__ == '__main__':
    # Fill in the API order number from 无忧代理IP (get it from the user center)
    order = "replace-this-with-your-IP-extraction-code"
    # API endpoint for fetching IPs
    apiUrl = "http://dynamic.goubanjia.com/dynamic/get/" + order + ".html"
    # Target URL to crawl
    targetUrl = "http://pv.sohu.com/cityjson?ie=utf-8"
    # Interval between IP fetches; 5 seconds is recommended
    fetchSecond = 5
    # Start fetching IPs automatically
    GetIpThread(fetchSecond).start()