#include <stdio.h>
#include <stdlib.h>
#include <cpr.h>
/*
 * Print the status code, headers, and body of a completed response.
 *
 * BUG FIX: the original loop tested `header != NULL` while advancing
 * the pointer element-by-element through the array; a struct pointer
 * advanced that way never becomes NULL, so the loop never terminated
 * and read past the end of the headers array (undefined behavior).
 * We now also stop at an entry whose `name` is NULL.
 *
 * NOTE(review): this assumes response->headers is terminated by a
 * sentinel entry with a NULL name — TODO confirm against cpr.h; if
 * the API instead exposes a header count, iterate by count.
 */
void print_result(const cpr_response *response) {
    if (response == NULL) {
        return; /* nothing to print */
    }
    printf("Status Code: %d\n", response->status_code);
    printf("Headers:\n");
    for (const cpr_header *header = response->headers;
         header != NULL && header->name != NULL; header++) {
        printf("%s: %s\n", header->name, header->value);
    }
    printf("\nBody:\n");
    if (response->body != NULL) {
        printf("%s\n", response->body);
    }
}
int main() {
// Set the request URL
const char *url = "https://www.10jqka.com.cn/";
// Set the proxy host and port
const char *proxy_host = "";
int proxy_port = ;
// Set the request method and headers
const char *method = "GET";
const cpr_header *headers[] = {
{"User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.3"},
{"Proxy-Connection", "keep-alive"},
};
int header_count = sizeof(headers) / sizeof(headers[0]);
// Initialize the request structure
cpr_request request = {
.method = method,
.url = url,
.headers = headers,
.header_count = header_count,
.proxy_host = proxy_host,
.proxy_port = proxy_port,
};
// Create a cpr context
cpr_context context = cprCreateContext();
// Send the request
cprResponse response = cprSendRequest(context, &request);
// Print the response
print_result(response);
// Cleanup
cprCloseContext(context);
return 0;
}
在实际使用时,你需要确保代理服务器是可用的。在进行任何爬虫操作之前,你应该仔细阅读并理解相关的法律和规定。