// Example: Node.js HTTP server that blocks AI crawlers using crawl-me-not.
const http = require('node:http');
const { shouldBlockCrawler, extractUserAgent } = require('crawl-me-not');
/**
 * Request handler: screens every incoming request through crawl-me-not.
 * AI crawlers receive a 403 JSON refusal; all other clients get the
 * normal JSON greeting.
 *
 * @param {import('node:http').IncomingMessage} req - incoming request
 * @param {import('node:http').ServerResponse} res - response to write
 */
function handleRequest(req, res) {
  const ua = extractUserAgent(req.headers);
  const verdict = shouldBlockCrawler(ua, {
    blockAI: true,   // refuse AI crawlers
    blockSEO: false, // search-engine bots remain welcome
    debug: true,
  });

  if (!verdict.isBlocked) {
    // Normal request handling
    res.statusCode = 200;
    res.setHeader('Content-Type', 'application/json');
    res.end(JSON.stringify({
      message: 'Hello World!',
      timestamp: new Date().toISOString(),
    }));
    return;
  }

  // Blocked path: log the hit, then answer 403 with a JSON explanation.
  console.log(`Blocked ${verdict.crawlerType} crawler: ${verdict.userAgent}`);

  res.statusCode = 403;
  res.setHeader('Content-Type', 'application/json');
  res.setHeader('X-Blocked-Reason', 'AI crawler detected');
  res.end(JSON.stringify({
    error: 'Access denied',
    reason: `${verdict.crawlerType} crawler detected`,
    userAgent: verdict.userAgent,
  }));
}

const server = http.createServer(handleRequest);
// PORT arrives from the environment as a string; parse it so `port` is
// always a number, and fall back to 3000 when PORT is unset, empty, or
// not a valid integer (Number.parseInt yields NaN, which is falsy).
const port = Number.parseInt(process.env.PORT ?? '', 10) || 3000;

server.listen(port, () => {
  console.log(`Server running on port ${port}`);
  console.log('AI crawlers will receive a 403 response');
});