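// Example Express server that uses crawl-me-not to reject requests from AI crawlers.
// Assumes both dependencies are installed locally, e.g. `npm install express crawl-me-not`.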
const express = require('express');
const { shouldBlockCrawler, extractUserAgent } = require('crawl-me-not');

const app = express();

// Middleware to block AI crawlers
app.use((req, res, next) => {
  const userAgent = extractUserAgent(req.headers);
  const result = shouldBlockCrawler(userAgent, {
    blockAI: true,
    blockSEO: false,
    debug: true
  });

  if (result.isBlocked) {
    console.log(`Blocked ${result.crawlerType} crawler: ${result.userAgent}`);
    return res.status(403).json({
      error: 'Access denied',
      reason: `${result.crawlerType} crawler detected`,
      userAgent: result.userAgent
    });
  }

  next();
});

app.get('/', (req, res) => {
  res.json({ message: 'Hello World! AI crawlers are blocked.' });
});

app.get('/api/data', (req, res) => {
  res.json({ data: 'This API is protected from AI crawlers' });
});

const port = process.env.PORT || 3000;
app.listen(port, () => {
  console.log(`Server running on port ${port}`);
  console.log('AI crawlers will receive a 403 response');
});
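
// Quick manual check (a sketch, assuming the server runs on the port above and that
// crawl-me-not classifies a GPTBot User-Agent as an AI crawler):
//   curl -H "User-Agent: GPTBot" http://localhost:3000/          -> 403 JSON error
//   curl -H "User-Agent: Mozilla/5.0" http://localhost:3000/     -> 200 greeting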