This library is an ES6 port of the original PHP class @CrawlerDetect. It helps you detect bots, crawlers, and spiders by scanning a user-agent string or the incoming request.headers.
npm install es6-crawler-detect
'use strict';

// CommonJS usage: detect crawlers from an Express request.
const express = require('express');
const { Crawler, middleware } = require('es6-crawler-detect');

const app = express();

// NOTE: the original wrote `function async (...)`, which declares a function
// *named* "async" rather than an async function. Corrected to `async function`.
app.get('your/route', async function (request, response) {
  // Create a new Crawler instance bound to the incoming request.
  const CrawlerDetector = new Crawler(request);

  // Check the current visitor's user agent.
  if (CrawlerDetector.isCrawler()) {
    // true if crawler user agent detected
  }

  // Or check an explicit user-agent string.
  if (CrawlerDetector.isCrawler('Mozilla/5.0 (compatible; Sosospider/2.0; http://help.soso.com/webspider.htm)')) {
    // true if crawler user agent detected
  }

  // Output the name of the bot that matched (if any).
  response.send(CrawlerDetector.getMatches());
});

/**
 * Or by using the middleware.
 */
app.use(middleware((request, response) => {
  // do something here
  // e.g. console.log(request.body)
  // e.g. return response.status(403).send('Forbidden')
}));

app.get('/crawler', async function (request, response) {
  // Or check a user-agent string; the middleware attaches a Crawler
  // instance to the request as `request.Crawler`.
  request.Crawler.isCrawler('Mozilla/5.0 (compatible; Sosospider/2.0; http://help.soso.com/webspider.htm)');

  // Output the name of the bot that matched (if any).
  response.send(request.Crawler.getMatches());
});
// ES-module usage: detect crawlers without an HTTP framework.
import { Crawler, middleware } from 'es6-crawler-detect';

const CrawlerDetector = new Crawler();

// Check the current visitor's user agent.
if (CrawlerDetector.isCrawler()) {
  // true if crawler user agent detected
}

// Or check an explicit user-agent string.
if (CrawlerDetector.isCrawler('Mozilla/5.0 (compatible; Sosospider/2.0; http://help.soso.com/webspider.htm)')) {
  // true if crawler user agent detected
}

// Output the name of the bot that matched (if any).
console.log(CrawlerDetector.getMatches());

/**
 * Or by using the middleware.
 * (Fixed the `reponse` parameter typo — the example body below
 * references `response.status(...)`.)
 */
middleware((request, response) => {
  // do something here
  // e.g. console.log(request.body)
  // e.g. return response.status(403).send('Forbidden')
});
If you find a bot/spider/crawler user agent that CrawlerDetect fails to detect, please submit a pull request with the regex pattern added to the data array in ./crawler/crawlers.ts.