Parse through a sitemap's XML to get all the URLs for your crawler.
npm install sitemapper --save
// Quick start (CommonJS): fetch every URL listed in a sitemap.
var Sitemapper = require('sitemapper');

var parser = new Sitemapper();

// fetch() returns a promise that resolves with the parsed sitemap results.
parser.fetch('http://wp.seantburke.com/sitemap.xml').then(function (result) {
  console.log(result);
});
// Configure the instance up front via the constructor, then call fetch()
// with no arguments (CommonJS).
var Sitemapper = require('sitemapper');

var googleSitemap = new Sitemapper({
  url: 'https://www.google.com/work/sitemap.xml',
  timeout: 15000, // abort the request after 15 seconds
});

googleSitemap
  .fetch()
  .then(function (result) {
    console.log(result);
  })
  .catch(function (err) {
    console.log(err);
  });
// or
// Configure an existing instance, then fetch a specific URL.
// FIX: the original declared `var sitemap` but then referenced `sitemapper`,
// an undeclared name — a ReferenceError in strict mode, or an accidental
// implicit global otherwise. Declare and use one consistent name.
var sitemapper = new Sitemapper();
sitemapper.timeout = 5000; // per-request timeout in milliseconds
sitemapper.fetch('http://wp.seantburke.com/sitemap.xml')
  .then(function (data) {
    console.log(data);
  })
  .catch(function (error) {
    console.log(error);
  });
// ES module usage: configure via the constructor, fetch, then read `.sites`
// from the resolved result.
import Sitemapper from 'sitemapper';

const googleSitemap = new Sitemapper({
  url: 'https://www.google.com/work/sitemap.xml',
  timeout: 15000, // give up after 15 seconds
});

googleSitemap
  .fetch()
  .then((result) => console.log(result.sites))
  .catch((err) => console.log(err));
// or
// Set the timeout on an existing instance and pass the URL to fetch().
const sitemapper = new Sitemapper();
sitemapper.timeout = 5000; // milliseconds

sitemapper
  .fetch('http://wp.seantburke.com/sitemap.xml')
  .then((result) => {
    const { url, sites } = result;
    console.log(`url:${url}`, 'sites:', sites);
  })
  .catch((err) => console.log(err));
npm install sitemapper@1.1.1 --save
// Legacy (sitemapper@1.1.1) callback-style API, superseded by the
// promise-based fetch() shown above.
var Sitemapper = require('sitemapper');
var sitemapper = new Sitemapper();
// getSites(url, callback) invokes callback(err, sites).
// NOTE(review): errors are silently dropped here — presumably acceptable
// for a demo snippet, but consider logging `err` in real code.
sitemapper.getSites('http://wp.seantburke.com/sitemap.xml', function(err, sites) {
  if (!err) {
    console.log(sites);
  }
});