feat: redirect manual
AndreiAlexandruParaschiv committed Nov 19, 2024
1 parent fdee6df · commit 671d79a
Showing 2 changed files with 7 additions and 10 deletions.
src/sitemap/handler.js (2 changes: 1 addition & 1 deletion)

@@ -161,7 +161,7 @@ export async function filterValidUrls(urls, log) {

   const fetchUrl = async (url) => {
     try {
-      const response = await fetch(url, { method: 'HEAD' });
+      const response = await fetch(url, { method: 'HEAD', redirect: 'manual' });
       log.info(`URL ${url} returned status: ${response.status}`);
       if (response.status === 200) {
         return { status: OK, url };
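
The functional change is the redirect: 'manual' option: fetch now returns 3xx responses as-is instead of following them, so a URL that redirects surfaces its 301/302 status and fails the response.status === 200 check above. A minimal sketch of the difference, assuming a server-side fetch (node-fetch, @adobe/fetch, or Node 18+ undici) that exposes the raw 3xx response; the probeUrl helper is illustrative and not part of this commit:

// Sketch only: contrasts default and manual redirect handling.
// probeUrl is a hypothetical helper, not part of this commit.
const probeUrl = async (url) => {
  // Default mode follows redirects, so a 301 -> 200 chain reports 200.
  const followed = await fetch(url, { method: 'HEAD' });

  // Manual mode returns the 3xx response itself, without following it.
  // (Browsers would return an opaque redirect; server-side fetch
  // implementations generally expose the real status and Location header.)
  const manual = await fetch(url, { method: 'HEAD', redirect: 'manual' });

  return {
    followedStatus: followed.status,        // e.g. 200
    manualStatus: manual.status,            // e.g. 301
    target: manual.headers.get('location'), // redirect destination, if any
  };
};

For filterValidUrls this means a redirecting sitemap URL is no longer counted as valid, which appears to be the intent of the commit.
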
test/audits/sitemap.test.js (15 changes: 6 additions & 9 deletions)
@@ -238,7 +238,7 @@ describe('Sitemap Audit', () => {
       });
     });

-    it.skip('runs successfully for common sitemap url when robots.txt is not available', async () => {
+    it('should return 404 when robots.txt not found', async () => {
       nock(url)
         .get('/robots.txt')
         .reply(404);
@@ -261,17 +261,13 @@ describe('Sitemap Audit', () => {
       const result = await sitemapAuditRunner(url, context);
       expect(result).to.eql({
         auditResult: {
-          paths: {
-            [`${url}/sitemap.xml`]: [`${url}/foo`, `${url}/bar`],
-          },
           reasons: [
             {
               error: ERROR_CODES.FETCH_ERROR,
               value: 'Fetch error for https://some-domain.adobe/robots.txt: Status 404',
             },
           ],
           success: false,
-          url,
         },
         fullAuditRef: url,
         url,
@@ -488,7 +484,7 @@ describe('Sitemap Audit', () => {
       }]);
     });

-    it.skip('should delete extracted paths when no valid pages exist', async () => {
+    it('should return error when no valid pages exist', async () => {
       nock(url)
         .get('/robots.txt')
         .reply(200, `Sitemap: ${url}/sitemap.xml`);
@@ -508,10 +504,11 @@ describe('Sitemap Audit', () => {
       const result = await findSitemap(url);

       expect(result.success).to.equal(false);
-      expect(result.reasons).to.deep.include({
-        value: 'No valid paths extracted from sitemaps.',
+      expect(result.reasons).to.deep.equal([{
         error: ERROR_CODES.NO_VALID_PATHS_EXTRACTED,
-      });
+        value: `${url}/sitemap.xml`,
+      }]);
+      expect(result.paths).to.be.undefined;
     });

     it.skip('should return success when sitemap is found in robots.txt', async () => {
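
The updated tests cover the robots.txt 404 and no-valid-paths cases but do not assert the redirect path directly. A hypothetical test in the suite's existing nock/chai style (the /old-page path is made up, and the ok result shape of filterValidUrls is an assumption, not confirmed by this diff):

// Hypothetical: verify a 301 is no longer counted as a valid page.
it('flags redirected URLs instead of following them', async () => {
  nock(url)
    .head('/old-page')
    .reply(301, undefined, { Location: `${url}/new-page` });

  // Assumed result shape: ok / notOk buckets of URLs.
  const result = await filterValidUrls([`${url}/old-page`], console);
  expect(result.ok).to.deep.equal([]);
});
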
