Skip to content

Commit

Permalink
Fix defects found in code scan by dependabot
Browse files Browse the repository at this point in the history
  • Loading branch information
[email protected] committed May 3, 2024
1 parent a65cea7 commit 854e4af
Showing 1 changed file with 6 additions and 6 deletions.
12 changes: 6 additions & 6 deletions legiscope.nodejs/sp-test.js
Original file line number Diff line number Diff line change
Expand Up @@ -475,7 +475,7 @@ async function extract_hosts_from_urlarray( target_url, result )
{//{{{
// URLFIX
// let g = normalizeUrl(result.shift()); // result.shift().replace(/\/\.\.\//,'/').replace(/\/$/,'').replace(/[.]{1,}$/,'').replace(/\/$/,'');
let g = result.shift().replace(/\/\.\.\//,'/').replace(/\/$/,'').replace(/[.]{1,}$/,'').replace(/\/$/,'');
let g = result.shift().replace(/\/\.\.\//g,'/').replace(/\/$/,'').replace(/[.]{1,}$/,'').replace(/\/$/,'');

if ( process.env.PERMIT_HTTP === undefined ) {
let h = g.replace(/^http:/,'https:');
Expand All @@ -497,7 +497,7 @@ async function extract_hosts_from_urlarray( target_url, result )
"Accept-Language" : "en-US,en;q=0.5",
"Accept-Encoding" : "gzip, deflate, br",
"Connection" : "keep-alive",
"Referer" : target_url.replace(/\/\.\.\//,'/').replace(/\/$/,''),
"Referer" : target_url.replace(/\/\.\.\//g,'/').replace(/\/$/,''),
"Cookie" : stringified_cookies( cookies ),
"Upgrade-Insecure-Requests" : 1,
"Sec-Fetch-Dest" : "document",
Expand Down Expand Up @@ -724,7 +724,7 @@ function recompute_filepaths_from_url(target)
.replace(/[\/]{1,}/gi,'/') // Replace multiple forward-slashes to '/'
.replace(/[\/]{1,}$/,'') // Trim multiple trailing slashes
.replace(/^[\/]{1,}/,'') // Trim multiple leading slashes
.replace(/\/\.\.\//,'/') // Remove intervening double-dot components
.replace(/\/\.\.\//g,'/') // Remove intervening double-dot components
.replace(/[\/.]$/,'') // Remove trailing slash-dot
.split('/');

Expand Down Expand Up @@ -1212,7 +1212,7 @@ async function fetch_and_extract( initial_target, depth )

while ( targets.length > 0 && depth_iterations < 10 ) {

let target = targets.shift().replace(/\/\.\.\//,'/').replace(/\/$/,'').replace(/[.]{1,}$/,'').replace(/\/$/,'');
let target = targets.shift().replace(/\/\.\.\//g,'/').replace(/\/$/,'').replace(/[.]{1,}$/,'').replace(/\/$/,'');
let page_assets = new Map;
let have_extracted_urls = false;
let iteration_subjects;
Expand Down Expand Up @@ -1395,7 +1395,7 @@ async function fetch_and_extract( initial_target, depth )
// Append all page asset URLs to the array of DOM-embedded URLs.
page_assets.forEach( (headers, urlraw, map) => {
// URLFIX
let url = urlraw.replace(/\/\.\.\//,'/').replace(/\/$/,'').replace(/[.]{1,}$/,'').replace(/\/$/,'');
let url = urlraw.replace(/\/\.\.\//g,'/').replace(/\/$/,'').replace(/[.]{1,}$/,'').replace(/\/$/,'');
if ( (process.env['SILENT_PARSE'] === undefined) ) console.log("%d Adding %s", extractedUrls.length, urlraw );
if ( (process.env['SILENT_PARSE'] === undefined) ) console.log("%d as %s", extractedUrls.length, url );
extractedUrls.push(url);
Expand Down Expand Up @@ -1438,7 +1438,7 @@ async function fetch_and_extract( initial_target, depth )
step_targets = iteration_subjects.paths.size;
iteration_subjects.paths.forEach((value, urlhere, map) => {
// URLFIX
let key = urlhere.replace(/\/\.\.\//,'/').replace(/\/$/,'').replace(/[.]{1,}$/,'').replace(/\/$/,'');
let key = urlhere.replace(/\/\.\.\//g,'/').replace(/\/$/,'').replace(/[.]{1,}$/,'').replace(/\/$/,'');
let content_type = value['headinfo']['content-type'] || 'text/html';
if ( (!visited_pages.has( key ) || resweep) && /^text\/html.*/.test( content_type ) ) {
console.log( "%s page scan to %s", recursive ? "Extending" : "Deferring", key );
Expand Down

0 comments on commit 854e4af

Please sign in to comment.