Skip to content

Commit

Permalink
Merge pull request #1385 from r15ch13/referer-header
Browse files Browse the repository at this point in the history
Add Referer header to all web requests
  • Loading branch information
lukesampson authored Mar 21, 2017
2 parents 1fc5d8b + 9a65079 commit 29f69b2
Show file tree
Hide file tree
Showing 4 changed files with 14 additions and 4 deletions.
1 change: 1 addition & 0 deletions bin/checkver.ps1
Original file line number Diff line number Diff line change
Expand Up @@ -94,6 +94,7 @@ $queue | % {
jsonpath = $jsonpath;
}

$wc.headers.add('Referer', (strip_filename $url))
$wc.downloadstringasync($url, $state)
}

Expand Down
14 changes: 10 additions & 4 deletions lib/autoupdate.ps1
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ function check_url([String] $url) {
}

try {
$response = Invoke-WebRequest -Uri $url -Method HEAD
$response = Invoke-WebRequest -Uri $url -Method HEAD -Headers @{'Referer' = strip_filename $url}
return ($response -and $response.StatusCode.Equals(200)) # redirects might be ok
} catch [system.net.webexception] {
write-host -f darkred $_
Expand All @@ -42,7 +42,9 @@ function find_hash_in_rdf([String] $url, [String] $filename) {
$data = ""
try {
# Download and parse RDF XML file
[xml]$data = (new-object net.webclient).downloadstring($url)
$wc = new-object net.webclient
$wc.headers.add('Referer', (strip_filename $url))
[xml]$data = $wc.downloadstring($url)
} catch [system.net.webexception] {
write-host -f darkred $_
write-host -f darkred "URL $url is not valid"
Expand All @@ -59,7 +61,9 @@ function find_hash_in_textfile([String] $url, [String] $basename, [String] $type
$hashfile = $null

try {
$hashfile = (new-object net.webclient).downloadstring($url)
$wc = new-object net.webclient
$wc.headers.add('Referer', (strip_filename $url))
$hashfile = $wc.downloadstring($url)
} catch [system.net.webexception] {
write-host -f darkred $_
write-host -f darkred "URL $url is not valid"
Expand All @@ -86,7 +90,9 @@ function find_hash_in_json([String] $url, [String] $basename, [String] $jsonpath
$json = $null

try {
$json = (new-object net.webclient).downloadstring($url) | convertfrom-json -ea stop
$wc = new-object net.webclient
$wc.headers.add('Referer', (strip_filename $url))
$json = $wc.downloadstring($url) | convertfrom-json -ea stop
} catch [system.net.webexception] {
write-host -f darkred $_
write-host -f darkred "URL $url is not valid"
Expand Down
2 changes: 2 additions & 0 deletions lib/core.ps1
Original file line number Diff line number Diff line change
Expand Up @@ -75,6 +75,7 @@ function installed_apps($global) {
# paths
# Return the file-name (leaf) component of a path.
function fname($path) {
    Split-Path -Path $path -Leaf
}
# Remove the trailing extension (the final ".xyz") from a file name, if any.
function strip_ext($fname) {
    return ($fname -replace '\.[^\.]*$', '')
}
# Remove the file-name (leaf) portion of a path or URL, leaving the parent part
# (including its trailing separator). The leaf is regex-escaped so that names
# containing metacharacters (e.g. 'file(1).zip', dots) are matched literally;
# the unescaped form throws on '(' and lets '.' match any character.
function strip_filename($path) { $path -replace [regex]::escape((fname $path)) }

# Create $dir if it does not exist yet, then return its resolved path.
function ensure($dir) {
    if (-not (test-path $dir)) {
        mkdir $dir > $null
    }
    resolve-path $dir
}
function fullpath($path) { # should be ~ rooted
Expand All @@ -94,6 +95,7 @@ function is_local($path) {
# Download $url to the local file $to, sending Scoop's User-Agent plus a
# Referer header set to the URL with its file name stripped (its parent).
function dl($url,$to) {
    $client = new-object system.net.webClient
    $client.headers.add('User-Agent', 'Scoop/1.0')
    $client.headers.add('Referer', (strip_filename $url))
    $client.downloadFile($url,$to)
}
Expand Down
1 change: 1 addition & 0 deletions lib/install.ps1
Original file line number Diff line number Diff line change
Expand Up @@ -153,6 +153,7 @@ function dl($url, $to, $cookies, $progress) {
$wreq = [net.webrequest]::create($url)
if($wreq -is [net.httpwebrequest]) {
$wreq.useragent = 'Scoop/1.0'
$wreq.referer = strip_filename $url
if($cookies) {
$wreq.headers.add('Cookie', (cookie_header $cookies))
}
Expand Down

0 comments on commit 29f69b2

Please sign in to comment.