-
Select Topic Area: Question

Body: In my GitHub Action (written using Node.js) I need to extract images and videos from the PR to send them to Discord when it is merged (along with the changelog). When sending a request via Node's https module I received an error response. I also tried sending a request to the direct (pre-signed S3) URL, and I also tried to explicitly specify the header value:

const aws4 = require('aws4');
const https = require('https');
// Pre-signed S3 URL copied from a GitHub attachment redirect. The query string
// (X-Amz-Algorithm, X-Amz-Signature, ...) already carries the authentication,
// and it expires shortly after issuance (X-Amz-Expires=300, i.e. 5 minutes).
const url = new URL('https://github-production-user-asset-6210df.s3.amazonaws.com/157578255/473711935-c23a2122-2260-499d-aabe-d4c95403d6ef.png?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=AKIAVCODYLSA53PQK4ZA%2F20250809%2Fus-east-1%2Fs3%2Faws4_request&X-Amz-Date=20250809T091659Z&X-Amz-Expires=300&X-Amz-Signature=6547f8e485b516ae6c9c8abe7a9757551418049875bd6261e2b72f21b96138c7&X-Amz-SignedHeaders=host');
const opts = {
  host: url.host,
  path: url.pathname + url.search,
  method: 'GET',
};
// NOTE(review): this line is the likely cause of the 400 response. The URL is
// already pre-signed (query-string auth); adding a second AWS signature in the
// request headers gives S3 two auth mechanisms at once, which it rejects
// ("Only one auth mechanism allowed" — see the explanation at the end of this
// thread). Request the pre-signed URL as-is instead of re-signing it.
aws4.sign(opts);
const req = https.request(opts, (res) => {
  // Accumulate the response body as a string (fine for text; binary data
  // should be collected as Buffers instead).
  let data = '';
  res.on('data', (chunk) => { data += chunk; });
  res.on('end', () => {
    console.log('Status:', res.statusCode);
    console.log('Data:', data);
  });
});
req.on('error', (err) => {
  console.error('Error:', err);
});
req.end();

But I received a 400 response with the following text: [error text removed from the original post]. Does anyone know how to solve this problem and successfully connect to the URL to download the files?
Beta Was this translation helpful? Give feedback.
Replies: 4 comments 1 reply
-
You’re getting 403/404 because PR attachment links from GitHub are served via short-lived, pre-signed AWS S3 URLs. Here’s what’s going on: links like https://github.com/user-attachments/assets/... are redirects to S3 (details at the end of this thread).

How to download them in Node.js: you don’t need to re-sign or add AWS headers. Just follow the redirects and download the file within the valid time window.

import https from 'https';
import { URL } from 'url';
/**
 * Download a file over HTTPS, following redirects. GitHub attachment links
 * redirect to short-lived pre-signed S3 URLs, so redirects must be followed
 * and the pre-signed URL must NOT be re-signed.
 *
 * @param {string} fileUrl - Absolute https:// URL to download.
 * @param {number} [redirectsLeft=5] - Remaining redirects before giving up;
 *   bounds the recursion (the original recursed on every 3xx with no limit,
 *   so a redirect loop would recurse forever).
 * @returns {Promise<Buffer>} The raw response body.
 * @throws Rejects on network errors, too many redirects, a 3xx response
 *   missing a Location header, or a non-2xx final status (the original
 *   silently resolved 4xx/5xx error pages as if they were file data).
 */
function downloadFile(fileUrl, redirectsLeft = 5) {
  return new Promise((resolve, reject) => {
    const url = new URL(fileUrl);
    const options = {
      hostname: url.hostname,
      path: url.pathname + url.search,
      method: 'GET',
      headers: {
        'User-Agent': 'Mozilla/5.0', // avoids some GitHub 403s
      }
    };
    const req = https.request(options, (res) => {
      // Handle redirects manually — https.request never follows them itself.
      if (res.statusCode >= 300 && res.statusCode < 400) {
        res.resume(); // drain the redirect body so the socket is released
        if (!res.headers.location) {
          return reject(new Error(`Redirect ${res.statusCode} without Location header`));
        }
        if (redirectsLeft <= 0) {
          return reject(new Error('Too many redirects'));
        }
        // Resolve relative Location values against the current URL.
        const next = new URL(res.headers.location, url).toString();
        return resolve(downloadFile(next, redirectsLeft - 1));
      }
      if (res.statusCode < 200 || res.statusCode >= 300) {
        res.resume();
        return reject(new Error(`Request failed with status ${res.statusCode}`));
      }
      let data = [];
      res.on('data', chunk => data.push(chunk));
      res.on('end', () => {
        resolve(Buffer.concat(data)); // raw file data
      });
    });
    req.on('error', reject);
    req.end();
  });
}
// Example usage: download one attachment and report how many bytes arrived.
const sampleUrl = 'https://github.com/user-attachments/assets/6ed7869c-2a5a-41d7-94f8-f76bb2c9ddf1';
downloadFile(sampleUrl)
  .then((fileBuffer) => {
    console.log('Downloaded file size:', fileBuffer.length);
  })
  .catch((err) => {
    console.error('Download failed:', err);
  });
If you’re processing PRs in a GitHub Action, you can also use the GitHub API to fetch the PR body/comments. You can then extract all attachment links from the markdown and download them using the above function.
Beta Was this translation helpful? Give feedback.
-
|
I tried your method, but the images didn't download from the PR... PR link: Kirus59/space-station-14#11. Used in this way:

import fs from 'fs';
import util from 'util';
// Promise-returning wrapper around fs.writeFile so it can be awaited.
const writeFileAsync = util.promisify(fs.writeFile);
/**
 * GET a URL over HTTPS, transparently following 3xx redirects, and resolve
 * with the full response body as a single Buffer.
 *
 * @param {string} fileUrl - Absolute https:// URL to fetch.
 * @returns {Promise<Buffer>} Raw response body.
 */
function httpsReq(fileUrl){
  return new Promise((resolve, reject) => {
    const target = new URL(fileUrl);
    const req = https.request(
      {
        hostname: target.hostname,
        path: target.pathname + target.search,
        method: 'GET',
        headers: {
          'User-Agent': 'Mozilla/5.0', // avoids some GitHub 403s
        }
      },
      (res) => {
        const { statusCode, headers } = res;
        // Chase redirects: GitHub attachment links bounce to pre-signed S3 URLs.
        if (statusCode >= 300 && statusCode < 400 && headers.location) {
          resolve(httpsReq(headers.location));
          return;
        }
        const chunks = [];
        res.on('data', (piece) => chunks.push(piece));
        res.on('end', () => resolve(Buffer.concat(chunks))); // raw file data
      }
    );
    req.on('error', reject);
    req.end();
  });
}
/**
 * Download one media file from `url` and save it as test.png.
 *
 * @param {string} url - URL to fetch. NOTE(review): per the reply below, this
 *   must be a direct attachment URL; passing the PR's web page URL downloads
 *   HTML, not the media file.
 * @param {string} outputFolder - Folder that is created if missing.
 *   NOTE(review): the folder is created but never used for the save path.
 * @param {boolean} [recursive=true] - NOTE(review): unused in this body.
 * @returns {Promise<MediaData|null>} MediaData on success, null on failure.
 */
async function downloadMedia(url, outputFolder, recursive = true){
if (!fs.existsSync(outputFolder)){
fs.mkdirSync(outputFolder, { recursive: true });
}
try {
const fileBuffer = await httpsReq(url);
console.log('Downloaded file size:', fileBuffer.length);
// NOTE(review): `path` is not imported in this snippet and `__dirname` does
// not exist in ES modules — confirm both are provided elsewhere. The file is
// also written next to the script (ignoring `outputFolder`), and the
// hard-coded 'test.png' name overwrites itself on every call.
const savePath = path.join(__dirname, 'test.png');
await writeFileAsync(savePath, fileBuffer);
// NOTE(review): `MediaData` is not defined in this snippet — presumably a
// project class; verify it is imported where this runs.
return new MediaData('test.png', 'image', fileBuffer.length);
}
catch (err){
// Errors are logged and swallowed; the caller receives null, not an exception.
console.error('Download failed:', err);
}
return null;
} |
Beta Was this translation helpful? Give feedback.
-
|
Hi @Kirus59, in your example PR (Kirus59/space-station-14#11) the media is referenced in the PR body markdown, not at the PR's web page URL.

Why your current code isn’t working: you’re not fetching the PR body/comments via the GitHub API. Passing the PR’s web URL downloads HTML, not the media file. You need the exact attachment URL.

Fixed approach:
Example:

import https from 'https';
import fs from 'fs';
import path from 'path';
import { Octokit } from '@octokit/rest';
// Authenticated Octokit client; GITHUB_TOKEN must grant read access to the repo.
const octokit = new Octokit({
auth: process.env.GITHUB_TOKEN // repo read permissions required
});
/**
 * Fetch a pull request's body via the GitHub API and extract every media
 * attachment URL from its markdown.
 *
 * Matches both attachment URL styles GitHub emits:
 *   - https://github.com/user-attachments/assets/<uuid>
 *   - https://user-images.githubusercontent.com/... (incl. the private- prefix)
 * The character class also stops at `"`, `'` and `>` so URLs embedded in HTML
 * `<img src="...">` tags are captured cleanly — the original regex stopped
 * only at whitespace and `)`, so it swallowed trailing markup from HTML image
 * tags and missed githubusercontent.com links entirely.
 *
 * @param {string} owner - Repository owner.
 * @param {string} repo - Repository name.
 * @param {number} prNumber - Pull request number.
 * @returns {Promise<string[]>} All attachment URLs found in the PR body.
 */
async function getPRMediaLinks(owner, repo, prNumber) {
  const { data: pr } = await octokit.pulls.get({
    owner,
    repo,
    pull_number: prNumber
  });
  const body = pr.body || '';
  const regex = /https:\/\/(?:github\.com\/user-attachments|(?:private-)?user-images\.githubusercontent\.com)\/[^\s)"'>]+/g;
  return [...body.matchAll(regex)].map(match => match[0]);
}
/**
 * Perform an HTTPS GET, following 3xx redirects, and resolve with the full
 * response body as a Buffer.
 *
 * @param {string} fileUrl - Absolute https:// URL.
 * @returns {Promise<Buffer>} Raw response body.
 */
function httpsReq(fileUrl) {
  return new Promise((resolve, reject) => {
    const parsed = new URL(fileUrl);
    const requestOptions = {
      hostname: parsed.hostname,
      path: parsed.pathname + parsed.search,
      method: 'GET',
      headers: { 'User-Agent': 'Mozilla/5.0' }
    };
    const req = https.request(requestOptions, (res) => {
      const isRedirect =
        res.statusCode >= 300 && res.statusCode < 400 && Boolean(res.headers.location);
      if (isRedirect) {
        // Recurse on the Location header; attachment links redirect to S3.
        resolve(httpsReq(res.headers.location));
        return;
      }
      const received = [];
      res.on('data', (part) => received.push(part));
      res.on('end', () => resolve(Buffer.concat(received)));
    });
    req.on('error', reject);
    req.end();
  });
}
/**
 * Download every media attachment referenced in a PR body into the current
 * working directory as image-<index>.png.
 *
 * @param {string} owner - Repository owner.
 * @param {string} repo - Repository name.
 * @param {number} prNumber - Pull request number.
 */
async function downloadMedia(owner, repo, prNumber) {
  const mediaLinks = await getPRMediaLinks(owner, repo, prNumber);
  // Downloads run sequentially so files are written one at a time.
  for (const [index, link] of mediaLinks.entries()) {
    const fileBytes = await httpsReq(link);
    const savePath = path.join(process.cwd(), `image-${index}.png`);
    fs.writeFileSync(savePath, fileBytes);
    console.log(`Saved ${savePath} (${fileBytes.length} bytes)`);
  }
}
// Example run
downloadMedia('Kirus59', 'space-station-14', 11);

TL;DR: Don’t request the PR page directly — use the API to get the markdown, then extract all attachment links and download them while they are still valid.

If you want, this can be wrapped into a GitHub Action so that it runs when a PR is merged, then pulls all media from the PR body, and then sends them directly to Discord. That would make the process fully automatic.
Beta Was this translation helpful? Give feedback.
-
|
@Anipaleja I fixed the regex and now everything works, thank you so much for your help |
Beta Was this translation helpful? Give feedback.


You’re getting 403/404 because PR attachment links from GitHub are served via short-lived, pre-signed AWS S3 URLs.
Here’s what’s going on:
Links like
https://github.com/user-attachments/assets/...are redirects to AWS S3. The redirected URL already contains authentication in its query string (X-Amz-Algorithm, X-Amz-Signature, etc.). These URLs expire in ~5–10 minutes. If you try to hit the raw S3 domain without following the redirect, you’ll get403 AccessDenied. If you try to sign them again (e.g., with aws4), you’ll get Only one auth mechanism allowed. If your request code doesn’t follow redirects, the originalgithub.com/user-attachments/...will return404.How to download them in Nod…