Skip to content

Commit 1011b2f

Browse files
dkirov-ddclaude
and committed
Add authenticated wheel downloads with aws-vault integration
Since S3 bucket cannot be made public, reverted to authenticated downloads: Upload changes (release.py): - Pointer files: public-read (for TUF access) - Wheel files: private (requires AWS authentication) Downloader changes: - Added boto3 back for authenticated S3 downloads - Added inline aws-vault integration to CLI - Downloads wheels using boto3.client('s3').get_object() - Verifies wheel digest after download CLI integration: - Added --aws-vault-profile option - Auto-detects AWS credentials - Re-execs with aws-vault if credentials missing - Uses default profile: sso-agent-integrations-dev-account-admin Usage: # With explicit profile datadog-checks-downloader datadog-postgres --aws-vault-profile my-profile # Auto-detect (will use aws-vault if no credentials) datadog-checks-downloader datadog-postgres 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude <[email protected]>
1 parent ca2a2d6 commit 1011b2f

File tree

4 files changed

+75
-9
lines changed

4 files changed

+75
-9
lines changed

datadog_checks_dev/datadog_checks/dev/tooling/release.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -266,21 +266,21 @@ def upload_package(package_path, version, public=False):
266266
raise
267267
# Doesn't exist, proceed with upload
268268

269-
# Upload pointer file (public for TUF)
269+
# Upload pointer file with metadata (public for TUF access)
270270
s3.upload_file(
271271
pointer_file_path,
272272
S3_BUCKET,
273273
pointer_s3_key,
274274
ExtraArgs={'Metadata': {'digest': wheel_hash, 'version': version}, 'ACL': 'public-read'},
275275
)
276276

277-
# Upload wheel file with hash metadata (public for direct download)
277+
# Upload wheel file with hash metadata (private, requires authentication)
278278
wheel_s3_key = f"simple/{package_name}/{wheel_file_name}"
279279
s3.upload_file(
280280
wheel_file_path,
281281
S3_BUCKET,
282282
wheel_s3_key,
283-
ExtraArgs={'Metadata': {'sha256': wheel_hash}, 'ACL': 'public-read'},
283+
ExtraArgs={'Metadata': {'sha256': wheel_hash}},
284284
)
285285

286286
print(f"Uploaded {pointer_file_name} and {wheel_file_name} to S3 bucket {S3_BUCKET}")

datadog_checks_downloader/datadog_checks/downloader/cli.py

Lines changed: 43 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -7,6 +7,7 @@
77
import argparse
88
import os
99
import re
10+
import subprocess
1011
import sys
1112

1213
# 2nd party.
@@ -21,6 +22,38 @@
2122
# Private module functions.
2223

2324

25+
def __check_aws_credentials():
    """Return True if usable AWS credentials are currently available.

    Probes credentials by calling STS ``get_caller_identity``, which fails
    when no credentials are configured or they are invalid/expired.

    Returns:
        bool: True when the STS call succeeds, False when it fails for a
        credential/API reason.
    """
    import boto3
    import botocore.exceptions

    try:
        sts = boto3.client('sts')
        sts.get_caller_identity()
        return True
    except (botocore.exceptions.BotoCoreError, botocore.exceptions.ClientError):
        # NoCredentialsError is a BotoCoreError subclass; ClientError covers
        # expired/invalid credentials rejected by STS. Anything else (e.g. a
        # programming error) should propagate instead of being silently
        # treated as "no credentials".
        return False
34+
35+
36+
def __exec_with_aws_vault(profile, command_args):
    """Replace the current process with *command_args* run under aws-vault.

    Does not return on success: ``os.execvp`` swaps the process image for
    ``aws-vault exec <profile> -- <command_args>``.
    """
    full_cmd = ['aws-vault', 'exec', profile, '--']
    full_cmd.extend(command_args)
    sys.stderr.write(f"Re-executing with aws-vault profile: {profile}\n")
    sys.stderr.flush()
    os.execvp('aws-vault', full_cmd)
42+
43+
44+
def __ensure_aws_credentials(profile=None):
    """Ensure AWS credentials are available, re-exec with aws-vault if needed.

    Args:
        profile: aws-vault profile explicitly requested by the user, or None
            to auto-detect.

    Does not return when a re-exec happens (the process image is replaced).
    """
    if profile:
        # User explicitly specified a profile, re-exec with aws-vault.
        # BUG FIX: strip the --aws-vault-profile option (and its value) from
        # argv first — otherwise the re-exec'd child sees the flag again and
        # re-execs itself in an infinite loop.
        args = []
        skip_next = False
        for arg in sys.argv:
            if skip_next:
                skip_next = False
                continue
            if arg == '--aws-vault-profile':
                skip_next = True
                continue
            if arg.startswith('--aws-vault-profile='):
                continue
            args.append(arg)
        __exec_with_aws_vault(profile, args)
    elif not __check_aws_credentials():
        # No credentials available, try to find a default profile. The child
        # runs without the flag, finds credentials provided by aws-vault, and
        # proceeds normally (no loop).
        default_profile = os.environ.get('AWS_VAULT_PROFILE') or 'sso-agent-integrations-dev-account-admin'
        sys.stderr.write(f"No AWS credentials found. Using aws-vault profile: {default_profile}\n")
        sys.stderr.flush()
        __exec_with_aws_vault(default_profile, sys.argv)
55+
56+
2457
def __is_canonical(version):
2558
"""
2659
https://www.python.org/dev/peps/pep-0440/#appendix-b-parsing-version-strings-with-regular-expressions
@@ -100,8 +133,18 @@ def instantiate_downloader():
100133
'-v', '--verbose', action='count', default=0, help='Show verbose information about TUF.'
101134
)
102135

136+
parser.add_argument(
137+
'--aws-vault-profile',
138+
type=str,
139+
default=None,
140+
help='AWS Vault profile to use for authentication. If not specified, will auto-detect or re-exec with aws-vault if needed.',
141+
)
142+
103143
args = parser.parse_args()
104144

145+
# Ensure AWS credentials are available (will re-exec with aws-vault if needed)
146+
__ensure_aws_credentials(profile=args.aws_vault_profile)
147+
105148
repository_url_prefix = args.repository
106149
standard_distribution_name = args.standard_distribution_name
107150
version = args.version

datadog_checks_downloader/datadog_checks/downloader/download.py

Lines changed: 28 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -13,6 +13,7 @@
1313
import urllib.parse
1414
import urllib.request
1515

16+
import boto3
1617
import yaml
1718

1819
from packaging.version import parse as parse_version
@@ -167,6 +168,8 @@ def __download_wheel_from_pointer(self, pointer_abspath, standard_distribution_n
167168
Returns:
168169
Absolute path to downloaded wheel file
169170
"""
171+
from urllib.parse import urlparse
172+
170173
# Parse pointer file
171174
with open(pointer_abspath, 'rb') as f:
172175
pointer_bytes = f.read()
@@ -185,15 +188,34 @@ def __download_wheel_from_pointer(self, pointer_abspath, standard_distribution_n
185188
# Extract wheel filename from URI
186189
wheel_filename = wheel_uri.split('/')[-1]
187190

188-
# Download wheel directly from public URI
191+
# Download wheel from S3 with authentication
189192
logger.info(f'Downloading wheel from: {wheel_uri}')
190193
wheel_abspath = os.path.join(self.__targets_dir, 'simple', standard_distribution_name, wheel_filename)
191194
os.makedirs(os.path.dirname(wheel_abspath), exist_ok=True)
192195

193196
try:
194-
# Download from public URI
195-
with urllib.request.urlopen(wheel_uri) as resp:
196-
wheel_bytes = resp.read()
197+
# Parse S3 URI to extract bucket and key
198+
# Example: https://test-public-integration-wheels.s3.eu-north-1.amazonaws.com/simple/datadog-postgres/wheel.whl
199+
parsed = urlparse(wheel_uri)
200+
201+
# Extract bucket name from hostname (format: bucket.s3.region.amazonaws.com)
202+
bucket_name = parsed.hostname.split('.')[0]
203+
204+
# Extract S3 key (path without leading /)
205+
s3_key = parsed.path.lstrip('/')
206+
207+
# Extract region from hostname if present
208+
if '.s3.' in parsed.hostname and '.amazonaws.com' in parsed.hostname:
209+
region = parsed.hostname.split('.s3.')[1].split('.amazonaws.com')[0]
210+
else:
211+
region = None
212+
213+
logger.debug(f'Parsed S3 URI: bucket={bucket_name}, key={s3_key}, region={region}')
214+
215+
# Use boto3 to download with AWS credentials
216+
s3_client = boto3.client('s3', region_name=region)
217+
response = s3_client.get_object(Bucket=bucket_name, Key=s3_key)
218+
wheel_bytes = response['Body'].read()
197219

198220
# Verify digest
199221
actual_digest = hashlib.sha256(wheel_bytes).hexdigest()
@@ -209,8 +231,8 @@ def __download_wheel_from_pointer(self, pointer_abspath, standard_distribution_n
209231
logger.info(f'Wheel verified and saved: {wheel_abspath}')
210232
return wheel_abspath
211233

212-
except urllib.error.HTTPError as err:
213-
logger.error('GET %s: %s', wheel_uri, err)
234+
except Exception as err:
235+
logger.error('Failed to download wheel from %s: %s', wheel_uri, err)
214236
raise
215237

216238
def download(self, target_relpath):

datadog_checks_downloader/pyproject.toml

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -33,6 +33,7 @@ license = "BSD-3-Clause"
3333

3434
[project.optional-dependencies]
3535
deps = [
36+
"boto3>=1.40.0",
3637
"in-toto==2.0.0",
3738
"packaging==25.0",
3839
"securesystemslib[crypto,pynacl]==0.28.0",

0 commit comments

Comments
 (0)