Migrate to Podman, Forgejo Actions; clean up cruft
Container: - Dockerfile → Containerfile; drop gosu, entrypoint, PUID/PGID user-switching - HOME=/config so Path.home()/.aws resolves to runtime-mounted credentials - docker-compose.yml → compose.yml with userns_mode: keep-id for Podman rootless - .dockerignore → .containerignore - boto3 unpinned from 1.34.0 to >=1.34.0 CI: - Remove Woodpecker (.woodpecker.yml, .woodpecker/) - Add Forgejo Actions (.forgejo/workflows/ci.yml, publish.yml) - CI: syntax check, security scan, container lint (hadolint), build test - Publish: build and push to Quay.io on main push and version tags Cleanup: - Remove entrypoint.sh (no longer needed) - Remove scripts/build-and-push.sh and PUBLISHING.md (superseded by CI) - All docker → podman command references updated Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
parent
e0fc3bdd42
commit
a6b2cea31f
20 changed files with 1411 additions and 258 deletions
49
.forgejo/workflows/ci.yml
Normal file
49
.forgejo/workflows/ci.yml
Normal file
|
|
@ -0,0 +1,49 @@
|
||||||
|
name: CI
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
pull_request:
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
syntax-check:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
- uses: actions/setup-python@v5
|
||||||
|
with:
|
||||||
|
python-version: '3.11'
|
||||||
|
- run: pip install -r requirements.txt
|
||||||
|
- run: |
|
||||||
|
python -m py_compile app.py
|
||||||
|
python -m py_compile import_from_aws.py
|
||||||
|
python -m py_compile import_data.py
|
||||||
|
|
||||||
|
security-scan:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
- uses: actions/setup-python@v5
|
||||||
|
with:
|
||||||
|
python-version: '3.11'
|
||||||
|
- run: |
|
||||||
|
pip install bandit safety
|
||||||
|
bandit -r . -ll || true
|
||||||
|
safety check --file requirements.txt || true
|
||||||
|
|
||||||
|
container-lint:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
- uses: hadolint/hadolint-action@v3.1.0
|
||||||
|
with:
|
||||||
|
dockerfile: Containerfile
|
||||||
|
|
||||||
|
container-build:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
- uses: docker/setup-buildx-action@v3
|
||||||
|
- uses: docker/build-push-action@v5
|
||||||
|
with:
|
||||||
|
push: false
|
||||||
|
tags: sgo:test
|
||||||
31
.forgejo/workflows/publish.yml
Normal file
31
.forgejo/workflows/publish.yml
Normal file
|
|
@ -0,0 +1,31 @@
|
||||||
|
name: Publish
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches: [main]
|
||||||
|
tags: ['v*']
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
publish:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- uses: docker/login-action@v3
|
||||||
|
with:
|
||||||
|
registry: quay.io
|
||||||
|
username: ${{ secrets.QUAY_USERNAME }}
|
||||||
|
password: ${{ secrets.QUAY_PASSWORD }}
|
||||||
|
|
||||||
|
- uses: docker/metadata-action@v5
|
||||||
|
id: meta
|
||||||
|
with:
|
||||||
|
images: quay.io/${{ secrets.QUAY_USERNAME }}/sgo
|
||||||
|
|
||||||
|
- uses: docker/setup-buildx-action@v3
|
||||||
|
|
||||||
|
- uses: docker/build-push-action@v5
|
||||||
|
with:
|
||||||
|
push: true
|
||||||
|
tags: ${{ steps.meta.outputs.tags }}
|
||||||
|
labels: ${{ steps.meta.outputs.labels }}
|
||||||
|
|
@ -1,29 +0,0 @@
|
||||||
when:
|
|
||||||
event: [push, pull_request]
|
|
||||||
|
|
||||||
pipeline:
|
|
||||||
dependencies:
|
|
||||||
image: python:3.11-slim
|
|
||||||
commands:
|
|
||||||
- pip install -r requirements.txt
|
|
||||||
|
|
||||||
syntax-check:
|
|
||||||
image: python:3.11-slim
|
|
||||||
commands:
|
|
||||||
- python -m py_compile app.py
|
|
||||||
- python -m py_compile import_from_aws.py
|
|
||||||
- python -m py_compile import_data.py
|
|
||||||
|
|
||||||
docker-build:
|
|
||||||
image: docker:dind
|
|
||||||
volumes:
|
|
||||||
- /var/run/docker.sock:/var/run/docker.sock
|
|
||||||
commands:
|
|
||||||
- docker build -t sgo:${CI_COMMIT_SHA} .
|
|
||||||
|
|
||||||
security-scan:
|
|
||||||
image: python:3.11-slim
|
|
||||||
commands:
|
|
||||||
- pip install bandit safety
|
|
||||||
- bandit -r . -ll || true
|
|
||||||
- safety check --file requirements.txt || true
|
|
||||||
|
|
@ -1,19 +0,0 @@
|
||||||
pipeline:
|
|
||||||
docker-lint:
|
|
||||||
image: hadolint/hadolint:latest-alpine
|
|
||||||
commands:
|
|
||||||
- hadolint Dockerfile
|
|
||||||
|
|
||||||
docker-build-test:
|
|
||||||
image: docker:dind
|
|
||||||
volumes:
|
|
||||||
- /var/run/docker.sock:/var/run/docker.sock
|
|
||||||
commands:
|
|
||||||
- docker build -t sgo:test .
|
|
||||||
- docker images sgo:test
|
|
||||||
|
|
||||||
docker-compose-validate:
|
|
||||||
image: docker/compose:latest
|
|
||||||
commands:
|
|
||||||
- docker-compose config -q
|
|
||||||
- docker-compose -f docker-compose.local.yml config -q
|
|
||||||
|
|
@ -1,21 +0,0 @@
|
||||||
pipeline:
|
|
||||||
python-lint:
|
|
||||||
image: python:3.11-slim
|
|
||||||
commands:
|
|
||||||
- pip install flake8 pylint
|
|
||||||
- flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
|
|
||||||
- flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
|
|
||||||
|
|
||||||
python-syntax:
|
|
||||||
image: python:3.11-slim
|
|
||||||
commands:
|
|
||||||
- python -m py_compile app.py
|
|
||||||
- python -m py_compile import_from_aws.py
|
|
||||||
- python -m py_compile import_data.py
|
|
||||||
|
|
||||||
python-security:
|
|
||||||
image: python:3.11-slim
|
|
||||||
commands:
|
|
||||||
- pip install bandit
|
|
||||||
- bandit -r . -f json -o bandit-report.json || true
|
|
||||||
- bandit -r . -ll
|
|
||||||
24
Containerfile
Normal file
24
Containerfile
Normal file
|
|
@ -0,0 +1,24 @@
|
||||||
|
FROM python:3.11-slim
|
||||||
|
|
||||||
|
WORKDIR /app
|
||||||
|
|
||||||
|
# Install Python dependencies
|
||||||
|
COPY requirements.txt .
|
||||||
|
RUN pip install --no-cache-dir -r requirements.txt
|
||||||
|
|
||||||
|
# Copy application files
|
||||||
|
COPY . .
|
||||||
|
|
||||||
|
# Create mount point for AWS credentials and data directory
|
||||||
|
RUN mkdir -p /config /app/data
|
||||||
|
|
||||||
|
EXPOSE 5000
|
||||||
|
|
||||||
|
# HOME=/config means Path.home() resolves to /config at runtime.
|
||||||
|
# Mount your AWS credentials to /config/.aws at runtime — nothing sensitive is baked in.
|
||||||
|
ENV FLASK_APP=app.py \
|
||||||
|
PYTHONUNBUFFERED=1 \
|
||||||
|
DEBUG=false \
|
||||||
|
HOME=/config
|
||||||
|
|
||||||
|
CMD ["python", "app.py"]
|
||||||
39
Dockerfile
39
Dockerfile
|
|
@ -1,39 +0,0 @@
|
||||||
FROM python:3.11-slim
|
|
||||||
|
|
||||||
# Install gosu for user switching
|
|
||||||
RUN apt-get update && \
|
|
||||||
apt-get install -y --no-install-recommends gosu && \
|
|
||||||
rm -rf /var/lib/apt/lists/*
|
|
||||||
|
|
||||||
WORKDIR /app
|
|
||||||
|
|
||||||
# Install Python dependencies
|
|
||||||
COPY requirements.txt .
|
|
||||||
RUN pip install --no-cache-dir -r requirements.txt
|
|
||||||
|
|
||||||
# Copy application files
|
|
||||||
COPY . .
|
|
||||||
|
|
||||||
# Create default directories
|
|
||||||
RUN mkdir -p /app/data /home/sgo
|
|
||||||
|
|
||||||
# Copy entrypoint script
|
|
||||||
COPY entrypoint.sh /entrypoint.sh
|
|
||||||
RUN chmod +x /entrypoint.sh
|
|
||||||
|
|
||||||
# Expose port
|
|
||||||
EXPOSE 5000
|
|
||||||
|
|
||||||
# Set environment variables
|
|
||||||
ENV FLASK_APP=app.py \
|
|
||||||
PYTHONUNBUFFERED=1 \
|
|
||||||
PUID=1000 \
|
|
||||||
PGID=1000 \
|
|
||||||
DEBUG=false \
|
|
||||||
HOME=/home/sgo
|
|
||||||
|
|
||||||
# Use entrypoint for PUID/PGID handling
|
|
||||||
ENTRYPOINT ["/entrypoint.sh"]
|
|
||||||
|
|
||||||
# Run the application
|
|
||||||
CMD ["python", "app.py"]
|
|
||||||
233
app.py
233
app.py
|
|
@ -22,7 +22,6 @@ import queue
|
||||||
app = Flask(__name__)
|
app = Flask(__name__)
|
||||||
|
|
||||||
DB_PATH = os.path.join(os.path.dirname(__file__), 'data', 'aws_export.db')
|
DB_PATH = os.path.join(os.path.dirname(__file__), 'data', 'aws_export.db')
|
||||||
data_imported = False
|
|
||||||
|
|
||||||
# Cache for AWS session credentials (valid for 1 hour)
|
# Cache for AWS session credentials (valid for 1 hour)
|
||||||
session_cache = {} # {profile: {'credentials': {...}, 'region': ..., 'timestamp': ...}}
|
session_cache = {} # {profile: {'credentials': {...}, 'region': ..., 'timestamp': ...}}
|
||||||
|
|
@ -38,16 +37,10 @@ def regexp(pattern, value):
|
||||||
return False
|
return False
|
||||||
|
|
||||||
|
|
||||||
def get_db():
|
def init_db():
|
||||||
"""Get database connection"""
|
"""Create database schema — called once at startup"""
|
||||||
# Ensure data directory exists
|
|
||||||
os.makedirs(os.path.dirname(DB_PATH), exist_ok=True)
|
os.makedirs(os.path.dirname(DB_PATH), exist_ok=True)
|
||||||
|
|
||||||
conn = sqlite3.connect(DB_PATH)
|
conn = sqlite3.connect(DB_PATH)
|
||||||
conn.row_factory = sqlite3.Row
|
|
||||||
conn.create_function("REGEXP", 2, regexp)
|
|
||||||
|
|
||||||
# Create tables if they don't exist
|
|
||||||
cursor = conn.cursor()
|
cursor = conn.cursor()
|
||||||
cursor.execute("""
|
cursor.execute("""
|
||||||
CREATE TABLE IF NOT EXISTS security_groups (
|
CREATE TABLE IF NOT EXISTS security_groups (
|
||||||
|
|
@ -105,17 +98,32 @@ def get_db():
|
||||||
)
|
)
|
||||||
""")
|
""")
|
||||||
conn.commit()
|
conn.commit()
|
||||||
|
conn.close()
|
||||||
|
|
||||||
|
|
||||||
|
def get_db():
|
||||||
|
"""Get database connection"""
|
||||||
|
conn = sqlite3.connect(DB_PATH)
|
||||||
|
conn.row_factory = sqlite3.Row
|
||||||
|
conn.create_function("REGEXP", 2, regexp)
|
||||||
return conn
|
return conn
|
||||||
|
|
||||||
|
|
||||||
|
# Initialize schema at module load (works with both direct run and WSGI)
|
||||||
|
init_db()
|
||||||
|
|
||||||
|
|
||||||
@app.route('/')
|
@app.route('/')
|
||||||
def index():
|
def index():
|
||||||
"""Import page - always shown first"""
|
"""Show explorer if DB has data, otherwise show import page"""
|
||||||
global data_imported
|
try:
|
||||||
# If data already imported, redirect to explorer
|
conn = get_db()
|
||||||
if data_imported and os.path.exists(DB_PATH):
|
count = conn.execute("SELECT COUNT(*) FROM security_groups").fetchone()[0]
|
||||||
|
conn.close()
|
||||||
|
if count > 0:
|
||||||
return render_template('index.html')
|
return render_template('index.html')
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
return render_template('import.html')
|
return render_template('import.html')
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -200,6 +208,7 @@ def import_profile(profile, mfa_code, progress_queue):
|
||||||
region = None
|
region = None
|
||||||
source_profile = None
|
source_profile = None
|
||||||
role_arn = None
|
role_arn = None
|
||||||
|
duration_seconds = 3600 # Default to 1 hour
|
||||||
|
|
||||||
if section_name in config:
|
if section_name in config:
|
||||||
mfa_serial = config[section_name].get('mfa_serial')
|
mfa_serial = config[section_name].get('mfa_serial')
|
||||||
|
|
@ -207,8 +216,20 @@ def import_profile(profile, mfa_code, progress_queue):
|
||||||
source_profile = config[section_name].get('source_profile')
|
source_profile = config[section_name].get('source_profile')
|
||||||
role_arn = config[section_name].get('role_arn')
|
role_arn = config[section_name].get('role_arn')
|
||||||
|
|
||||||
|
# Read duration_seconds from config, default to 3600 (1 hour)
|
||||||
|
if config.has_option(section_name, 'duration_seconds'):
|
||||||
|
try:
|
||||||
|
duration_seconds = int(config[section_name].get('duration_seconds'))
|
||||||
|
# Validate AWS session duration limits (15 min to 12 hours)
|
||||||
|
if duration_seconds < 900 or duration_seconds > 43200:
|
||||||
|
progress_queue.put(('warning', f"[{profile}] duration_seconds {duration_seconds} outside AWS limits (900-43200), using default 3600"))
|
||||||
|
duration_seconds = 3600
|
||||||
|
except ValueError:
|
||||||
|
progress_queue.put(('warning', f"[{profile}] Invalid duration_seconds in config, using default 3600"))
|
||||||
|
duration_seconds = 3600
|
||||||
|
|
||||||
# Debug output
|
# Debug output
|
||||||
progress_queue.put(('info', f"[{profile}] Config: region={region}, mfa_serial={bool(mfa_serial)}, source_profile={source_profile}, role_arn={role_arn}"))
|
progress_queue.put(('info', f"[{profile}] Config: region={region}, mfa_serial={bool(mfa_serial)}, source_profile={source_profile}, role_arn={role_arn}, duration={duration_seconds}s"))
|
||||||
|
|
||||||
# Read base credentials from ~/.aws/credentials
|
# Read base credentials from ~/.aws/credentials
|
||||||
creds_path = Path.home() / '.aws' / 'credentials'
|
creds_path = Path.home() / '.aws' / 'credentials'
|
||||||
|
|
@ -253,7 +274,7 @@ def import_profile(profile, mfa_code, progress_queue):
|
||||||
try:
|
try:
|
||||||
# Get temporary credentials with MFA
|
# Get temporary credentials with MFA
|
||||||
response = sts.get_session_token(
|
response = sts.get_session_token(
|
||||||
DurationSeconds=3600,
|
DurationSeconds=duration_seconds,
|
||||||
SerialNumber=mfa_serial,
|
SerialNumber=mfa_serial,
|
||||||
TokenCode=mfa_code
|
TokenCode=mfa_code
|
||||||
)
|
)
|
||||||
|
|
@ -361,12 +382,13 @@ def import_profile(profile, mfa_code, progress_queue):
|
||||||
account_id, account_name = get_account_info_inline(session)
|
account_id, account_name = get_account_info_inline(session)
|
||||||
progress_queue.put(('info', f" [{profile}] Account: {account_name} ({account_id})"))
|
progress_queue.put(('info', f" [{profile}] Account: {account_name} ({account_id})"))
|
||||||
|
|
||||||
# Cache the session credentials for reuse (valid for 1 hour)
|
# Cache the session credentials for reuse
|
||||||
global session_cache
|
global session_cache
|
||||||
session_cache[profile] = {
|
session_cache[profile] = {
|
||||||
'session': session,
|
'session': session,
|
||||||
'region': region,
|
'region': region,
|
||||||
'timestamp': time.time(),
|
'timestamp': time.time(),
|
||||||
|
'duration_seconds': duration_seconds,
|
||||||
'account_id': account_id,
|
'account_id': account_id,
|
||||||
'account_name': account_name
|
'account_name': account_name
|
||||||
}
|
}
|
||||||
|
|
@ -395,7 +417,6 @@ def import_profile(profile, mfa_code, progress_queue):
|
||||||
@app.route('/api/import', methods=['POST'])
|
@app.route('/api/import', methods=['POST'])
|
||||||
def import_data():
|
def import_data():
|
||||||
"""Import data from AWS with parallel execution and streaming progress"""
|
"""Import data from AWS with parallel execution and streaming progress"""
|
||||||
global data_imported
|
|
||||||
|
|
||||||
data = request.json
|
data = request.json
|
||||||
selected_profiles = data.get('profiles', [])
|
selected_profiles = data.get('profiles', [])
|
||||||
|
|
@ -462,8 +483,6 @@ def import_data():
|
||||||
yield send_progress(f" Total EC2 Instances: {len(all_ec2_instances)}", 'success')
|
yield send_progress(f" Total EC2 Instances: {len(all_ec2_instances)}", 'success')
|
||||||
yield send_progress(f" Total SG Rules: {len(all_sg_rules)}", 'success')
|
yield send_progress(f" Total SG Rules: {len(all_sg_rules)}", 'success')
|
||||||
|
|
||||||
data_imported = True
|
|
||||||
|
|
||||||
yield send_progress("Redirecting to explorer...", 'complete')
|
yield send_progress("Redirecting to explorer...", 'complete')
|
||||||
else:
|
else:
|
||||||
yield send_progress("✗ No data imported", 'error')
|
yield send_progress("✗ No data imported", 'error')
|
||||||
|
|
@ -477,7 +496,6 @@ def import_data():
|
||||||
@app.route('/api/import-profile', methods=['POST'])
|
@app.route('/api/import-profile', methods=['POST'])
|
||||||
def import_single_profile():
|
def import_single_profile():
|
||||||
"""Import data from a single AWS profile with streaming progress"""
|
"""Import data from a single AWS profile with streaming progress"""
|
||||||
global data_imported
|
|
||||||
|
|
||||||
data = request.json
|
data = request.json
|
||||||
profile = data.get('profile')
|
profile = data.get('profile')
|
||||||
|
|
@ -516,7 +534,6 @@ def import_single_profile():
|
||||||
yield send_progress(f" EC2 Instances: {len(result['ec2_instances'])}", 'success')
|
yield send_progress(f" EC2 Instances: {len(result['ec2_instances'])}", 'success')
|
||||||
yield send_progress(f" SG Rules: {len(result['sg_rules'])}", 'success')
|
yield send_progress(f" SG Rules: {len(result['sg_rules'])}", 'success')
|
||||||
|
|
||||||
data_imported = True
|
|
||||||
yield send_progress("Done", 'complete')
|
yield send_progress("Done", 'complete')
|
||||||
else:
|
else:
|
||||||
yield send_progress(f"✗ Import failed for {profile}", 'error')
|
yield send_progress(f"✗ Import failed for {profile}", 'error')
|
||||||
|
|
@ -530,7 +547,7 @@ def import_single_profile():
|
||||||
@app.route('/api/refresh-cached', methods=['POST'])
|
@app.route('/api/refresh-cached', methods=['POST'])
|
||||||
def refresh_cached():
|
def refresh_cached():
|
||||||
"""Refresh data using cached AWS sessions (if still valid)"""
|
"""Refresh data using cached AWS sessions (if still valid)"""
|
||||||
global session_cache, data_imported
|
global session_cache
|
||||||
|
|
||||||
if not session_cache:
|
if not session_cache:
|
||||||
return jsonify({'error': 'No cached sessions', 'redirect': True})
|
return jsonify({'error': 'No cached sessions', 'redirect': True})
|
||||||
|
|
@ -539,15 +556,19 @@ def refresh_cached():
|
||||||
try:
|
try:
|
||||||
from import_from_aws import fetch_security_groups, fetch_ec2_instances, import_to_database
|
from import_from_aws import fetch_security_groups, fetch_ec2_instances, import_to_database
|
||||||
|
|
||||||
# Check if cached sessions are still valid (< 1 hour old)
|
# Check if cached sessions are still valid
|
||||||
current_time = time.time()
|
current_time = time.time()
|
||||||
valid_profiles = []
|
valid_profiles = []
|
||||||
|
|
||||||
for profile, cache_data in session_cache.items():
|
for profile, cache_data in session_cache.items():
|
||||||
age_minutes = (current_time - cache_data['timestamp']) / 60
|
age_seconds = current_time - cache_data['timestamp']
|
||||||
if age_minutes < 55: # Use 55 minutes to be safe
|
duration_seconds = cache_data.get('duration_seconds', 3600)
|
||||||
|
# Use 5 minutes (300 seconds) safety margin
|
||||||
|
max_age_seconds = duration_seconds - 300
|
||||||
|
if age_seconds < max_age_seconds:
|
||||||
valid_profiles.append(profile)
|
valid_profiles.append(profile)
|
||||||
else:
|
else:
|
||||||
|
age_minutes = age_seconds / 60
|
||||||
yield send_progress(f"[{profile}] Session expired ({age_minutes:.1f} min old)", 'error')
|
yield send_progress(f"[{profile}] Session expired ({age_minutes:.1f} min old)", 'error')
|
||||||
|
|
||||||
if not valid_profiles:
|
if not valid_profiles:
|
||||||
|
|
@ -598,7 +619,6 @@ def refresh_cached():
|
||||||
yield send_progress(f" Total Security Groups: {len(all_security_groups)}", 'success')
|
yield send_progress(f" Total Security Groups: {len(all_security_groups)}", 'success')
|
||||||
yield send_progress(f" Total EC2 Instances: {len(all_ec2_instances)}", 'success')
|
yield send_progress(f" Total EC2 Instances: {len(all_ec2_instances)}", 'success')
|
||||||
|
|
||||||
data_imported = True
|
|
||||||
yield send_progress("COMPLETE", 'complete')
|
yield send_progress("COMPLETE", 'complete')
|
||||||
else:
|
else:
|
||||||
yield send_progress("✗ No data refreshed", 'error')
|
yield send_progress("✗ No data refreshed", 'error')
|
||||||
|
|
@ -609,10 +629,22 @@ def refresh_cached():
|
||||||
return Response(stream_with_context(generate()), mimetype='text/event-stream')
|
return Response(stream_with_context(generate()), mimetype='text/event-stream')
|
||||||
|
|
||||||
|
|
||||||
@app.route('/api/refresh', methods=['POST'])
|
@app.route('/api/clear-db', methods=['POST'])
|
||||||
def refresh_data():
|
def clear_db():
|
||||||
"""Refresh data from AWS - reuses existing MFA session if valid"""
|
"""Clear all data from the database"""
|
||||||
return import_data()
|
try:
|
||||||
|
conn = get_db()
|
||||||
|
cursor = conn.cursor()
|
||||||
|
cursor.execute("DELETE FROM security_groups")
|
||||||
|
cursor.execute("DELETE FROM ec2_instances")
|
||||||
|
cursor.execute("DELETE FROM sg_rules")
|
||||||
|
cursor.execute("DELETE FROM refresh_timestamps")
|
||||||
|
conn.commit()
|
||||||
|
conn.close()
|
||||||
|
return jsonify({'success': True})
|
||||||
|
except Exception as e:
|
||||||
|
return jsonify({'error': str(e)}), 500
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
@app.route('/api/tags')
|
@app.route('/api/tags')
|
||||||
|
|
@ -861,6 +893,147 @@ def get_stats():
|
||||||
})
|
})
|
||||||
|
|
||||||
|
|
||||||
|
@app.route('/api/session-expiration')
|
||||||
|
def get_session_expiration():
|
||||||
|
"""Get session expiration info for credential countdown"""
|
||||||
|
global session_cache
|
||||||
|
|
||||||
|
if not session_cache:
|
||||||
|
return jsonify({'has_session': False})
|
||||||
|
|
||||||
|
current_time = time.time()
|
||||||
|
earliest_expiration = None
|
||||||
|
|
||||||
|
for profile, cache_data in session_cache.items():
|
||||||
|
timestamp = cache_data['timestamp']
|
||||||
|
duration_seconds = cache_data.get('duration_seconds', 3600)
|
||||||
|
expiration_time = timestamp + duration_seconds
|
||||||
|
|
||||||
|
if earliest_expiration is None or expiration_time < earliest_expiration:
|
||||||
|
earliest_expiration = expiration_time
|
||||||
|
|
||||||
|
if earliest_expiration is None:
|
||||||
|
return jsonify({'has_session': False})
|
||||||
|
|
||||||
|
seconds_remaining = int(earliest_expiration - current_time)
|
||||||
|
|
||||||
|
return jsonify({
|
||||||
|
'has_session': True,
|
||||||
|
'seconds_remaining': seconds_remaining,
|
||||||
|
'expiration_timestamp': earliest_expiration
|
||||||
|
})
|
||||||
|
|
||||||
|
|
||||||
|
@app.route('/api/search-ip')
|
||||||
|
def search_ip():
|
||||||
|
"""Search for an IP address across all firewall rules and EC2 instances with optional text, port, and type filters"""
|
||||||
|
ip_query = request.args.get('ip', '').strip()
|
||||||
|
text_filter = request.args.get('text', '').strip()
|
||||||
|
port_filter = request.args.get('port', '').strip()
|
||||||
|
resource_type = request.args.get('type', 'all').strip() # all, ec2, sg
|
||||||
|
|
||||||
|
if not ip_query:
|
||||||
|
return jsonify({'error': 'IP address required', 'results': {'sg_rules': [], 'ec2_instances': []}, 'count': 0})
|
||||||
|
|
||||||
|
conn = get_db()
|
||||||
|
sg_results = []
|
||||||
|
ec2_results = []
|
||||||
|
|
||||||
|
try:
|
||||||
|
# Search for IP in security group rules (if type is 'all' or 'sg')
|
||||||
|
if resource_type in ['all', 'sg']:
|
||||||
|
where_clauses = ["r.source LIKE ?"]
|
||||||
|
params = [f'%{ip_query}%']
|
||||||
|
|
||||||
|
# Add text filter if provided
|
||||||
|
if text_filter:
|
||||||
|
where_clauses.append("(sg.tag_name LIKE ? OR sg.group_name LIKE ?)")
|
||||||
|
params.extend([f'%{text_filter}%', f'%{text_filter}%'])
|
||||||
|
|
||||||
|
# Add port filter if provided (searches in port_range field)
|
||||||
|
if port_filter:
|
||||||
|
where_clauses.append("(r.port_range LIKE ? OR r.port_range = 'All')")
|
||||||
|
params.append(f'%{port_filter}%')
|
||||||
|
|
||||||
|
where_sql = " AND ".join(where_clauses)
|
||||||
|
|
||||||
|
rules = conn.execute(f"""
|
||||||
|
SELECT
|
||||||
|
r.id,
|
||||||
|
r.group_id,
|
||||||
|
r.direction,
|
||||||
|
r.protocol,
|
||||||
|
r.port_range,
|
||||||
|
r.source_type,
|
||||||
|
r.source,
|
||||||
|
r.description,
|
||||||
|
sg.group_name,
|
||||||
|
sg.tag_name,
|
||||||
|
sg.account_name,
|
||||||
|
sg.account_id,
|
||||||
|
sg.tag_wave,
|
||||||
|
sg.tag_git_repo,
|
||||||
|
sg.tag_git_org,
|
||||||
|
sg.tag_git_file
|
||||||
|
FROM sg_rules r
|
||||||
|
JOIN security_groups sg ON r.group_id = sg.group_id
|
||||||
|
WHERE {where_sql}
|
||||||
|
ORDER BY sg.account_name, sg.group_name, r.direction, r.protocol
|
||||||
|
LIMIT 1000
|
||||||
|
""", params).fetchall()
|
||||||
|
|
||||||
|
for row in rules:
|
||||||
|
sg_results.append(dict(row))
|
||||||
|
|
||||||
|
# Search for IP in EC2 instances (if type is 'all' or 'ec2')
|
||||||
|
if resource_type in ['all', 'ec2']:
|
||||||
|
where_clauses = ["private_ip_address LIKE ?"]
|
||||||
|
params = [f'%{ip_query}%']
|
||||||
|
|
||||||
|
# Add text filter if provided
|
||||||
|
if text_filter:
|
||||||
|
where_clauses.append("(tag_name LIKE ? OR instance_id LIKE ?)")
|
||||||
|
params.extend([f'%{text_filter}%', f'%{text_filter}%'])
|
||||||
|
|
||||||
|
where_sql = " AND ".join(where_clauses)
|
||||||
|
|
||||||
|
instances = conn.execute(f"""
|
||||||
|
SELECT
|
||||||
|
instance_id,
|
||||||
|
tag_name,
|
||||||
|
state,
|
||||||
|
private_ip_address,
|
||||||
|
account_name,
|
||||||
|
account_id,
|
||||||
|
security_groups_id_list,
|
||||||
|
security_groups_name_list,
|
||||||
|
tag_git_repo,
|
||||||
|
tag_git_org,
|
||||||
|
tag_git_file
|
||||||
|
FROM ec2_instances
|
||||||
|
WHERE {where_sql}
|
||||||
|
ORDER BY account_name, tag_name
|
||||||
|
LIMIT 500
|
||||||
|
""", params).fetchall()
|
||||||
|
|
||||||
|
for row in instances:
|
||||||
|
ec2_results.append(dict(row))
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
conn.close()
|
||||||
|
return jsonify({'error': f'Search error: {str(e)}', 'results': {'sg_rules': [], 'ec2_instances': []}, 'count': 0})
|
||||||
|
|
||||||
|
conn.close()
|
||||||
|
total_count = len(sg_results) + len(ec2_results)
|
||||||
|
return jsonify({
|
||||||
|
'results': {
|
||||||
|
'sg_rules': sg_results,
|
||||||
|
'ec2_instances': ec2_results
|
||||||
|
},
|
||||||
|
'count': total_count
|
||||||
|
})
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
# Get debug mode from environment variable
|
# Get debug mode from environment variable
|
||||||
debug_mode = os.getenv('DEBUG', 'false').lower() in ('true', '1', 'yes')
|
debug_mode = os.getenv('DEBUG', 'false').lower() in ('true', '1', 'yes')
|
||||||
|
|
|
||||||
18
compose.local.yml
Normal file
18
compose.local.yml
Normal file
|
|
@ -0,0 +1,18 @@
|
||||||
|
# Uses a local directory for data storage instead of a named volume.
|
||||||
|
# Usage: podman-compose -f docker-compose.local.yml up --build
|
||||||
|
|
||||||
|
services:
|
||||||
|
sgo:
|
||||||
|
build: .
|
||||||
|
container_name: sgo
|
||||||
|
ports:
|
||||||
|
- "${SGO_PORT:-5000}:5000"
|
||||||
|
userns_mode: keep-id
|
||||||
|
volumes:
|
||||||
|
- ${AWS_CONFIG_PATH:-${HOME}/.aws}:/config/.aws:ro,z
|
||||||
|
- ${DATA_PATH:-./data}:/app/data
|
||||||
|
environment:
|
||||||
|
- DEBUG=${DEBUG:-false}
|
||||||
|
- FLASK_ENV=${FLASK_ENV:-production}
|
||||||
|
- PYTHONUNBUFFERED=1
|
||||||
|
restart: unless-stopped
|
||||||
23
compose.yml
Normal file
23
compose.yml
Normal file
|
|
@ -0,0 +1,23 @@
|
||||||
|
services:
|
||||||
|
sgo:
|
||||||
|
build: .
|
||||||
|
container_name: sgo
|
||||||
|
ports:
|
||||||
|
- "${SGO_PORT:-5000}:5000"
|
||||||
|
# keep-id maps your host UID into the container — no root, no user switching needed.
|
||||||
|
# Podman only; remove this line if using Docker.
|
||||||
|
userns_mode: keep-id
|
||||||
|
volumes:
|
||||||
|
# Your AWS credentials, read-only. Set AWS_CONFIG_PATH in .env or shell.
|
||||||
|
# Defaults to ~/.aws if not set.
|
||||||
|
- ${AWS_CONFIG_PATH:-${HOME}/.aws}:/config/.aws:ro,z
|
||||||
|
# Persistent database storage
|
||||||
|
- sgo-data:/app/data
|
||||||
|
environment:
|
||||||
|
- DEBUG=${DEBUG:-false}
|
||||||
|
- FLASK_ENV=${FLASK_ENV:-production}
|
||||||
|
- PYTHONUNBUFFERED=1
|
||||||
|
restart: unless-stopped
|
||||||
|
|
||||||
|
volumes:
|
||||||
|
sgo-data:
|
||||||
|
|
@ -1,28 +0,0 @@
|
||||||
version: '3.8'
|
|
||||||
|
|
||||||
# Alternative compose file using local directory for data storage
|
|
||||||
# Usage: docker-compose -f docker-compose.local.yml up --build
|
|
||||||
# or: podman-compose -f docker-compose.local.yml up --build
|
|
||||||
|
|
||||||
services:
|
|
||||||
sgo:
|
|
||||||
build: .
|
|
||||||
container_name: sgo
|
|
||||||
ports:
|
|
||||||
- "${SGO_PORT:-5000}:5000"
|
|
||||||
volumes:
|
|
||||||
# AWS credentials - mounted to temp location, copied by entrypoint
|
|
||||||
# IMPORTANT: You must set AWS_CONFIG_PATH in .env file
|
|
||||||
- ${AWS_CONFIG_PATH}:/tmp/aws-host:ro,z
|
|
||||||
# Database storage - uses local directory
|
|
||||||
- ${DATA_PATH:-./data}:/app/data
|
|
||||||
environment:
|
|
||||||
# User/Group IDs - set to match your host user for proper permissions
|
|
||||||
- PUID=${PUID:-1000}
|
|
||||||
- PGID=${PGID:-1000}
|
|
||||||
# Debug mode - set to true for Flask debug logging
|
|
||||||
- DEBUG=${DEBUG:-false}
|
|
||||||
# Flask environment
|
|
||||||
- FLASK_ENV=${FLASK_ENV:-production}
|
|
||||||
- PYTHONUNBUFFERED=1
|
|
||||||
restart: unless-stopped
|
|
||||||
|
|
@ -1,32 +0,0 @@
|
||||||
version: '3.8'
|
|
||||||
|
|
||||||
services:
|
|
||||||
sgo:
|
|
||||||
build: .
|
|
||||||
container_name: sgo
|
|
||||||
ports:
|
|
||||||
- "${SGO_PORT:-5000}:5000"
|
|
||||||
volumes:
|
|
||||||
# AWS credentials - mounted to temp location, copied by entrypoint
|
|
||||||
# IMPORTANT: You must set AWS_CONFIG_PATH in .env file
|
|
||||||
# Example: AWS_CONFIG_PATH=/home/username/.aws
|
|
||||||
- ${AWS_CONFIG_PATH}:/tmp/aws-host:ro,z
|
|
||||||
# Database storage - uses Docker volume by default
|
|
||||||
# To use local directory instead, comment the volume line and uncomment the bind mount
|
|
||||||
- sgo-data:/app/data
|
|
||||||
# - ${DATA_PATH:-./data}:/app/data
|
|
||||||
environment:
|
|
||||||
# User/Group IDs - set to match your host user for proper permissions
|
|
||||||
- PUID=${PUID:-1000}
|
|
||||||
- PGID=${PGID:-1000}
|
|
||||||
# Debug mode - set to true for Flask debug logging
|
|
||||||
- DEBUG=${DEBUG:-false}
|
|
||||||
# Flask environment
|
|
||||||
- FLASK_ENV=${FLASK_ENV:-production}
|
|
||||||
- PYTHONUNBUFFERED=1
|
|
||||||
restart: unless-stopped
|
|
||||||
|
|
||||||
volumes:
|
|
||||||
# Named volume for persistent database storage
|
|
||||||
# Data persists across container restarts and rebuilds
|
|
||||||
sgo-data:
|
|
||||||
|
|
@ -1,39 +0,0 @@
|
||||||
#!/bin/bash
|
|
||||||
set -e
|
|
||||||
|
|
||||||
# Default PUID/PGID if not set
|
|
||||||
PUID=${PUID:-1000}
|
|
||||||
PGID=${PGID:-1000}
|
|
||||||
|
|
||||||
# Create group if it doesn't exist
|
|
||||||
if ! getent group sgo >/dev/null 2>&1; then
|
|
||||||
groupadd -g ${PGID} sgo
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Create or modify user
|
|
||||||
if ! id -u sgo >/dev/null 2>&1; then
|
|
||||||
useradd -u ${PUID} -g ${PGID} -d /home/sgo -m -s /bin/bash sgo
|
|
||||||
else
|
|
||||||
# Update existing user
|
|
||||||
usermod -u ${PUID} sgo 2>/dev/null || true
|
|
||||||
groupmod -g ${PGID} sgo 2>/dev/null || true
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Copy AWS credentials from mounted location to user directory
|
|
||||||
# This ensures proper permissions regardless of host UID/GID
|
|
||||||
if [ -d "/tmp/aws-host" ]; then
|
|
||||||
mkdir -p /home/sgo/.aws
|
|
||||||
cp -r /tmp/aws-host/* /home/sgo/.aws/ 2>/dev/null || true
|
|
||||||
chmod 700 /home/sgo/.aws
|
|
||||||
chmod 600 /home/sgo/.aws/* 2>/dev/null || true
|
|
||||||
chown -R sgo:sgo /home/sgo/.aws
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Ensure proper ownership of app files and data directory
|
|
||||||
chown -R sgo:sgo /app
|
|
||||||
|
|
||||||
# Ensure home directory ownership
|
|
||||||
chown sgo:sgo /home/sgo 2>/dev/null || true
|
|
||||||
|
|
||||||
# Execute the command as the sgo user
|
|
||||||
exec gosu sgo "$@"
|
|
||||||
|
|
@ -65,10 +65,25 @@ def get_session_with_mfa(profile_name):
|
||||||
|
|
||||||
section_name = f'profile {profile_name}' if profile_name != 'default' else 'default'
|
section_name = f'profile {profile_name}' if profile_name != 'default' else 'default'
|
||||||
mfa_serial = None
|
mfa_serial = None
|
||||||
|
duration_seconds = 3600 # Default to 1 hour
|
||||||
|
|
||||||
if section_name in config:
|
if section_name in config:
|
||||||
mfa_serial = config[section_name].get('mfa_serial')
|
mfa_serial = config[section_name].get('mfa_serial')
|
||||||
|
|
||||||
|
# Read duration_seconds from config, default to 3600 (1 hour)
|
||||||
|
if config.has_option(section_name, 'duration_seconds'):
|
||||||
|
try:
|
||||||
|
duration_seconds = int(config[section_name].get('duration_seconds'))
|
||||||
|
# Validate AWS session duration limits (15 min to 12 hours)
|
||||||
|
if duration_seconds < 900 or duration_seconds > 43200:
|
||||||
|
print(f"Warning: duration_seconds {duration_seconds} outside AWS limits (900-43200), using default 3600")
|
||||||
|
duration_seconds = 3600
|
||||||
|
else:
|
||||||
|
print(f"Using session duration: {duration_seconds} seconds ({duration_seconds/3600:.1f} hours)")
|
||||||
|
except ValueError:
|
||||||
|
print("Warning: Invalid duration_seconds in config, using default 3600")
|
||||||
|
duration_seconds = 3600
|
||||||
|
|
||||||
if not mfa_serial:
|
if not mfa_serial:
|
||||||
print("\nMFA device ARN not found in config.")
|
print("\nMFA device ARN not found in config.")
|
||||||
print("Enter MFA device ARN (e.g., arn:aws:iam::123456789012:mfa/username):")
|
print("Enter MFA device ARN (e.g., arn:aws:iam::123456789012:mfa/username):")
|
||||||
|
|
@ -82,7 +97,7 @@ def get_session_with_mfa(profile_name):
|
||||||
# Get temporary credentials
|
# Get temporary credentials
|
||||||
try:
|
try:
|
||||||
response = sts.get_session_token(
|
response = sts.get_session_token(
|
||||||
DurationSeconds=3600, # 1 hour
|
DurationSeconds=duration_seconds,
|
||||||
SerialNumber=mfa_serial,
|
SerialNumber=mfa_serial,
|
||||||
TokenCode=token_code
|
TokenCode=token_code
|
||||||
)
|
)
|
||||||
|
|
@ -419,6 +434,18 @@ def import_to_database(db_path, security_groups, ec2_instances, sg_rules=None, a
|
||||||
|
|
||||||
# Import security group rules
|
# Import security group rules
|
||||||
if sg_rules:
|
if sg_rules:
|
||||||
|
# Deduplicate rules — multiple profiles can resolve to the same account,
|
||||||
|
# producing duplicate entries for the same group_id
|
||||||
|
seen = set()
|
||||||
|
deduped = []
|
||||||
|
for rule in sg_rules:
|
||||||
|
key = (rule['group_id'], rule['direction'], rule['protocol'],
|
||||||
|
rule['port_range'], rule['source_type'], rule['source'])
|
||||||
|
if key not in seen:
|
||||||
|
seen.add(key)
|
||||||
|
deduped.append(rule)
|
||||||
|
sg_rules = deduped
|
||||||
|
|
||||||
print(f"Importing {len(sg_rules)} security group rules...")
|
print(f"Importing {len(sg_rules)} security group rules...")
|
||||||
|
|
||||||
# If appending, delete existing rules for these security groups to avoid duplicates
|
# If appending, delete existing rules for these security groups to avoid duplicates
|
||||||
|
|
|
||||||
|
|
@ -1,3 +1,3 @@
|
||||||
Flask==3.0.0
|
Flask==3.0.0
|
||||||
Werkzeug==3.0.1
|
Werkzeug==3.0.1
|
||||||
boto3==1.34.0
|
boto3>=1.34.0
|
||||||
|
|
|
||||||
|
|
@ -659,6 +659,29 @@ h1 {
|
||||||
box-shadow: var(--shadow-sm);
|
box-shadow: var(--shadow-sm);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/* IP Search View Toggle - Same styles as view-toggle-btn but separate class */
|
||||||
|
.ip-view-toggle-btn {
|
||||||
|
padding: 0.5rem 1rem;
|
||||||
|
border: none;
|
||||||
|
background: transparent;
|
||||||
|
border-radius: 0.25rem;
|
||||||
|
cursor: pointer;
|
||||||
|
font-size: 0.875rem;
|
||||||
|
font-weight: 500;
|
||||||
|
color: var(--text-secondary);
|
||||||
|
transition: all 0.2s;
|
||||||
|
}
|
||||||
|
|
||||||
|
.ip-view-toggle-btn:hover {
|
||||||
|
color: var(--primary-color);
|
||||||
|
}
|
||||||
|
|
||||||
|
.ip-view-toggle-btn.active {
|
||||||
|
background: var(--card-bg);
|
||||||
|
color: var(--primary-color);
|
||||||
|
box-shadow: var(--shadow-sm);
|
||||||
|
}
|
||||||
|
|
||||||
/* Table View */
|
/* Table View */
|
||||||
.results-table-view {
|
.results-table-view {
|
||||||
display: none;
|
display: none;
|
||||||
|
|
|
||||||
|
|
@ -209,7 +209,7 @@
|
||||||
<div class="mfa-inputs" id="mfaInputs"></div>
|
<div class="mfa-inputs" id="mfaInputs"></div>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<button class="import-btn" id="doneBtn" onclick="goToExplorer()">
|
<button class="import-btn" id="doneBtn" onclick="goToExplorer()" style="display: none;">
|
||||||
Done - Go to Explorer
|
Done - Go to Explorer
|
||||||
</button>
|
</button>
|
||||||
|
|
||||||
|
|
@ -321,9 +321,13 @@
|
||||||
${profile.has_mfa ? `
|
${profile.has_mfa ? `
|
||||||
<input type="text"
|
<input type="text"
|
||||||
id="mfa-${profile.name}"
|
id="mfa-${profile.name}"
|
||||||
|
name="one-time-code"
|
||||||
|
autocomplete="one-time-code"
|
||||||
|
inputmode="numeric"
|
||||||
placeholder="Enter MFA/TOTP"
|
placeholder="Enter MFA/TOTP"
|
||||||
maxlength="6"
|
maxlength="6"
|
||||||
pattern="[0-9]*">
|
pattern="[0-9]*"
|
||||||
|
onkeydown="if(event.key==='Enter') startProfileImport('${profile.name}')">
|
||||||
` : ''}
|
` : ''}
|
||||||
<button class="profile-import-btn"
|
<button class="profile-import-btn"
|
||||||
id="btn-${profile.name}"
|
id="btn-${profile.name}"
|
||||||
|
|
@ -360,6 +364,7 @@
|
||||||
// Disable button and show progress
|
// Disable button and show progress
|
||||||
btn.disabled = true;
|
btn.disabled = true;
|
||||||
btn.textContent = 'Importing...';
|
btn.textContent = 'Importing...';
|
||||||
|
btn.className = 'profile-import-btn';
|
||||||
progressSection.classList.add('active');
|
progressSection.classList.add('active');
|
||||||
|
|
||||||
// Get MFA code for this profile (if MFA input exists)
|
// Get MFA code for this profile (if MFA input exists)
|
||||||
|
|
@ -405,6 +410,7 @@
|
||||||
importedProfiles.add(profile);
|
importedProfiles.add(profile);
|
||||||
btn.textContent = '✓ Imported';
|
btn.textContent = '✓ Imported';
|
||||||
btn.classList.add('success');
|
btn.classList.add('success');
|
||||||
|
document.getElementById('doneBtn').style.display = 'block';
|
||||||
} else if (data.status === 'error' && data.message.includes('✗')) {
|
} else if (data.status === 'error' && data.message.includes('✗')) {
|
||||||
btn.textContent = '✗ Failed';
|
btn.textContent = '✗ Failed';
|
||||||
btn.classList.add('error');
|
btn.classList.add('error');
|
||||||
|
|
|
||||||
|
|
@ -15,14 +15,22 @@
|
||||||
<h1>🔭 SGO: Security Groups (and Instances) Observatory</h1>
|
<h1>🔭 SGO: Security Groups (and Instances) Observatory</h1>
|
||||||
<p class="subtitle">Search and explore EC2 instances and Security Groups • <a href="https://codeberg.org/edfig/SGO" target="_blank" style="color: inherit; opacity: 0.7; text-decoration: none;">Source Code</a></p>
|
<p class="subtitle">Search and explore EC2 instances and Security Groups • <a href="https://codeberg.org/edfig/SGO" target="_blank" style="color: inherit; opacity: 0.7; text-decoration: none;">Source Code</a></p>
|
||||||
</div>
|
</div>
|
||||||
|
<div style="display: flex; flex-direction: column; align-items: flex-end; gap: 0.25rem;">
|
||||||
<div style="display: flex; gap: 0.5rem;">
|
<div style="display: flex; gap: 0.5rem;">
|
||||||
<button onclick="window.location.href='/'" style="padding: 0.5rem 1rem; background: #64748b; color: white; border: none; border-radius: 0.375rem; cursor: pointer; font-size: 0.875rem;" title="Change AWS profiles">
|
<button onclick="window.location.href='/'" style="padding: 0.5rem 1rem; background: #64748b; color: white; border: none; border-radius: 0.375rem; cursor: pointer; font-size: 0.875rem;" title="Change AWS profiles">
|
||||||
Change Profiles
|
Change Profiles
|
||||||
</button>
|
</button>
|
||||||
|
<button onclick="clearAndRefresh()" id="clearRefreshBtn" style="padding: 0.5rem 1rem; background: #ea580c; color: white; border: none; border-radius: 0.375rem; cursor: pointer; font-size: 0.875rem;" title="Clear all cached data and re-fetch from AWS">
|
||||||
|
Clear & Refresh
|
||||||
|
</button>
|
||||||
<button onclick="refreshData()" id="refreshBtn" style="padding: 0.5rem 1rem; background: var(--primary-color); color: white; border: none; border-radius: 0.375rem; cursor: pointer; font-size: 0.875rem;">
|
<button onclick="refreshData()" id="refreshBtn" style="padding: 0.5rem 1rem; background: var(--primary-color); color: white; border: none; border-radius: 0.375rem; cursor: pointer; font-size: 0.875rem;">
|
||||||
Refresh Data
|
Refresh Data
|
||||||
</button>
|
</button>
|
||||||
</div>
|
</div>
|
||||||
|
<div id="sessionExpiration" style="font-size: 0.75rem; color: #94a3b8; display: none;">
|
||||||
|
Expire time: <span id="expirationText">-</span>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
</header>
|
</header>
|
||||||
|
|
@ -81,7 +89,7 @@
|
||||||
<span>Case Insensitive</span>
|
<span>Case Insensitive</span>
|
||||||
</label>
|
</label>
|
||||||
<label class="regex-checkbox">
|
<label class="regex-checkbox">
|
||||||
<input type="checkbox" id="fuzzyCheckbox">
|
<input type="checkbox" id="fuzzyCheckbox" checked>
|
||||||
<span>Fuzzy Search</span>
|
<span>Fuzzy Search</span>
|
||||||
</label>
|
</label>
|
||||||
</div>
|
</div>
|
||||||
|
|
@ -90,6 +98,7 @@
|
||||||
<button class="filter-btn active" data-filter="all">All Resources</button>
|
<button class="filter-btn active" data-filter="all">All Resources</button>
|
||||||
<button class="filter-btn" data-filter="ec2">EC2 Instances</button>
|
<button class="filter-btn" data-filter="ec2">EC2 Instances</button>
|
||||||
<button class="filter-btn" data-filter="sg">Security Groups</button>
|
<button class="filter-btn" data-filter="sg">Security Groups</button>
|
||||||
|
<button class="filter-btn" data-filter="ip-search" style="background: #8b5cf6; color: white;">IP Search</button>
|
||||||
</div>
|
</div>
|
||||||
<div class="view-toggle">
|
<div class="view-toggle">
|
||||||
<button class="view-toggle-btn active" data-view="cards">Cards</button>
|
<button class="view-toggle-btn active" data-view="cards">Cards</button>
|
||||||
|
|
@ -101,6 +110,83 @@
|
||||||
|
|
||||||
<div class="search-results" id="searchResults"></div>
|
<div class="search-results" id="searchResults"></div>
|
||||||
|
|
||||||
|
<div class="ip-search-view" id="ipSearchView" style="display: none;">
|
||||||
|
<div class="ip-search-container">
|
||||||
|
<div style="background: white; border-radius: 0.5rem; padding: 1.5rem; box-shadow: 0 1px 3px 0 rgb(0 0 0 / 0.1); margin-bottom: 1rem;">
|
||||||
|
<h3 style="margin: 0 0 1rem 0; color: #1e293b;">Search by IP Address with Filters</h3>
|
||||||
|
<p style="color: #64748b; margin-bottom: 1rem;">Search for an IP address or CIDR block across security group rules and EC2 instances. Combine with text filters for precise results.</p>
|
||||||
|
|
||||||
|
<div style="display: grid; grid-template-columns: 2fr 1.5fr 1fr 1fr; gap: 0.5rem; margin-bottom: 0.75rem;">
|
||||||
|
<input
|
||||||
|
type="text"
|
||||||
|
id="ipSearchInput"
|
||||||
|
class="search-input"
|
||||||
|
placeholder="IP address (e.g., 10.0.1.5)"
|
||||||
|
style="margin: 0;"
|
||||||
|
>
|
||||||
|
<input
|
||||||
|
type="text"
|
||||||
|
id="ipTextFilter"
|
||||||
|
class="search-input"
|
||||||
|
placeholder="AND text (e.g., arit)"
|
||||||
|
style="margin: 0;"
|
||||||
|
>
|
||||||
|
<input
|
||||||
|
type="text"
|
||||||
|
id="ipPortFilter"
|
||||||
|
class="search-input"
|
||||||
|
placeholder="AND port (e.g., 443)"
|
||||||
|
style="margin: 0;"
|
||||||
|
>
|
||||||
|
<button onclick="performIPSearch()" style="padding: 0.5rem 1rem; background: #8b5cf6; color: white; border: none; border-radius: 0.375rem; cursor: pointer; font-weight: 500; white-space: nowrap;">Search</button>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div style="display: flex; gap: 1rem; align-items: center; justify-content: space-between;">
|
||||||
|
<div style="display: flex; gap: 1rem; align-items: center;">
|
||||||
|
<label style="font-size: 0.875rem; color: #64748b; font-weight: 500;">Resource Type:</label>
|
||||||
|
<label style="display: flex; align-items: center; gap: 0.375rem; font-size: 0.875rem; cursor: pointer;">
|
||||||
|
<input type="radio" name="ipResourceType" value="all" checked style="cursor: pointer;">
|
||||||
|
<span>All Resources</span>
|
||||||
|
</label>
|
||||||
|
<label style="display: flex; align-items: center; gap: 0.375rem; font-size: 0.875rem; cursor: pointer;">
|
||||||
|
<input type="radio" name="ipResourceType" value="ec2" style="cursor: pointer;">
|
||||||
|
<span>EC2 Only</span>
|
||||||
|
</label>
|
||||||
|
<label style="display: flex; align-items: center; gap: 0.375rem; font-size: 0.875rem; cursor: pointer;">
|
||||||
|
<input type="radio" name="ipResourceType" value="sg" style="cursor: pointer;">
|
||||||
|
<span>Security Groups Only</span>
|
||||||
|
</label>
|
||||||
|
</div>
|
||||||
|
<div class="view-toggle" style="margin: 0;">
|
||||||
|
<button class="ip-view-toggle-btn active" data-ip-view="cards" onclick="toggleIPView('cards')">Cards</button>
|
||||||
|
<button class="ip-view-toggle-btn" data-ip-view="table" onclick="toggleIPView('table')">Table</button>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div id="ipSearchResults"></div>
|
||||||
|
<div class="results-table-view" id="ipResultsTableView">
|
||||||
|
<div class="results-table-container">
|
||||||
|
<table class="results-table">
|
||||||
|
<thead>
|
||||||
|
<tr>
|
||||||
|
<th onclick="sortIPTable('type')" style="cursor: pointer;">Type <span id="ip-sort-type"></span></th>
|
||||||
|
<th onclick="sortIPTable('name')" style="cursor: pointer;">Name <span id="ip-sort-name"></span></th>
|
||||||
|
<th onclick="sortIPTable('id')" style="cursor: pointer;">ID <span id="ip-sort-id"></span></th>
|
||||||
|
<th onclick="sortIPTable('account_name')" style="cursor: pointer;">Account <span id="ip-sort-account_name"></span></th>
|
||||||
|
<th onclick="sortIPTable('ip')" style="cursor: pointer;">IP Address <span id="ip-sort-ip"></span></th>
|
||||||
|
<th onclick="sortIPTable('direction')" style="cursor: pointer;">Direction <span id="ip-sort-direction"></span></th>
|
||||||
|
<th onclick="sortIPTable('protocol')" style="cursor: pointer;">Protocol <span id="ip-sort-protocol"></span></th>
|
||||||
|
<th onclick="sortIPTable('port_range')" style="cursor: pointer;">Port <span id="ip-sort-port_range"></span></th>
|
||||||
|
<th>Source</th>
|
||||||
|
</tr>
|
||||||
|
</thead>
|
||||||
|
<tbody id="ipTableResultsBody"></tbody>
|
||||||
|
</table>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
<div class="results-table-view" id="resultsTableView">
|
<div class="results-table-view" id="resultsTableView">
|
||||||
<div class="results-table-container">
|
<div class="results-table-container">
|
||||||
<table class="results-table">
|
<table class="results-table">
|
||||||
|
|
@ -127,10 +213,14 @@
|
||||||
<script>
|
<script>
|
||||||
let currentFilter = 'all';
|
let currentFilter = 'all';
|
||||||
let currentView = 'cards';
|
let currentView = 'cards';
|
||||||
|
let currentIPView = 'cards';
|
||||||
let searchTimeout = null;
|
let searchTimeout = null;
|
||||||
let currentResults = [];
|
let currentResults = [];
|
||||||
|
let currentIPResults = [];
|
||||||
let sortColumn = null;
|
let sortColumn = null;
|
||||||
let sortDirection = 'asc';
|
let sortDirection = 'asc';
|
||||||
|
let ipSortColumn = null;
|
||||||
|
let ipSortDirection = 'asc';
|
||||||
|
|
||||||
function buildGitHubUrl(gitOrg, gitRepo, gitFile) {
|
function buildGitHubUrl(gitOrg, gitRepo, gitFile) {
|
||||||
if (!gitOrg || !gitRepo || !gitFile) {
|
if (!gitOrg || !gitRepo || !gitFile) {
|
||||||
|
|
@ -163,11 +253,6 @@
|
||||||
return `hsl(${hue}, ${saturation}%, ${lightness}%)`;
|
return `hsl(${hue}, ${saturation}%, ${lightness}%)`;
|
||||||
}
|
}
|
||||||
|
|
||||||
function getAccountClass(accountName) {
|
|
||||||
// No longer needed for classes, but keeping for backwards compatibility
|
|
||||||
return '';
|
|
||||||
}
|
|
||||||
|
|
||||||
function getAccountStyle(accountName) {
|
function getAccountStyle(accountName) {
|
||||||
if (!accountName) return '';
|
if (!accountName) return '';
|
||||||
return `color: ${getColorFromName(accountName)}; font-weight: 600;`;
|
return `color: ${getColorFromName(accountName)}; font-weight: 600;`;
|
||||||
|
|
@ -176,6 +261,7 @@
|
||||||
// Load stats and tags on page load
|
// Load stats and tags on page load
|
||||||
loadStats();
|
loadStats();
|
||||||
loadTags();
|
loadTags();
|
||||||
|
updateSessionExpiration();
|
||||||
|
|
||||||
// Load all resources on page load
|
// Load all resources on page load
|
||||||
performSearch('');
|
performSearch('');
|
||||||
|
|
@ -185,6 +271,11 @@
|
||||||
clearTimeout(searchTimeout);
|
clearTimeout(searchTimeout);
|
||||||
const query = e.target.value.trim();
|
const query = e.target.value.trim();
|
||||||
|
|
||||||
|
// Hide details view when search is empty
|
||||||
|
if (query === '') {
|
||||||
|
hideDetails();
|
||||||
|
}
|
||||||
|
|
||||||
searchTimeout = setTimeout(() => {
|
searchTimeout = setTimeout(() => {
|
||||||
performSearch(query);
|
performSearch(query);
|
||||||
}, 300);
|
}, 300);
|
||||||
|
|
@ -219,15 +310,25 @@
|
||||||
this.classList.add('active');
|
this.classList.add('active');
|
||||||
currentFilter = this.dataset.filter;
|
currentFilter = this.dataset.filter;
|
||||||
|
|
||||||
|
if (currentFilter === 'ip-search') {
|
||||||
|
// Show IP search view, hide others
|
||||||
|
document.getElementById('searchResults').style.display = 'none';
|
||||||
|
document.getElementById('resultsTableView').classList.remove('active');
|
||||||
|
document.getElementById('ipSearchView').style.display = 'block';
|
||||||
|
} else {
|
||||||
|
// Show regular search results
|
||||||
|
document.getElementById('ipSearchView').style.display = 'none';
|
||||||
const query = document.getElementById('searchInput').value.trim();
|
const query = document.getElementById('searchInput').value.trim();
|
||||||
performSearch(query);
|
performSearch(query);
|
||||||
|
}
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
// View toggle handlers
|
// View toggle handlers
|
||||||
document.querySelectorAll('.view-toggle-btn').forEach(btn => {
|
document.querySelectorAll('.view-toggle-btn').forEach(btn => {
|
||||||
btn.addEventListener('click', function() {
|
btn.addEventListener('click', function() {
|
||||||
document.querySelectorAll('.view-toggle-btn').forEach(b => b.classList.remove('active'));
|
if (!this.dataset.view) return; // skip Export and other non-toggle buttons
|
||||||
|
document.querySelectorAll('.view-toggle-btn[data-view]').forEach(b => b.classList.remove('active'));
|
||||||
this.classList.add('active');
|
this.classList.add('active');
|
||||||
currentView = this.dataset.view;
|
currentView = this.dataset.view;
|
||||||
|
|
||||||
|
|
@ -363,6 +464,62 @@
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
async function updateSessionExpiration() {
|
||||||
|
try {
|
||||||
|
const response = await fetch('/api/session-expiration');
|
||||||
|
const data = await response.json();
|
||||||
|
|
||||||
|
const expirationDiv = document.getElementById('sessionExpiration');
|
||||||
|
const expirationText = document.getElementById('expirationText');
|
||||||
|
|
||||||
|
if (!data.has_session) {
|
||||||
|
expirationDiv.style.display = 'none';
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const secondsRemaining = data.seconds_remaining;
|
||||||
|
|
||||||
|
let displayText;
|
||||||
|
let color = '#94a3b8';
|
||||||
|
let fontWeight = '400';
|
||||||
|
|
||||||
|
if (secondsRemaining <= 0) {
|
||||||
|
displayText = 'EXPIRED';
|
||||||
|
color = '#ef4444';
|
||||||
|
fontWeight = '600';
|
||||||
|
} else if (secondsRemaining < 60) {
|
||||||
|
displayText = `${secondsRemaining}s`;
|
||||||
|
color = '#ef4444';
|
||||||
|
fontWeight = '600';
|
||||||
|
} else if (secondsRemaining < 600) {
|
||||||
|
// Under 10 minutes — show MM:SS
|
||||||
|
const m = Math.floor(secondsRemaining / 60);
|
||||||
|
const s = secondsRemaining % 60;
|
||||||
|
displayText = `${m}:${String(s).padStart(2, '0')}`;
|
||||||
|
color = '#f59e0b';
|
||||||
|
fontWeight = '600';
|
||||||
|
} else {
|
||||||
|
const hours = Math.floor(secondsRemaining / 3600);
|
||||||
|
const minutes = Math.floor((secondsRemaining % 3600) / 60);
|
||||||
|
displayText = hours > 0
|
||||||
|
? (minutes > 0 ? `${hours}h ${minutes}m` : `${hours}h`)
|
||||||
|
: `${minutes}m`;
|
||||||
|
}
|
||||||
|
|
||||||
|
expirationText.textContent = displayText;
|
||||||
|
expirationText.style.color = color;
|
||||||
|
expirationText.style.fontWeight = fontWeight;
|
||||||
|
expirationDiv.style.display = 'block';
|
||||||
|
|
||||||
|
// Update every 10 seconds
|
||||||
|
setTimeout(updateSessionExpiration, 10000);
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Error updating session expiration:', error);
|
||||||
|
// Retry in 10 seconds
|
||||||
|
setTimeout(updateSessionExpiration, 10000);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
async function refreshData() {
|
async function refreshData() {
|
||||||
const btn = document.getElementById('refreshBtn');
|
const btn = document.getElementById('refreshBtn');
|
||||||
const originalText = btn.textContent;
|
const originalText = btn.textContent;
|
||||||
|
|
@ -411,6 +568,7 @@
|
||||||
if (data.status === 'complete' || data.message === 'COMPLETE') {
|
if (data.status === 'complete' || data.message === 'COMPLETE') {
|
||||||
// Reload stats (including refresh timestamps) and search results
|
// Reload stats (including refresh timestamps) and search results
|
||||||
await loadStats();
|
await loadStats();
|
||||||
|
updateSessionExpiration();
|
||||||
performSearch(document.getElementById('searchInput').value.trim());
|
performSearch(document.getElementById('searchInput').value.trim());
|
||||||
btn.textContent = originalText;
|
btn.textContent = originalText;
|
||||||
btn.disabled = false;
|
btn.disabled = false;
|
||||||
|
|
@ -430,6 +588,38 @@
|
||||||
btn.disabled = false;
|
btn.disabled = false;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
async function clearAndRefresh() {
|
||||||
|
if (!confirm('Clear all cached data and re-fetch from AWS?\n\nThis will wipe the database before refreshing.')) return;
|
||||||
|
|
||||||
|
const btn = document.getElementById('clearRefreshBtn');
|
||||||
|
const refreshBtn = document.getElementById('refreshBtn');
|
||||||
|
btn.disabled = true;
|
||||||
|
refreshBtn.disabled = true;
|
||||||
|
btn.textContent = 'Clearing...';
|
||||||
|
|
||||||
|
try {
|
||||||
|
const response = await fetch('/api/clear-db', { method: 'POST' });
|
||||||
|
const data = await response.json();
|
||||||
|
|
||||||
|
if (!data.success) {
|
||||||
|
alert('Failed to clear database: ' + (data.error || 'Unknown error'));
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Update stats to show the cleared state
|
||||||
|
await loadStats();
|
||||||
|
|
||||||
|
// Now re-fetch from AWS using cached sessions
|
||||||
|
await refreshData();
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Clear and refresh error:', error);
|
||||||
|
alert('Failed to clear database.');
|
||||||
|
} finally {
|
||||||
|
btn.textContent = 'Clear & Refresh';
|
||||||
|
btn.disabled = false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
function fuzzyMatch(str, pattern) {
|
function fuzzyMatch(str, pattern) {
|
||||||
// Simple fuzzy matching: checks if pattern characters appear in order in str
|
// Simple fuzzy matching: checks if pattern characters appear in order in str
|
||||||
if (!pattern) return true;
|
if (!pattern) return true;
|
||||||
|
|
@ -1243,6 +1433,396 @@
|
||||||
alert('Failed to export SG rules');
|
alert('Failed to export SG rules');
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// IP Search Functions
|
||||||
|
async function performIPSearch() {
|
||||||
|
const ipInput = document.getElementById('ipSearchInput').value.trim();
|
||||||
|
const textFilter = document.getElementById('ipTextFilter').value.trim();
|
||||||
|
const portFilter = document.getElementById('ipPortFilter').value.trim();
|
||||||
|
const resourceType = document.querySelector('input[name="ipResourceType"]:checked').value;
|
||||||
|
const resultsDiv = document.getElementById('ipSearchResults');
|
||||||
|
|
||||||
|
if (!ipInput) {
|
||||||
|
resultsDiv.innerHTML = `
|
||||||
|
<div class="empty-state">
|
||||||
|
<div class="empty-state-icon">⚠️</div>
|
||||||
|
<p>Please enter an IP address or CIDR block</p>
|
||||||
|
</div>
|
||||||
|
`;
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
resultsDiv.innerHTML = '<div class="loading"><div class="spinner"></div>Searching...</div>';
|
||||||
|
|
||||||
|
try {
|
||||||
|
let url = `/api/search-ip?ip=${encodeURIComponent(ipInput)}&type=${resourceType}`;
|
||||||
|
if (textFilter) {
|
||||||
|
url += `&text=${encodeURIComponent(textFilter)}`;
|
||||||
|
}
|
||||||
|
if (portFilter) {
|
||||||
|
url += `&port=${encodeURIComponent(portFilter)}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
const response = await fetch(url);
|
||||||
|
const data = await response.json();
|
||||||
|
|
||||||
|
if (data.error) {
|
||||||
|
resultsDiv.innerHTML = `
|
||||||
|
<div class="empty-state">
|
||||||
|
<div class="empty-state-icon">⚠️</div>
|
||||||
|
<p>Error: ${data.error}</p>
|
||||||
|
</div>
|
||||||
|
`;
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (data.count === 0) {
|
||||||
|
const filterText = textFilter ? ` with "${textFilter}"` : '';
|
||||||
|
resultsDiv.innerHTML = `
|
||||||
|
<div class="empty-state">
|
||||||
|
<div class="empty-state-icon">🔍</div>
|
||||||
|
<p>No results found for IP "${ipInput}"${filterText}</p>
|
||||||
|
</div>
|
||||||
|
`;
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
renderIPSearchResults(data.results, resultsDiv, ipInput, textFilter, resourceType);
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Error searching IP:', error);
|
||||||
|
resultsDiv.innerHTML = '<div class="empty-state">Error performing IP search</div>';
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function toggleIPView(view) {
|
||||||
|
currentIPView = view;
|
||||||
|
const resultsDiv = document.getElementById('ipSearchResults');
|
||||||
|
const tableDiv = document.getElementById('ipResultsTableView');
|
||||||
|
|
||||||
|
// Update button states - only for IP search view toggle buttons
|
||||||
|
document.querySelectorAll('.ip-view-toggle-btn').forEach(btn => {
|
||||||
|
btn.classList.remove('active');
|
||||||
|
if (btn.dataset.ipView === view) {
|
||||||
|
btn.classList.add('active');
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
if (view === 'table') {
|
||||||
|
resultsDiv.style.display = 'none';
|
||||||
|
tableDiv.classList.add('active');
|
||||||
|
} else {
|
||||||
|
resultsDiv.style.display = 'block';
|
||||||
|
tableDiv.classList.remove('active');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function sortIPTable(column) {
|
||||||
|
if (ipSortColumn === column) {
|
||||||
|
ipSortDirection = ipSortDirection === 'asc' ? 'desc' : 'asc';
|
||||||
|
} else {
|
||||||
|
ipSortColumn = column;
|
||||||
|
ipSortDirection = 'asc';
|
||||||
|
}
|
||||||
|
|
||||||
|
currentIPResults.sort((a, b) => {
|
||||||
|
let aVal = a[column] || '';
|
||||||
|
let bVal = b[column] || '';
|
||||||
|
|
||||||
|
if (typeof aVal === 'string') {
|
||||||
|
aVal = aVal.toLowerCase();
|
||||||
|
bVal = bVal.toLowerCase();
|
||||||
|
}
|
||||||
|
|
||||||
|
if (ipSortDirection === 'asc') {
|
||||||
|
return aVal > bVal ? 1 : aVal < bVal ? -1 : 0;
|
||||||
|
} else {
|
||||||
|
return aVal < bVal ? 1 : aVal > bVal ? -1 : 0;
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Update sort indicators
|
||||||
|
document.querySelectorAll('[id^="ip-sort-"]').forEach(el => el.textContent = '');
|
||||||
|
const indicator = document.getElementById(`ip-sort-${column}`);
|
||||||
|
if (indicator) {
|
||||||
|
indicator.textContent = ipSortDirection === 'asc' ? ' ▲' : ' ▼';
|
||||||
|
}
|
||||||
|
|
||||||
|
renderIPTableView(currentIPResults, document.getElementById('ipTableResultsBody'));
|
||||||
|
}
|
||||||
|
|
||||||
|
function renderIPSearchResults(results, resultsDiv, ipQuery, textFilter, resourceType) {
|
||||||
|
const sgRules = results.sg_rules || [];
|
||||||
|
const ec2Instances = results.ec2_instances || [];
|
||||||
|
|
||||||
|
// Flatten results for table view
|
||||||
|
const flatResults = [];
|
||||||
|
|
||||||
|
// Add EC2 instances to flat results
|
||||||
|
ec2Instances.forEach(ec2 => {
|
||||||
|
flatResults.push({
|
||||||
|
type: 'ec2',
|
||||||
|
name: ec2.tag_name || ec2.instance_id,
|
||||||
|
id: ec2.instance_id,
|
||||||
|
account_name: ec2.account_name,
|
||||||
|
account_id: ec2.account_id,
|
||||||
|
ip: ec2.private_ip_address,
|
||||||
|
direction: '-',
|
||||||
|
protocol: '-',
|
||||||
|
port_range: '-',
|
||||||
|
source: '-',
|
||||||
|
state: ec2.state,
|
||||||
|
tag_git_repo: ec2.tag_git_repo,
|
||||||
|
tag_git_org: ec2.tag_git_org,
|
||||||
|
tag_git_file: ec2.tag_git_file,
|
||||||
|
security_groups_count: ec2.security_groups_id_list ? ec2.security_groups_id_list.split(';').filter(id => id.trim()).length : 0
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
// Add SG rules to flat results
|
||||||
|
sgRules.forEach(rule => {
|
||||||
|
flatResults.push({
|
||||||
|
type: 'sg',
|
||||||
|
name: rule.tag_name || rule.group_name,
|
||||||
|
id: rule.group_id,
|
||||||
|
account_name: rule.account_name,
|
||||||
|
account_id: rule.account_id,
|
||||||
|
ip: rule.source,
|
||||||
|
direction: rule.direction,
|
||||||
|
protocol: rule.protocol,
|
||||||
|
port_range: rule.port_range,
|
||||||
|
source: rule.source,
|
||||||
|
description: rule.description,
|
||||||
|
tag_wave: rule.tag_wave,
|
||||||
|
tag_git_repo: rule.tag_git_repo,
|
||||||
|
tag_git_org: rule.tag_git_org,
|
||||||
|
tag_git_file: rule.tag_git_file
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
// Store results for sorting
|
||||||
|
currentIPResults = flatResults;
|
||||||
|
|
||||||
|
// Render table view
|
||||||
|
renderIPTableView(flatResults, document.getElementById('ipTableResultsBody'));
|
||||||
|
|
||||||
|
// Render cards view
|
||||||
|
let html = `
|
||||||
|
<div style="background: white; border-radius: 0.5rem; padding: 1rem; box-shadow: 0 1px 3px 0 rgb(0 0 0 / 0.1); margin-bottom: 1rem;">
|
||||||
|
<h3 style="margin: 0; color: #1e293b;">
|
||||||
|
Found ${ec2Instances.length} EC2 instance(s) and ${sgRules.length} security group rule(s)
|
||||||
|
</h3>
|
||||||
|
</div>
|
||||||
|
`;
|
||||||
|
|
||||||
|
// Render EC2 instances if any
|
||||||
|
if (ec2Instances.length > 0) {
|
||||||
|
html += `
|
||||||
|
<div style="background: white; border-radius: 0.5rem; padding: 1.5rem; box-shadow: 0 1px 3px 0 rgb(0 0 0 / 0.1); margin-bottom: 1rem;">
|
||||||
|
<h3 style="margin: 0 0 1rem 0; color: #1e293b; border-bottom: 2px solid #e2e8f0; padding-bottom: 0.5rem;">
|
||||||
|
<span style="background: #dbeafe; color: #1e40af; padding: 0.25rem 0.75rem; border-radius: 0.375rem; font-size: 0.875rem; font-weight: 600; margin-right: 0.5rem;">EC2</span>
|
||||||
|
EC2 Instances (${ec2Instances.length})
|
||||||
|
</h3>
|
||||||
|
<div style="display: grid; gap: 0.75rem;">
|
||||||
|
`;
|
||||||
|
|
||||||
|
ec2Instances.forEach(ec2 => {
|
||||||
|
const githubUrl = buildGitHubUrl(ec2.tag_git_org, ec2.tag_git_repo, ec2.tag_git_file);
|
||||||
|
const sourceLink = githubUrl
|
||||||
|
? `<a href="${githubUrl}" target="_blank" style="color: var(--primary-color); text-decoration: underline;">${ec2.tag_git_repo || 'GitHub'}</a>`
|
||||||
|
: (ec2.tag_git_repo || 'N/A');
|
||||||
|
|
||||||
|
const accountStyle = getAccountStyle(ec2.account_name);
|
||||||
|
const sgCount = ec2.security_groups_id_list ? ec2.security_groups_id_list.split(';').filter(id => id.trim()).length : 0;
|
||||||
|
|
||||||
|
html += `
|
||||||
|
<div style="border: 1px solid #e2e8f0; border-radius: 0.375rem; padding: 1rem; cursor: pointer; transition: all 0.2s;" onclick="showDetails('ec2', '${ec2.instance_id}')" onmouseover="this.style.borderColor='#8b5cf6'; this.style.backgroundColor='#faf5ff'" onmouseout="this.style.borderColor='#e2e8f0'; this.style.backgroundColor='white'">
|
||||||
|
<div style="display: flex; justify-content: space-between; align-items: start; margin-bottom: 0.5rem;">
|
||||||
|
<div style="font-weight: 600; color: #1e293b;">${ec2.tag_name || ec2.instance_id}</div>
|
||||||
|
<span class="status-badge ${ec2.state}" style="font-size: 0.75rem;">${ec2.state}</span>
|
||||||
|
</div>
|
||||||
|
<div style="color: #64748b; font-size: 0.875rem;">
|
||||||
|
<div><strong>Instance ID:</strong> <span style="font-family: monospace;">${ec2.instance_id}</span></div>
|
||||||
|
<div><strong>Private IP:</strong> <span style="font-family: monospace; font-weight: 600; color: #8b5cf6;">${ec2.private_ip_address}</span></div>
|
||||||
|
<div><strong>Account:</strong> <span style="${accountStyle}">${ec2.account_name}</span></div>
|
||||||
|
<div><strong>Security Groups:</strong> ${sgCount} | <strong>Source:</strong> ${sourceLink}</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
`;
|
||||||
|
});
|
||||||
|
|
||||||
|
html += `
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
`;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Render Security Group rules if any
|
||||||
|
if (sgRules.length > 0) {
|
||||||
|
const groupedResults = {};
|
||||||
|
|
||||||
|
// Group results by security group
|
||||||
|
sgRules.forEach(rule => {
|
||||||
|
const key = rule.group_id;
|
||||||
|
if (!groupedResults[key]) {
|
||||||
|
groupedResults[key] = {
|
||||||
|
group_id: rule.group_id,
|
||||||
|
group_name: rule.group_name,
|
||||||
|
tag_name: rule.tag_name,
|
||||||
|
account_name: rule.account_name,
|
||||||
|
account_id: rule.account_id,
|
||||||
|
tag_wave: rule.tag_wave,
|
||||||
|
tag_git_repo: rule.tag_git_repo,
|
||||||
|
tag_git_org: rule.tag_git_org,
|
||||||
|
tag_git_file: rule.tag_git_file,
|
||||||
|
rules: []
|
||||||
|
};
|
||||||
|
}
|
||||||
|
groupedResults[key].rules.push(rule);
|
||||||
|
});
|
||||||
|
|
||||||
|
html += `
|
||||||
|
<div style="background: white; border-radius: 0.5rem; padding: 1.5rem; box-shadow: 0 1px 3px 0 rgb(0 0 0 / 0.1); margin-bottom: 1rem;">
|
||||||
|
<h3 style="margin: 0 0 1rem 0; color: #1e293b; border-bottom: 2px solid #e2e8f0; padding-bottom: 0.5rem;">
|
||||||
|
<span style="background: #fce7f3; color: #9f1239; padding: 0.25rem 0.75rem; border-radius: 0.375rem; font-size: 0.875rem; font-weight: 600; margin-right: 0.5rem;">SG</span>
|
||||||
|
Security Group Rules (${Object.keys(groupedResults).length} group(s), ${sgRules.length} rule(s))
|
||||||
|
</h3>
|
||||||
|
`;
|
||||||
|
|
||||||
|
Object.values(groupedResults).forEach(sg => {
|
||||||
|
const githubUrl = buildGitHubUrl(sg.tag_git_org, sg.tag_git_repo, sg.tag_git_file);
|
||||||
|
const sourceLink = githubUrl
|
||||||
|
? `<a href="${githubUrl}" target="_blank" style="color: var(--primary-color); text-decoration: underline;">${sg.tag_git_repo || 'GitHub'}</a>`
|
||||||
|
: (sg.tag_git_repo || 'N/A');
|
||||||
|
|
||||||
|
const accountStyle = getAccountStyle(sg.account_name);
|
||||||
|
|
||||||
|
html += `
|
||||||
|
<div style="border: 1px solid #e2e8f0; border-radius: 0.375rem; padding: 1rem; margin-bottom: 1rem;">
|
||||||
|
<div style="display: flex; justify-content: space-between; align-items: start; margin-bottom: 1rem;">
|
||||||
|
<div>
|
||||||
|
<h4 style="margin: 0 0 0.5rem 0; color: #1e293b; cursor: pointer;" onclick="showDetails('sg', '${sg.group_id}')">${sg.tag_name || sg.group_name}</h4>
|
||||||
|
<div style="color: #64748b; font-size: 0.875rem;">
|
||||||
|
<strong>Group ID:</strong> <span style="font-family: monospace;">${sg.group_id}</span> |
|
||||||
|
<strong>Account:</strong> <span style="${accountStyle}">${sg.account_name}</span> |
|
||||||
|
<strong>Wave:</strong> ${sg.tag_wave || 'N/A'} |
|
||||||
|
<strong>Source:</strong> ${sourceLink}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<button onclick="event.stopPropagation(); showDetails('sg', '${sg.group_id}')" style="padding: 0.5rem 1rem; background: var(--primary-color); color: white; border: none; border-radius: 0.375rem; cursor: pointer; font-size: 0.875rem;">View Details</button>
|
||||||
|
</div>
|
||||||
|
<div style="overflow-x: auto;">
|
||||||
|
<table style="width: 100%; border-collapse: collapse; font-size: 0.875rem;">
|
||||||
|
<thead>
|
||||||
|
<tr style="background: #f8fafc; border-bottom: 2px solid #e2e8f0;">
|
||||||
|
<th style="padding: 0.75rem; text-align: left; font-weight: 600; color: #475569;">Direction</th>
|
||||||
|
<th style="padding: 0.75rem; text-align: left; font-weight: 600; color: #475569;">Protocol</th>
|
||||||
|
<th style="padding: 0.75rem; text-align: left; font-weight: 600; color: #475569;">Port Range</th>
|
||||||
|
<th style="padding: 0.75rem; text-align: left; font-weight: 600; color: #475569;">Source Type</th>
|
||||||
|
<th style="padding: 0.75rem; text-align: left; font-weight: 600; color: #475569;">Source</th>
|
||||||
|
<th style="padding: 0.75rem; text-align: left; font-weight: 600; color: #475569;">Description</th>
|
||||||
|
</tr>
|
||||||
|
</thead>
|
||||||
|
<tbody>
|
||||||
|
`;
|
||||||
|
|
||||||
|
sg.rules.forEach(rule => {
|
||||||
|
const directionBadge = rule.direction === 'ingress'
|
||||||
|
? '<span style="display: inline-block; padding: 0.125rem 0.5rem; background: #dbeafe; color: #1e40af; border-radius: 0.25rem; font-weight: 500;">Ingress</span>'
|
||||||
|
: '<span style="display: inline-block; padding: 0.125rem 0.5rem; background: #fce7f3; color: #9f1239; border-radius: 0.25rem; font-weight: 500;">Egress</span>';
|
||||||
|
|
||||||
|
html += `
|
||||||
|
<tr style="border-bottom: 1px solid #e2e8f0;">
|
||||||
|
<td style="padding: 0.75rem;">${directionBadge}</td>
|
||||||
|
<td style="padding: 0.75rem; font-family: monospace;">${rule.protocol}</td>
|
||||||
|
<td style="padding: 0.75rem; font-family: monospace;">${rule.port_range}</td>
|
||||||
|
<td style="padding: 0.75rem;">${rule.source_type}</td>
|
||||||
|
<td style="padding: 0.75rem; font-family: monospace; font-weight: 600; color: #8b5cf6;">${rule.source}</td>
|
||||||
|
<td style="padding: 0.75rem; color: #64748b;">${rule.description || '-'}</td>
|
||||||
|
</tr>
|
||||||
|
`;
|
||||||
|
});
|
||||||
|
|
||||||
|
html += `
|
||||||
|
</tbody>
|
||||||
|
</table>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
`;
|
||||||
|
});
|
||||||
|
|
||||||
|
html += `</div>`;
|
||||||
|
}
|
||||||
|
|
||||||
|
resultsDiv.innerHTML = html;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Render the flat IP-search results into the table view.
 *
 * @param {Array<Object>} flatResults - rows produced by the IP search; each item
 *     carries type ('ec2' or 'sg'), id, name, account_name, ip, direction,
 *     protocol, port_range and the tag_git_* fields used to build a source link.
 * @param {HTMLElement} tableBody - the <tbody> element whose innerHTML is replaced.
 */
function renderIPTableView(flatResults, tableBody) {
    // Empty or missing result set: show a single placeholder row spanning all columns.
    if (!flatResults || flatResults.length === 0) {
        tableBody.innerHTML = '<tr><td colspan="9" style="text-align: center; padding: 2rem;">No results found</td></tr>';
        return;
    }

    tableBody.innerHTML = flatResults.map(item => {
        // Link to the defining IaC file on GitHub when the git tags are present;
        // otherwise fall back to the plain repo name or '-'.
        const githubUrl = buildGitHubUrl(item.tag_git_org, item.tag_git_repo, item.tag_git_file);
        const sourceCell = githubUrl
            ? `<a href="${githubUrl}" target="_blank" onclick="event.stopPropagation()" style="color: var(--primary-color); text-decoration: underline;">${item.tag_git_repo || 'GitHub'}</a>`
            : (item.tag_git_repo || '-');

        const accountStyle = getAccountStyle(item.account_name);

        // Direction badge only applies to SG-rule rows; other rows render '-'.
        const directionBadge = item.direction === 'ingress'
            ? '<span style="display: inline-block; padding: 0.125rem 0.5rem; background: #dbeafe; color: #1e40af; border-radius: 0.25rem; font-weight: 500; font-size: 0.75rem;">Ingress</span>'
            : item.direction === 'egress'
            ? '<span style="display: inline-block; padding: 0.125rem 0.5rem; background: #fce7f3; color: #9f1239; border-radius: 0.25rem; font-weight: 500; font-size: 0.75rem;">Egress</span>'
            : '-';

        return `
            <tr onclick="showDetails('${item.type}', '${item.id}')" style="cursor: pointer;">
                <td><span class="table-type-badge ${item.type}">${item.type.toUpperCase()}</span></td>
                <td>${item.name}</td>
                <td class="table-cell-mono">${item.id}</td>
                <td><span style="${accountStyle}">${item.account_name}</span></td>
                <td class="table-cell-mono" style="font-weight: 600; color: #8b5cf6;">${item.ip || '-'}</td>
                <td>${directionBadge}</td>
                <td class="table-cell-mono">${item.protocol}</td>
                <td class="table-cell-mono">${item.port_range}</td>
                <td class="table-cell-secondary">${sourceCell}</td>
            </tr>
        `;
    }).join('');
}
|
||||||
|
|
||||||
|
// Enable Enter key for IP search
|
||||||
|
// Wire up Enter-key submission for the IP search inputs once the DOM is ready.
document.addEventListener('DOMContentLoaded', function() {
    // Shared handler: pressing Enter in any wired field runs the IP search.
    const enterTriggersSearch = function(e) {
        if (e.key === 'Enter') {
            performIPSearch();
        }
    };

    ['ipSearchInput', 'ipTextFilter', 'ipPortFilter'].forEach(function(elementId) {
        const field = document.getElementById(elementId);
        if (field) {
            field.addEventListener('keypress', enterTriggersSearch);
        }
    });
});
|
||||||
</script>
|
</script>
|
||||||
|
|
||||||
<footer style="text-align: center; padding: 2rem 1rem; margin-top: 4rem; border-top: 1px solid var(--border-color); color: #64748b; font-size: 0.875rem;">
|
<footer style="text-align: center; padding: 2rem 1rem; margin-top: 4rem; border-top: 1px solid var(--border-color); color: #64748b; font-size: 0.875rem;">
|
||||||
|
|
|
||||||
406
wiki/Building-and-Publishing.md
Normal file
406
wiki/Building-and-Publishing.md
Normal file
|
|
@ -0,0 +1,406 @@
|
||||||
|
# Building and Publishing to Quay.io
|
||||||
|
|
||||||
|
This guide covers how to build and publish SGO container images to Quay.io.
|
||||||
|
|
||||||
|
## Prerequisites
|
||||||
|
|
||||||
|
1. **Quay.io account**: Create account at https://quay.io
|
||||||
|
2. **Repository created**: Create a repository (e.g., `yourusername/sgo`)
|
||||||
|
3. **Docker or Podman installed**: For building and pushing images
|
||||||
|
|
||||||
|
## Manual Build and Push
|
||||||
|
|
||||||
|
### Step 1: Login to Quay.io
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Using Docker
|
||||||
|
docker login quay.io
|
||||||
|
|
||||||
|
# Using Podman
|
||||||
|
podman login quay.io
|
||||||
|
|
||||||
|
# You'll be prompted for:
|
||||||
|
# Username: your_quay_username
|
||||||
|
# Password: your_quay_password (or robot account token)
|
||||||
|
```
|
||||||
|
|
||||||
|
### Step 2: Build the Image
|
||||||
|
|
||||||
|
```bash
|
||||||
|
cd /path/to/sgo
|
||||||
|
|
||||||
|
# Build with Docker
|
||||||
|
docker build -t quay.io/yourusername/sgo:latest .
|
||||||
|
|
||||||
|
# Build with Podman
|
||||||
|
podman build -t quay.io/yourusername/sgo:latest .
|
||||||
|
```
|
||||||
|
|
||||||
|
### Step 3: Tag for Version (Optional)
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Tag with version number
|
||||||
|
docker tag quay.io/yourusername/sgo:latest quay.io/yourusername/sgo:v1.0.0
|
||||||
|
|
||||||
|
# Tag with git commit
|
||||||
|
docker tag quay.io/yourusername/sgo:latest quay.io/yourusername/sgo:$(git rev-parse --short HEAD)
|
||||||
|
```
|
||||||
|
|
||||||
|
### Step 4: Push to Quay.io
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Push latest tag
|
||||||
|
docker push quay.io/yourusername/sgo:latest
|
||||||
|
|
||||||
|
# Push version tag
|
||||||
|
docker push quay.io/yourusername/sgo:v1.0.0
|
||||||
|
|
||||||
|
# Push all tags
|
||||||
|
docker push quay.io/yourusername/sgo --all-tags
|
||||||
|
```
|
||||||
|
|
||||||
|
## Using Robot Accounts (Recommended for CI/CD)
|
||||||
|
|
||||||
|
Robot accounts provide scoped credentials for automation.
|
||||||
|
|
||||||
|
### Create Robot Account
|
||||||
|
|
||||||
|
1. Go to your repository on Quay.io
|
||||||
|
2. Click **Settings** → **Robot Accounts**
|
||||||
|
3. Click **Create Robot Account**
|
||||||
|
4. Name it (e.g., `sgo_builder`)
|
||||||
|
5. Grant **Write** permissions
|
||||||
|
6. Save the credentials (username and token)
|
||||||
|
|
||||||
|
### Login with Robot Account
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Robot account username format: username+robotname (the registry is still quay.io)
|
||||||
|
docker login quay.io
|
||||||
|
Username: yourusername+sgo_builder
|
||||||
|
Password: <robot-token>
|
||||||
|
```
|
||||||
|
|
||||||
|
## Automated Build with Woodpecker CI
|
||||||
|
|
||||||
|
### Step 1: Add Secrets to Woodpecker
|
||||||
|
|
||||||
|
Add these secrets to your Woodpecker CI configuration:
|
||||||
|
|
||||||
|
- `QUAY_USERNAME` - Your quay.io username or robot account
|
||||||
|
- `QUAY_PASSWORD` - Your quay.io password or robot token
|
||||||
|
|
||||||
|
### Step 2: Create Woodpecker Pipeline
|
||||||
|
|
||||||
|
Create `.woodpecker/docker-publish.yml`:
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
pipeline:
|
||||||
|
docker-build-and-push:
|
||||||
|
image: docker:dind
|
||||||
|
volumes:
|
||||||
|
- /var/run/docker.sock:/var/run/docker.sock
|
||||||
|
environment:
|
||||||
|
- QUAY_USERNAME
|
||||||
|
- QUAY_PASSWORD
|
||||||
|
commands:
|
||||||
|
# Login to Quay.io
|
||||||
|
- echo "$QUAY_PASSWORD" | docker login quay.io -u "$QUAY_USERNAME" --password-stdin
|
||||||
|
|
||||||
|
# Build image
|
||||||
|
- docker build -t quay.io/yourusername/sgo:latest .
|
||||||
|
- docker tag quay.io/yourusername/sgo:latest quay.io/yourusername/sgo:${CI_COMMIT_SHA}
|
||||||
|
|
||||||
|
# Push images
|
||||||
|
- docker push quay.io/yourusername/sgo:latest
|
||||||
|
- docker push quay.io/yourusername/sgo:${CI_COMMIT_SHA}
|
||||||
|
|
||||||
|
when:
|
||||||
|
branch:
|
||||||
|
- main
|
||||||
|
event:
|
||||||
|
- push
|
||||||
|
- tag
|
||||||
|
```
|
||||||
|
|
||||||
|
### Step 3: Update Main Woodpecker Config
|
||||||
|
|
||||||
|
Add to `.woodpecker.yml`:
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
when:
|
||||||
|
event: [push, pull_request, tag]
|
||||||
|
|
||||||
|
pipeline:
|
||||||
|
dependencies:
|
||||||
|
image: python:3.11-slim
|
||||||
|
commands:
|
||||||
|
- pip install -r requirements.txt
|
||||||
|
|
||||||
|
syntax-check:
|
||||||
|
image: python:3.11-slim
|
||||||
|
commands:
|
||||||
|
- python -m py_compile app.py
|
||||||
|
- python -m py_compile import_from_aws.py
|
||||||
|
- python -m py_compile import_data.py
|
||||||
|
|
||||||
|
docker-build:
|
||||||
|
image: docker:dind
|
||||||
|
volumes:
|
||||||
|
- /var/run/docker.sock:/var/run/docker.sock
|
||||||
|
commands:
|
||||||
|
- docker build -t sgo:${CI_COMMIT_SHA} .
|
||||||
|
|
||||||
|
security-scan:
|
||||||
|
image: python:3.11-slim
|
||||||
|
commands:
|
||||||
|
- pip install bandit safety
|
||||||
|
- bandit -r . -ll || true
|
||||||
|
- safety check --file requirements.txt || true
|
||||||
|
|
||||||
|
# Only push on main branch
|
||||||
|
publish-to-quay:
|
||||||
|
image: docker:dind
|
||||||
|
volumes:
|
||||||
|
- /var/run/docker.sock:/var/run/docker.sock
|
||||||
|
secrets: [quay_username, quay_password]
|
||||||
|
commands:
|
||||||
|
- echo "$QUAY_PASSWORD" | docker login quay.io -u "$QUAY_USERNAME" --password-stdin
|
||||||
|
- docker build -t quay.io/yourusername/sgo:latest .
|
||||||
|
- docker tag quay.io/yourusername/sgo:latest quay.io/yourusername/sgo:${CI_COMMIT_SHA}
|
||||||
|
- docker push quay.io/yourusername/sgo:latest
|
||||||
|
- docker push quay.io/yourusername/sgo:${CI_COMMIT_SHA}
|
||||||
|
when:
|
||||||
|
branch: main
|
||||||
|
event: push
|
||||||
|
```
|
||||||
|
|
||||||
|
## Multi-Architecture Builds
|
||||||
|
|
||||||
|
Build for multiple architectures (amd64, arm64):
|
||||||
|
|
||||||
|
### Using Docker Buildx
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Create builder
|
||||||
|
docker buildx create --name multiarch --use
|
||||||
|
|
||||||
|
# Build and push multi-arch image
|
||||||
|
docker buildx build \
|
||||||
|
--platform linux/amd64,linux/arm64 \
|
||||||
|
-t quay.io/yourusername/sgo:latest \
|
||||||
|
--push \
|
||||||
|
.
|
||||||
|
```
|
||||||
|
|
||||||
|
### Using Podman
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Build for amd64
|
||||||
|
podman build --platform linux/amd64 -t quay.io/yourusername/sgo:latest-amd64 .
|
||||||
|
|
||||||
|
# Build for arm64
|
||||||
|
podman build --platform linux/arm64 -t quay.io/yourusername/sgo:latest-arm64 .
|
||||||
|
|
||||||
|
# Create and push manifest
|
||||||
|
podman manifest create quay.io/yourusername/sgo:latest
|
||||||
|
podman manifest add quay.io/yourusername/sgo:latest quay.io/yourusername/sgo:latest-amd64
|
||||||
|
podman manifest add quay.io/yourusername/sgo:latest quay.io/yourusername/sgo:latest-arm64
|
||||||
|
podman manifest push quay.io/yourusername/sgo:latest
|
||||||
|
```
|
||||||
|
|
||||||
|
## Tagging Strategy
|
||||||
|
|
||||||
|
### Recommended Tags
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Latest - always points to newest build
|
||||||
|
quay.io/yourusername/sgo:latest
|
||||||
|
|
||||||
|
# Version tags - for releases
|
||||||
|
quay.io/yourusername/sgo:v1.0.0
|
||||||
|
quay.io/yourusername/sgo:v1.0
|
||||||
|
quay.io/yourusername/sgo:v1
|
||||||
|
|
||||||
|
# Git commit SHA - for specific commits
|
||||||
|
quay.io/yourusername/sgo:abc1234
|
||||||
|
|
||||||
|
# Branch name - for development branches
|
||||||
|
quay.io/yourusername/sgo:dev
|
||||||
|
quay.io/yourusername/sgo:feature-xyz
|
||||||
|
```
|
||||||
|
|
||||||
|
### Applying Multiple Tags
|
||||||
|
|
||||||
|
```bash
|
||||||
|
IMAGE_NAME="quay.io/yourusername/sgo"
|
||||||
|
VERSION="v1.0.0"
|
||||||
|
COMMIT=$(git rev-parse --short HEAD)
|
||||||
|
|
||||||
|
# Build once
|
||||||
|
docker build -t ${IMAGE_NAME}:${VERSION} .
|
||||||
|
|
||||||
|
# Apply multiple tags
|
||||||
|
docker tag ${IMAGE_NAME}:${VERSION} ${IMAGE_NAME}:latest
|
||||||
|
docker tag ${IMAGE_NAME}:${VERSION} ${IMAGE_NAME}:${COMMIT}
|
||||||
|
|
||||||
|
# Push all tags
|
||||||
|
docker push ${IMAGE_NAME}:${VERSION}
|
||||||
|
docker push ${IMAGE_NAME}:latest
|
||||||
|
docker push ${IMAGE_NAME}:${COMMIT}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Verifying the Published Image
|
||||||
|
|
||||||
|
### Pull and Test
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Pull the image
|
||||||
|
docker pull quay.io/yourusername/sgo:latest
|
||||||
|
|
||||||
|
# Test it works
|
||||||
|
docker run --rm \
|
||||||
|
-e AWS_CONFIG_PATH=/tmp/aws-host \
|
||||||
|
-e PUID=1000 \
|
||||||
|
-e PGID=1000 \
|
||||||
|
-v $HOME/.aws:/tmp/aws-host:ro \
|
||||||
|
-v sgo-data:/app/data \
|
||||||
|
-p 5000:5000 \
|
||||||
|
quay.io/yourusername/sgo:latest
|
||||||
|
```
|
||||||
|
|
||||||
|
### Check Image Details
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Inspect image
|
||||||
|
docker inspect quay.io/yourusername/sgo:latest
|
||||||
|
|
||||||
|
# Check image size
|
||||||
|
docker images quay.io/yourusername/sgo
|
||||||
|
|
||||||
|
# View image history
|
||||||
|
docker history quay.io/yourusername/sgo:latest
|
||||||
|
```
|
||||||
|
|
||||||
|
## Making Repository Public
|
||||||
|
|
||||||
|
By default, Quay.io repositories are private.
|
||||||
|
|
||||||
|
To make public:
|
||||||
|
1. Go to repository on Quay.io
|
||||||
|
2. Click **Settings**
|
||||||
|
3. Change **Repository Visibility** to **Public**
|
||||||
|
4. Click **Save**
|
||||||
|
|
||||||
|
## Updating compose.yml for Quay.io
|
||||||
|
|
||||||
|
Users can pull from Quay.io by updating their `compose.yml`:
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
services:
|
||||||
|
sgo:
|
||||||
|
image: quay.io/yourusername/sgo:latest
|
||||||
|
# rest of configuration...
|
||||||
|
```
|
||||||
|
|
||||||
|
Or pull specific version:
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
services:
|
||||||
|
sgo:
|
||||||
|
image: quay.io/yourusername/sgo:v1.0.0
|
||||||
|
# rest of configuration...
|
||||||
|
```
|
||||||
|
|
||||||
|
## Troubleshooting
|
||||||
|
|
||||||
|
### Authentication Failed
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Clear old credentials
|
||||||
|
rm ~/.docker/config.json
|
||||||
|
|
||||||
|
# Login again
|
||||||
|
docker login quay.io
|
||||||
|
```
|
||||||
|
|
||||||
|
### Push Denied
|
||||||
|
|
||||||
|
- Verify repository exists on Quay.io
|
||||||
|
- Check robot account has **Write** permissions
|
||||||
|
- Ensure you're logged in to correct account
|
||||||
|
|
||||||
|
### Build Fails
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Check Dockerfile syntax
|
||||||
|
docker build --no-cache -t test .
|
||||||
|
|
||||||
|
# View build logs
|
||||||
|
docker build -t test . 2>&1 | tee build.log
|
||||||
|
```
|
||||||
|
|
||||||
|
### Image Too Large
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Check image size
|
||||||
|
docker images quay.io/yourusername/sgo
|
||||||
|
|
||||||
|
# Use .dockerignore to exclude files
|
||||||
|
# Already configured in project
|
||||||
|
|
||||||
|
# Use multi-stage builds (if applicable)
|
||||||
|
# Current Dockerfile is already optimized
|
||||||
|
```
|
||||||
|
|
||||||
|
## Complete Build Script
|
||||||
|
|
||||||
|
Create `scripts/build-and-push.sh`:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
#!/bin/bash
|
||||||
|
set -e
|
||||||
|
|
||||||
|
# Configuration
|
||||||
|
REGISTRY="quay.io"
|
||||||
|
USERNAME="yourusername"
|
||||||
|
REPO="sgo"
|
||||||
|
IMAGE="${REGISTRY}/${USERNAME}/${REPO}"
|
||||||
|
|
||||||
|
# Get version from git tag or use dev
|
||||||
|
VERSION=$(git describe --tags --abbrev=0 2>/dev/null || echo "dev")
|
||||||
|
COMMIT=$(git rev-parse --short HEAD)
|
||||||
|
|
||||||
|
echo "Building ${IMAGE}:${VERSION}"
|
||||||
|
|
||||||
|
# Build image
|
||||||
|
docker build -t ${IMAGE}:${VERSION} .
|
||||||
|
|
||||||
|
# Tag with multiple tags
|
||||||
|
docker tag ${IMAGE}:${VERSION} ${IMAGE}:latest
|
||||||
|
docker tag ${IMAGE}:${VERSION} ${IMAGE}:${COMMIT}
|
||||||
|
|
||||||
|
echo "Pushing to ${REGISTRY}"
|
||||||
|
|
||||||
|
# Push all tags
|
||||||
|
docker push ${IMAGE}:${VERSION}
|
||||||
|
docker push ${IMAGE}:latest
|
||||||
|
docker push ${IMAGE}:${COMMIT}
|
||||||
|
|
||||||
|
echo "Successfully pushed:"
|
||||||
|
echo " - ${IMAGE}:${VERSION}"
|
||||||
|
echo " - ${IMAGE}:latest"
|
||||||
|
echo " - ${IMAGE}:${COMMIT}"
|
||||||
|
```
|
||||||
|
|
||||||
|
Make it executable:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
chmod +x scripts/build-and-push.sh
|
||||||
|
```
|
||||||
|
|
||||||
|
Run it:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
./scripts/build-and-push.sh
|
||||||
|
```
|
||||||
Loading…
Reference in a new issue