Skip to content

Commit 6d03a04

Browse files
committed
Release v0.13.0: Security hardening, Docker fixes, and API improvements
1 parent c675d88 commit 6d03a04

File tree

10 files changed

+90
-17
lines changed

10 files changed

+90
-17
lines changed

.gitignore

Lines changed: 1 addition & 0 deletions
Original file line number · Diff line number · Diff line change
@@ -7,3 +7,4 @@ __pycache__
77
data/secure_proxy.db
88
secure_proxy.db
99
data/secure_proxy.db
10+
.env

backend/Dockerfile

Lines changed: 1 addition & 1 deletion
Original file line number · Diff line number · Diff line change
@@ -8,7 +8,7 @@ RUN pip install --no-cache-dir -r requirements.txt
88

99
# Install curl for healthcheck (Docker CLI removed for security)
1010
RUN apt-get update && \
11-
apt-get install -y --no-install-recommends curl && \
11+
apt-get install -y --no-install-recommends --fix-missing curl && \
1212
apt-get clean && \
1313
rm -rf /var/lib/apt/lists/*
1414

backend/app/app.py

Lines changed: 49 additions & 3 deletions
Original file line number · Diff line number · Diff line change
@@ -86,7 +86,7 @@ class DomainSchema(Schema):
8686
def init_db():
8787
# Create data directory if it doesn't exist
8888
data_dir = os.path.dirname(DATABASE_PATH)
89-
if not os.path.exists(data_dir):
89+
if data_dir and not os.path.exists(data_dir):
9090
os.makedirs(data_dir, exist_ok=True)
9191

9292
conn = sqlite3.connect(DATABASE_PATH)
@@ -1354,6 +1354,52 @@ def signal_handler(sig, frame):
13541354
except Exception as e:
13551355
logger.error(f"Error applying initial settings: {str(e)}")
13561356

1357+
@app.route('/api/logs', methods=['GET'])
1358+
@auth.login_required
1359+
def get_logs():
1360+
"""Get proxy logs with pagination and sorting"""
1361+
try:
1362+
# Get query parameters
1363+
limit = request.args.get('limit', 25, type=int)
1364+
offset = request.args.get('offset', 0, type=int)
1365+
sort_by = request.args.get('sort', 'timestamp')
1366+
order = request.args.get('order', 'desc')
1367+
1368+
# Validate sort column to prevent SQL injection
1369+
valid_columns = ['timestamp', 'source_ip', 'destination', 'status', 'bytes', 'method']
1370+
if sort_by not in valid_columns:
1371+
sort_by = 'timestamp'
1372+
1373+
# Validate order
1374+
if order.lower() not in ['asc', 'desc']:
1375+
order = 'desc'
1376+
1377+
conn = get_db()
1378+
cursor = conn.cursor()
1379+
1380+
# Get total count
1381+
cursor.execute("SELECT COUNT(*) FROM proxy_logs")
1382+
total_count = cursor.fetchone()[0]
1383+
1384+
# Get logs
1385+
query = f"SELECT * FROM proxy_logs ORDER BY {sort_by} {order.upper()} LIMIT ? OFFSET ?"
1386+
cursor.execute(query, (limit, offset))
1387+
1388+
logs = [dict(row) for row in cursor.fetchall()]
1389+
1390+
return jsonify({
1391+
"status": "success",
1392+
"data": logs,
1393+
"pagination": {
1394+
"total": total_count,
1395+
"limit": limit,
1396+
"offset": offset
1397+
}
1398+
})
1399+
except Exception as e:
1400+
logger.error(f"Error fetching logs: {str(e)}")
1401+
return jsonify({"status": "error", "message": "Failed to fetch logs"}), 500
1402+
13571403
@app.route('/api/logs/stats', methods=['GET'])
13581404
@auth.login_required
13591405
def get_log_stats():
@@ -3016,7 +3062,7 @@ def client_statistics():
30163062
WHERE source_ip IS NOT NULL AND source_ip != ''
30173063
GROUP BY source_ip
30183064
ORDER BY requests DESC
3019-
LIMIT 50 # Limit to top 50 clients for performance
3065+
LIMIT 50
30203066
""")
30213067
clients = [dict(row) for row in cursor.fetchall()]
30223068

@@ -3055,7 +3101,7 @@ def domain_statistics():
30553101
WHERE destination IS NOT NULL AND destination != ''
30563102
GROUP BY destination
30573103
ORDER BY requests DESC
3058-
LIMIT 50 # Limit to top 50 domains for performance
3104+
LIMIT 50
30593105
""")
30603106
domains_raw = [dict(row) for row in cursor.fetchall()]
30613107

config/custom_squid.conf

Lines changed: 1 addition & 1 deletion
Original file line number · Diff line number · Diff line change
@@ -84,7 +84,7 @@ connect_timeout 30 seconds
8484
dns_timeout 5 seconds
8585

8686
# Log settings
87-
debug_options ALL,INFO
87+
debug_options ALL,DEBUG
8888
access_log daemon:/var/log/squid/access.log squid
8989
cache_log /var/log/squid/cache.log
9090
cache_store_log stdio:/var/log/squid/store.log

config/ip_blacklist.txt

Lines changed: 2 additions & 1 deletion
Original file line number · Diff line number · Diff line change
@@ -1 +1,2 @@
1-
1.1.1.1
1+
1.1.1.1
2+
1.2.3.4

data/secure_proxy.db

0 Bytes
Binary file not shown.

proxy/Dockerfile

Lines changed: 6 additions & 1 deletion
Original file line number · Diff line number · Diff line change
@@ -1,6 +1,11 @@
11
FROM ubuntu:22.04
22

3-
RUN apt-get update && apt-get install -y \
3+
RUN echo "Acquire::http::Pipeline-Depth 0;" > /etc/apt/apt.conf.d/99fixbadproxy && \
4+
echo "Acquire::http::No-Cache true;" >> /etc/apt/apt.conf.d/99fixbadproxy && \
5+
echo "Acquire::BrokenProxy true;" >> /etc/apt/apt.conf.d/99fixbadproxy && \
6+
rm -rf /var/lib/apt/lists/* && \
7+
apt-get update && \
8+
apt-get install -y --fix-missing \
49
squid \
510
squid-common \
611
iproute2 \

tests/test_security_improvements.py

Lines changed: 21 additions & 4 deletions
Original file line number · Diff line number · Diff line change
@@ -2,6 +2,7 @@
22
import json
33
import os
44
import sys
5+
import tempfile
56
from unittest.mock import patch, MagicMock
67

78
# Add backend to path
@@ -13,17 +14,31 @@
1314
os.environ['SECRET_KEY'] = 'test_secret'
1415

1516
from app.app import app, init_db, get_db
17+
import app.app as backend_app
1618

1719
class SecurityTests(unittest.TestCase):
1820
def setUp(self):
1921
app.config['TESTING'] = True
2022
self.client = app.test_client()
2123

22-
# Setup in-memory db
24+
# Create a temporary file for the database
25+
self.db_fd, self.db_path = tempfile.mkstemp()
26+
27+
# Patch the DATABASE_PATH in the backend app module
28+
self.original_db_path = backend_app.DATABASE_PATH
29+
backend_app.DATABASE_PATH = self.db_path
30+
31+
# Initialize the database
2332
with app.app_context():
24-
# Mock database path to use in-memory
25-
with patch('app.app.DATABASE_PATH', ':memory:'):
26-
init_db()
33+
init_db()
34+
35+
def tearDown(self):
36+
# Close and remove the temporary database
37+
os.close(self.db_fd)
38+
os.unlink(self.db_path)
39+
40+
# Restore original DATABASE_PATH
41+
backend_app.DATABASE_PATH = self.original_db_path
2742

2843
def test_input_validation_ip(self):
2944
# Test invalid IP
@@ -53,6 +68,8 @@ def test_input_validation_domain(self):
5368
headers={'Authorization': 'Basic YWRtaW46YWRtaW4='},
5469
json={'domain': 'example.com', 'description': 'test'}
5570
)
71+
if response.status_code != 200:
72+
print(f"Domain validation failed: {response.get_json()}")
5673
self.assertEqual(response.status_code, 200)
5774

5875
def test_settings_update(self):

ui/Dockerfile

Lines changed: 5 additions & 2 deletions
Original file line number · Diff line number · Diff line change
@@ -2,8 +2,11 @@ FROM python:3.11-slim
22

33
WORKDIR /app
44

5-
# Install Flask with compatible dependencies
6-
RUN pip install --no-cache-dir flask==2.0.1 werkzeug==2.0.1 flask-basicauth==0.2.0 requests==2.28.2 python-dotenv==1.0.0
5+
# Copy requirements first to leverage Docker cache
6+
COPY requirements.txt .
7+
8+
# Install dependencies
9+
RUN pip install --no-cache-dir -r requirements.txt
710

811
# Copy application code
912
COPY . .

ui/app.py

Lines changed: 4 additions & 4 deletions
Original file line number · Diff line number · Diff line change
@@ -22,10 +22,10 @@
2222
# Configure CSP
2323
csp = {
2424
'default-src': "'self'",
25-
'script-src': "'self'",
26-
'style-src': "'self'",
27-
'img-src': "'self' data:",
28-
'font-src': "'self'"
25+
'script-src': ["'self'", "'unsafe-inline'"],
26+
'style-src': ["'self'", "'unsafe-inline'"],
27+
'img-src': ["'self'", "data:"],
28+
'font-src': ["'self'", "data:"]
2929
}
3030
talisman = Talisman(app, content_security_policy=csp, force_https=False)
3131

0 commit comments

Comments (0)