added refining methods

tcsenpai 2024-11-09 13:42:18 +01:00
parent 65e6c7e0f8
commit 8be26338eb
7 changed files with 159 additions and 207 deletions

README.md

@@ -1,6 +1,8 @@
# OProxy
-*High-performance, transparent proxy that supports both TCP and UDP protocols.*
+_High-performance, transparent proxy that supports both TCP and UDP protocols._
![OProxy](./imgs/screenshot.png)
A high-performance, transparent proxy that supports both TCP and UDP protocols.
@@ -10,11 +12,17 @@ A high-performance, transparent proxy that supports both TCP and UDP protocols.
- Transparent TCP proxying
- HTTP/HTTPS proxying without decrypting the traffic
- Headers and other metadata fully preserved
- Optional UDP support
- Detailed logging capabilities
- Configurable through environment variables
- Support for both file and stdout logging
- Data content logging (optional)
- Performance optimizations with configurable buffer sizes
- Real-time metrics monitoring
- Automatic log rotation
- Thread-safe metrics collection
- Throughput and connection statistics
## Requirements
@@ -23,6 +31,28 @@ A high-performance, transparent proxy that supports both TCP and UDP protocols.
- socket
- threading
## Performance Features
### Metrics Monitoring
The proxy now includes built-in metrics collection and monitoring:
- Total connections tracking
- Active connections monitoring
- Bytes transferred counting
- Real-time throughput calculation
- Periodic metrics reporting (every 60 seconds)
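For a quick feel of how these counters are meant to be used, here is a minimal sketch that mirrors the `ConnectionMetrics`/`MetricsReporter` pair added in `src/proxy/metrics.py` further down in this commit (the printed values are illustrative):

```python
from proxy.metrics import ConnectionMetrics, MetricsReporter

metrics = ConnectionMetrics()
MetricsReporter(metrics, interval=60).start()  # daemon thread, logs stats every 60 s

# Handlers update the shared, lock-protected counters as traffic flows:
metrics.increment_connection()
metrics.add_bytes(4096)
print(metrics.get_stats())  # {'total_connections': 1, 'active_connections': 1, 'bytes_transferred': 4096, ...}
metrics.decrement_active()
```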
### Performance Optimizations
- Optimized buffer sizes (65KB)
- Non-blocking I/O using select
- Socket buffer optimization
- Thread-safe operations
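A condensed sketch of the relay pattern these bullets refer to; the committed version lives in `src/proxy/tcp_handler.py` (see the diff at the end of this page), and the buffer size and 1-second `select` timeout below mirror it:

```python
import select
import socket

BUFFER_SIZE = 65536  # matches TCPHandler.buffer_size

def relay(source: socket.socket, destination: socket.socket) -> None:
    # Enlarge the kernel socket buffers, then poll with a timeout so the
    # loop can react to shutdown instead of blocking forever in recv().
    source.setsockopt(socket.SOL_SOCKET, socket.SO_RCVBUF, BUFFER_SIZE)
    destination.setsockopt(socket.SOL_SOCKET, socket.SO_SNDBUF, BUFFER_SIZE)
    while True:
        ready, _, _ = select.select([source], [], [], 1.0)
        if ready:
            data = source.recv(BUFFER_SIZE)
            if not data:
                break
            destination.sendall(data)
```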
### Log Management
- Automatic log rotation (10MB per file)
- Up to 5 backup log files
- UTF-8 encoding support
- Compressed backup files
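The rotation settings come from the `logging.handlers.RotatingFileHandler` configuration added in `src/proxy/logger.py` (shown later in this commit); a standalone equivalent looks roughly like this:

```python
import logging
import logging.handlers

handler = logging.handlers.RotatingFileHandler(
    "proxy.log",
    maxBytes=10 * 1024 * 1024,  # rotate once the file reaches 10 MB
    backupCount=5,              # keep proxy.log.1 ... proxy.log.5
    encoding="utf-8",
)
handler.setFormatter(logging.Formatter("%(asctime)s - %(levelname)s - %(message)s"))
logging.basicConfig(level=logging.INFO, handlers=[handler])
```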
## Installation

1. Clone the repository:
@@ -39,23 +69,21 @@ cd oproxy
pip install -r requirements.txt
```
3. Copy the example environment file:
```bash
cp .env.example .env
```
4. Edit the .env file with your configuration:
```bash
# Example: your Ollama server is running on 192.168.1.100:11434
PROXY_PORT=11434
-TARGET_HOST=127.0.0.1
-TARGET_PORT=80
+TARGET_HOST=192.168.1.100
+TARGET_PORT=11434
```
## Usage

Basic TCP proxy:
@@ -76,12 +104,31 @@ Enable data logging with debug level:
python src/main.py --log-file proxy.log --log-data --log-level DEBUG
```
Enable full data logging:
**NOTE:** This will log the entire payload of the request and response.
```bash
python src/main.py --log-file proxy.log --log-data --full-debug
```
Enable UDP support:
```bash
python src/main.py --enable-udp
```
### View Metrics
Metrics are automatically logged to your configured log file or stdout. They include:
```
Performance Metrics: {
    'total_connections': 150,
    'active_connections': 3,
    'bytes_transferred': 1048576,
    'uptime_seconds': 3600,
    'bytes_per_second': 291.27
}
```
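Since the metrics line is a plain Python dict repr, it is easy to pull the latest snapshot back out of a log file. This is a hypothetical helper for illustration, not part of the project:

```python
import ast
import re

def latest_metrics(log_path: str = "proxy.log"):
    """Return the most recent Performance Metrics dict found in the log, or None."""
    pattern = re.compile(r"Performance Metrics: (\{.*\})")
    last = None
    with open(log_path, encoding="utf-8") as fh:
        for line in fh:
            match = pattern.search(line)
            if match:
                last = ast.literal_eval(match.group(1))
    return last
```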
## Command Line Arguments

@@ -89,6 +136,7 @@ python src/main.py --enable-udp
- `--log-data`: Enable logging of data content
- `--log-level`: Set logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL)
- `--enable-udp`: Enable UDP proxy alongside TCP
- `--full-debug`: Enable full data logging (entire payload)
## Notes

@@ -96,6 +144,7 @@ python src/main.py --enable-udp
- UDP proxy (if enabled) runs on PROXY_PORT + 1
- Data logging should be used carefully as it may contain sensitive information
- UDP support is experimental and runs as a daemon thread
- HTTPS proxying is handled without decrypting the traffic
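Because the UDP listener sits on `PROXY_PORT + 1`, a quick end-to-end check takes only a few lines of Python. This assumes the example `.env` above (so UDP listens on port 11435) and an upstream that actually answers the datagram:

```python
import socket

with socket.socket(socket.AF_INET, socket.SOCK_DGRAM) as sock:
    sock.settimeout(5.0)
    sock.sendto(b"ping", ("127.0.0.1", 11435))  # PROXY_PORT (11434) + 1
    reply, _ = sock.recvfrom(4096)
    print(reply)
```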
## License

imgs/screenshot.png (new binary file, 1.2 MiB; not shown)

main.py (deleted, 174 lines)

@@ -1,174 +0,0 @@
import socket
import threading
import os
from dotenv import load_dotenv
import logging
from datetime import datetime
import argparse
from collections import defaultdict
# Load environment variables
load_dotenv()
# Configuration
PROXY_HOST = '0.0.0.0' # Listen on all interfaces
PROXY_PORT = int(os.getenv('PROXY_PORT', 8080))
TARGET_HOST = os.getenv('TARGET_HOST', 'localhost')
TARGET_PORT = int(os.getenv('TARGET_PORT', 80))
def setup_logging(log_file=None, log_level=logging.INFO):
    # Configure logging format
    log_format = '%(asctime)s - %(levelname)s - %(message)s'
    # Setup basic configuration
    if log_file:
        logging.basicConfig(
            level=log_level,
            format=log_format,
            handlers=[
                logging.FileHandler(log_file),
                logging.StreamHandler()  # This will also print to stdout
            ]
        )
    else:
        logging.basicConfig(
            level=log_level,
            format=log_format
        )
def parse_args():
    parser = argparse.ArgumentParser(description='Transparent TCP/UDP Proxy with logging capabilities')
    parser.add_argument('--log-file', type=str, help='Path to the log file')
    parser.add_argument('--log-data', action='store_true', help='Enable logging of data content')
    parser.add_argument('--log-level',
                        choices=['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'],
                        default='INFO',
                        help='Set the logging level')
    parser.add_argument('--enable-udp', action='store_true', help='Enable UDP proxy alongside TCP')
    return parser.parse_args()
def handle_tcp_client(client_socket, log_data=False):
    client_address = client_socket.getpeername()
    logging.info(f"New connection from {client_address[0]}:{client_address[1]}")
    # Connect to target server
    target_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    target_socket.connect((TARGET_HOST, TARGET_PORT))
    logging.info(f"Connected to target {TARGET_HOST}:{TARGET_PORT}")

    def forward(source, destination, direction):
        try:
            while True:
                data = source.recv(4096)
                if not data:
                    break
                if log_data:
                    print("[INFO] Logging data is enabled")
                    src = source.getpeername()
                    dst = destination.getpeername()
                    timestamp = datetime.now().isoformat()
                    logging.debug(f"[{direction}] {src[0]}:{src[1]} -> {dst[0]}:{dst[1]}")
                    logging.debug(f"Data: {data[:1024]!r}...")  # Log first 1KB of data
                destination.send(data)
        except Exception as e:
            logging.error(f"Error in {direction}: {str(e)}")
        finally:
            source.close()
            destination.close()
            logging.info(f"Connection closed ({direction})")

    # Create two threads for bidirectional communication
    client_to_target = threading.Thread(
        target=forward,
        args=(client_socket, target_socket, "CLIENT->TARGET")
    )
    target_to_client = threading.Thread(
        target=forward,
        args=(target_socket, client_socket, "TARGET->CLIENT")
    )
    client_to_target.start()
    target_to_client.start()
def handle_udp_proxy(log_data=False):
    udp_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    udp_socket.bind((PROXY_HOST, PROXY_PORT + 1))  # Use next port for UDP
    clients = defaultdict(dict)
    logging.info(f"UDP proxy listening on {PROXY_HOST}:{PROXY_PORT + 1}")

    while True:
        try:
            data, client_addr = udp_socket.recvfrom(4096)
            if log_data:
                logging.debug(f"UDP: {client_addr} -> {TARGET_HOST}:{TARGET_PORT}")
                logging.debug(f"Data: {data[:1024]!r}...")
            # Forward to target
            target_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
            target_socket.sendto(data, (TARGET_HOST, TARGET_PORT))
            # Store socket for this client
            clients[client_addr]['socket'] = target_socket
            clients[client_addr]['target'] = (TARGET_HOST, TARGET_PORT)

            # Handle response in a separate thread to not block
            def handle_response(client_addr, target_socket):
                try:
                    response, _ = target_socket.recvfrom(4096)
                    udp_socket.sendto(response, client_addr)
                    if log_data:
                        logging.debug(f"UDP Response: {TARGET_HOST}:{TARGET_PORT} -> {client_addr}")
                except Exception as e:
                    logging.error(f"UDP Response Error: {str(e)}")
                finally:
                    target_socket.close()

            threading.Thread(target=handle_response,
                             args=(client_addr, target_socket)).start()
        except Exception as e:
            logging.error(f"UDP Error: {str(e)}")
def main():
    # Parse command line arguments
    args = parse_args()
    # Setup logging
    log_level = getattr(logging, args.log_level)
    setup_logging(args.log_file, log_level)

    # Start TCP proxy (main functionality)
    tcp_server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    tcp_server.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    tcp_server.bind((PROXY_HOST, PROXY_PORT))
    tcp_server.listen(100)

    logging.info(f"TCP proxy listening on {PROXY_HOST}:{PROXY_PORT}")
    logging.info(f"Forwarding to {TARGET_HOST}:{TARGET_PORT}")
    logging.info(f"Logging level: {args.log_level}")
    if args.log_file:
        logging.info(f"Logging to file: {args.log_file}")
    if args.log_data:
        logging.info("Data logging is enabled")

    # Start UDP proxy if enabled
    if args.enable_udp:
        udp_thread = threading.Thread(target=handle_udp_proxy,
                                      args=(args.log_data,),
                                      daemon=True)
        udp_thread.start()
        logging.info("UDP proxy enabled")

    # Main TCP loop
    while True:
        client_socket, addr = tcp_server.accept()
        proxy_thread = threading.Thread(
            target=handle_tcp_client,
            args=(client_socket, args.log_data)
        )
        proxy_thread.start()
if __name__ == "__main__":
    main()

src/main.py

@@ -8,6 +8,7 @@ import logging
from proxy.logger import setup_logging
from proxy.tcp_handler import TCPHandler
from proxy.udp_handler import UDPHandler
from proxy.metrics import ConnectionMetrics, MetricsReporter

def parse_args():
    parser = argparse.ArgumentParser(description='Transparent TCP/UDP Proxy with logging capabilities')
@@ -33,8 +34,13 @@ def main():
    args = parse_args()
    setup_logging(args.log_file, getattr(logging, args.log_level))

    # Initialize metrics
    metrics = ConnectionMetrics()
    metrics_reporter = MetricsReporter(metrics, interval=60)
    metrics_reporter.start()

    # Initialize TCP handler
-    tcp_handler = TCPHandler(TARGET_HOST, TARGET_PORT)
+    tcp_handler = TCPHandler(TARGET_HOST, TARGET_PORT, metrics)

    # Setup TCP server
    tcp_server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)

src/proxy/logger.py

@@ -1,20 +1,25 @@
import logging
import logging.handlers
from typing import Optional

def setup_logging(log_file: Optional[str] = None, log_level: int = logging.INFO) -> None:
    log_format = '%(asctime)s - %(levelname)s - %(message)s'
    handlers = [logging.StreamHandler()]

    if log_file:
-        logging.basicConfig(
-            level=log_level,
-            format=log_format,
-            handlers=[
-                logging.FileHandler(log_file),
-                logging.StreamHandler()
-            ]
-        )
-    else:
-        logging.basicConfig(
-            level=log_level,
-            format=log_format
        # Add rotating file handler
        file_handler = logging.handlers.RotatingFileHandler(
            log_file,
            maxBytes=10*1024*1024,  # 10MB
            backupCount=5,
            encoding='utf-8'
        )
        file_handler.setFormatter(logging.Formatter(log_format))
        handlers.append(file_handler)

    logging.basicConfig(
        level=log_level,
        format=log_format,
        handlers=handlers
    )
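As a usage note, callers keep the same one-function API as before; with the rotating handler in place, something like the following gives both stdout and rotated file output (stdout only when `log_file` is `None`):

```python
import logging
from proxy.logger import setup_logging

setup_logging("proxy.log", logging.DEBUG)  # rotating file + stdout
logging.info("proxy started")
```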

src/proxy/metrics.py (new file, 53 lines)

@@ -0,0 +1,53 @@
from dataclasses import dataclass
from datetime import datetime
import threading
import time
import logging

@dataclass
class ConnectionMetrics:
    total_connections: int = 0
    active_connections: int = 0
    bytes_transferred: int = 0
    start_time: datetime = datetime.now()

    def __init__(self):
        self.lock = threading.Lock()

    def increment_connection(self):
        with self.lock:
            self.total_connections += 1
            self.active_connections += 1

    def decrement_active(self):
        with self.lock:
            self.active_connections -= 1

    def add_bytes(self, bytes_count: int):
        with self.lock:
            self.bytes_transferred += bytes_count

    def get_stats(self):
        uptime = (datetime.now() - self.start_time).total_seconds()
        return {
            'total_connections': self.total_connections,
            'active_connections': self.active_connections,
            'bytes_transferred': self.bytes_transferred,
            'uptime_seconds': uptime,
            'bytes_per_second': self.bytes_transferred / uptime if uptime > 0 else 0
        }

class MetricsReporter:
    def __init__(self, metrics: ConnectionMetrics, interval: int = 60):
        self.metrics = metrics
        self.interval = interval
        self.thread = threading.Thread(target=self._report_loop, daemon=True)

    def start(self):
        self.thread.start()

    def _report_loop(self):
        while True:
            stats = self.metrics.get_stats()
            logging.info(f"Performance Metrics: {stats}")
            time.sleep(self.interval)

src/proxy/tcp_handler.py

@@ -1,13 +1,17 @@
import socket
import threading
import logging
import select
from datetime import datetime
from typing import Tuple, Optional
from .metrics import ConnectionMetrics

class TCPHandler:
-    def __init__(self, target_host: str, target_port: int):
+    def __init__(self, target_host: str, target_port: int, metrics: ConnectionMetrics):
        self.target_host = target_host
        self.target_port = target_port
        self.metrics = metrics
        self.buffer_size = 65536  # Increased buffer size for better performance

    def log_data_content(self, data: bytes, src: tuple, dst: tuple, direction: str, full_debug: bool = False) -> None:
        try:
@@ -48,22 +52,31 @@ class TCPHandler:
                 direction: str, log_data: bool, full_debug: bool = False) -> None:
        total_bytes = 0
        try:
-            while True:
-                data = source.recv(4096)
-                if not data:
-                    break
-                total_bytes += len(data)
-                destination.send(data)
-                if log_data:
-                    src = source.getpeername()
-                    dst = destination.getpeername()
-                    self.log_data_content(data, src, dst, direction, full_debug)
            # Set socket options for performance
            source.setsockopt(socket.SOL_SOCKET, socket.SO_RCVBUF, self.buffer_size)
            destination.setsockopt(socket.SOL_SOCKET, socket.SO_SNDBUF, self.buffer_size)

            # Use select for non-blocking I/O
            while True:
                ready = select.select([source], [], [], 1.0)
                if ready[0]:
                    data = source.recv(self.buffer_size)
                    if not data:
                        break
                    total_bytes += len(data)
                    destination.send(data)
                    self.metrics.add_bytes(len(data))

                    if log_data:
                        src = source.getpeername()
                        dst = destination.getpeername()
                        self.log_data_content(data, src, dst, direction, full_debug)
        except Exception as e:
            logging.error(f"Error in {direction}: {str(e)}")
        finally:
            logging.info(f"Connection closed ({direction}). Total bytes transferred: {total_bytes}")
            self.metrics.decrement_active()
            try:
                source.close()
                destination.close()