Disable Logging
Control logging behavior at runtime with the ability to enable, disable, and selectively manage log output.
Disable All Logging
Completely disable logging for performance-critical sections:
Python
from bytehide_logs import Log

Log.info("This message is logged")
Log.disable()
Log.info("This message is NOT logged")
Log.enable()
Log.info("This message is logged again")
Enable Logging
Re-enable logging after it has been disabled:
Python
Log.disable()
# ... perform logging-intensive operations ...
Log.enable()
Disabling in Development vs Production
Development Scenario
Keep full logging enabled for debugging:
Python
import os

if os.getenv("ENV") != "production":
    # Development: keep logging enabled
    pass
else:
    # Production: may disable logging for specific operations
    Log.disable()
Production Scenario
Disable logging during performance-critical operations:
Python
def performance_critical_operation():
    """Operation that shouldn't be logged for performance reasons"""
    Log.disable()
    try:
        # High-throughput operations
        for i in range(1000000):
            process_data(i)
    finally:
        Log.enable()
Conditional Logging
Based on Environment
Python
import os
from bytehide_logs import Log, LogSettings, LogLevel

def setup_logging():
    env = os.getenv("ENV", "development")
    if env == "production":
        # Minimal logging
        Log.initialize(LogSettings(
            minimum_level=LogLevel.CRITICAL,
            console_enabled=False
        ))
    elif env == "staging":
        # Moderate logging
        Log.initialize(LogSettings(
            minimum_level=LogLevel.INFO,
            console_enabled=True
        ))
    else:
        # Full logging
        Log.initialize(LogSettings(
            minimum_level=LogLevel.DEBUG,
            console_enabled=True
        ))
Based on Feature Flags
Python
from bytehide_logs import Log

FEATURE_FLAGS = {
    "debug_mode": False,
    "verbose_logging": False
}

def log_if_enabled(message, level="info"):
    if FEATURE_FLAGS["verbose_logging"]:
        getattr(Log, level)(message)
    elif FEATURE_FLAGS["debug_mode"]:
        Log.debug(message)

log_if_enabled("Detailed operation info")
Disabling Specific Log Levels
Filter by Minimum Level
Instead of disabling completely, filter by level:
Python
from bytehide_logs import Log, LogSettings, LogLevel

# Only WARN and above
settings = LogSettings(minimum_level=LogLevel.WARN)
Log.initialize(settings)

Log.debug("Not logged")
Log.info("Not logged")
Log.warn("Logged")
Log.error("Logged")
Log.critical("Logged")
Suppress Console, Keep File
Disable console output while keeping file persistence:
Python
settings = LogSettings(
    console_enabled=False,  # Disable console
    persist=True,           # Keep file logging
    file_path="./logs/app.log"
)
Log.initialize(settings)
Suppress File, Keep Console
Python
settings = LogSettings(
    console_enabled=True,  # Keep console
    persist=False          # Disable file
)
Log.initialize(settings)
Performance Optimization
Disable Logging During Batch Operations
Python
def process_large_dataset():
    """Process data without logging overhead"""
    Log.info("Starting batch processing")
    Log.disable()
    try:
        for item in large_dataset:
            expensive_operation(item)
            # No logging, better performance
    finally:
        Log.enable()
    Log.info("Batch processing completed")
Use Higher Log Levels for High-Throughput
Python
from bytehide_logs import Log, LogSettings, LogLevel

# For high-throughput applications
settings = LogSettings(
    minimum_level=LogLevel.ERROR,  # Only errors and above
    console_enabled=False,
    persist=True,
    file_path="./logs/app.log"
)
Log.initialize(settings)
Selective Logging
Log Only Errors and Critical
Python
settings = LogSettings(minimum_level=LogLevel.ERROR)
Log.initialize(settings)

# Only these are logged:
Log.error("Error occurred")
Log.critical("Critical issue")

# These are not logged:
Log.info("Operation completed")
Log.warn("Performance degraded")
Contextual Disabling
Python
class DatabaseConnection:
    def __init__(self, verbose=False):
        self.verbose = verbose

    def execute_query(self, query):
        if self.verbose:
            Log.debug(f"Executing: {query}")
        result = self._run_query(query)
        if self.verbose:
            Log.debug(f"Result rows: {len(result)}")
        return result
Toggle Logging at Runtime
Simple Toggle Function
Python
class LoggingManager:
    _enabled = True

    @classmethod
    def disable(cls):
        cls._enabled = False
        Log.disable()

    @classmethod
    def enable(cls):
        cls._enabled = True
        Log.enable()

    @classmethod
    def is_enabled(cls):
        return cls._enabled

# Usage
LoggingManager.disable()
# ... performance-critical section ...
LoggingManager.enable()
With Context Manager
Python
from contextlib import contextmanager

@contextmanager
def logging_disabled():
    """Context manager to temporarily disable logging"""
    Log.disable()
    try:
        yield
    finally:
        Log.enable()

# Usage
with logging_disabled():
    expensive_operation()
    another_expensive_operation()

# Logging is automatically re-enabled
Log.info("Operations completed")
HTTP Request Logging Control
Python
from flask import Flask, request
from bytehide_logs import Log

app = Flask(__name__)

# List of endpoints that shouldn't log requests
QUIET_ENDPOINTS = ["/health", "/metrics", "/status"]

@app.before_request
def log_request():
    if request.path not in QUIET_ENDPOINTS:
        Log.info(f"Request: {request.method} {request.path}")
    else:
        # Disable logging for health checks
        Log.disable()

@app.after_request
def log_response(response):
    if request.path in QUIET_ENDPOINTS:
        Log.enable()
    else:
        Log.info(f"Response: {response.status_code}")
    return response
Database Query Logging Control
Python
from bytehide_logs import Log

class DatabaseLogger:
    def __init__(self, log_queries=True):
        self.log_queries = log_queries

    def execute(self, query):
        if self.log_queries:
            Log.debug(f"Executing query: {query}")
        result = self._db_execute(query)
        if self.log_queries:
            Log.debug(f"Query completed, rows affected: {len(result)}")
        return result
Background Task Logging
Python
import asyncio
from bytehide_logs import Log

async def background_task(verbose=False):
    """Background task with optional logging"""
    if not verbose:
        Log.disable()
    try:
        # Perform background work
        result = await perform_work()
        if verbose:
            Log.info(f"Background task completed: {result}")
    finally:
        Log.enable()
Flush Before Disabling
Ensure logs are written before disabling:
Python
from bytehide_logs import Log

# Flush any pending logs
Log.flush()

# Now disable
Log.disable()

# Perform operations
# ...

# Enable and flush again
Log.enable()
Log.flush()
Testing Configuration
Disable Logging in Tests
Python
import pytest
from bytehide_logs import Log

@pytest.fixture(autouse=True)
def disable_logging():
    """Disable logging for all tests"""
    Log.disable()
    yield
    Log.enable()

def test_something():
    # Logging is disabled during test
    pass
Enable Only Errors in Tests
Python
import pytest
from bytehide_logs import Log, LogSettings, LogLevel

@pytest.fixture(scope="session", autouse=True)
def configure_logging():
    """Configure minimal logging for tests"""
    Log.initialize(LogSettings(
        minimum_level=LogLevel.ERROR,
        console_enabled=False
    ))
Disable Caller Information
Reduce logging overhead by disabling caller info:
Python
settings = LogSettings(
    console_enabled=True,
    include_caller_info=False  # Faster, no file/line info
)
Log.initialize(settings)
Monitoring Logging Status
Python
def get_logging_status():
    """Get current logging configuration"""
    return {
        "console_enabled": settings.console_enabled,
        "persist_enabled": settings.persist,
        "minimum_level": settings.minimum_level.name,
        "include_caller_info": settings.include_caller_info
    }

# Usage
status = get_logging_status()
Log.info(f"Logging status: {status}")
Best Practices
- Temporary Disabling - Use context managers for temporary disabling
- Always Re-enable - Use try/finally to ensure logging is re-enabled
- Test Impact - Verify performance improvements from disabling
- Documentation - Document why logging is disabled for sections
- Monitoring - Log when logging is disabled/enabled for debugging
Complete Example
Python
from bytehide_logs import Log, LogSettings, LogLevel
from contextlib import contextmanager

@contextmanager
def temporary_logging_disabled():
    """Temporarily disable logging"""
    Log.disable()
    try:
        yield
    finally:
        Log.enable()

def setup_application():
    settings = LogSettings(
        console_enabled=True,
        minimum_level=LogLevel.INFO
    )
    Log.initialize(settings)

if __name__ == "__main__":
    setup_application()
    Log.info("Application started")

    # Performance-critical section
    with temporary_logging_disabled():
        for i in range(1000):
            expensive_operation(i)

    Log.info("Processing completed")
    Log.flush()
Next Steps
- Quick Start - Initialize logging
- Log Levels - Filter by severity
- Configuration - All options