/**
* Performance Optimization Module
* @author Ali Eren Bektaş
* @version 2.3.1
*/
/**
 * Collects Core Web Vitals style metrics via PerformanceObserver and applies
 * opt-in front-end optimizations (native lazy loading, async image decoding).
 *
 * Metrics tracked: FCP, LCP, CLS, TTI, TBT. Only FCP/LCP/CLS have observers
 * in this module; the TTI/TBT slots stay null until filled in externally.
 */
export class PerformanceOptimizer {
  /**
   * @param {Object} [config] Optional overrides.
   * @param {Object} [config.thresholds] Per-metric budgets (ms; CLS unitless).
   * @param {boolean} [config.lazyLoading=true] Mark images for lazy loading.
   * @param {boolean} [config.imageOptimization=true] Apply image decode hints.
   * @param {boolean} [config.cssMinification=true] Flag only; not acted on here.
   * @param {boolean} [config.codeRazor=false] NOTE(review): likely a typo for
   *   "codeSplitting" — key kept as-is for backward compatibility.
   */
  constructor(config = {}) {
    // Metric slots stay null until the corresponding observer fires.
    this.metrics = {
      FCP: null, // First Contentful Paint (ms)
      LCP: null, // Largest Contentful Paint (ms)
      CLS: null, // Cumulative Layout Shift (unitless)
      TTI: null, // Time to Interactive (ms)
      TBT: null  // Total Blocking Time (ms)
    };
    // Budgets follow the common "good" Web Vitals boundaries; callers override
    // individual entries via config.thresholds.
    this.thresholds = {
      FCP: 1800, // 1.8s
      LCP: 2500, // 2.5s
      CLS: 0.1,  // 0.1
      TTI: 3800, // 3.8s
      TBT: 200,  // 200ms
      ...config.thresholds
    };
    this.listeners = []; // subscribers notified on every metric update
    this.isMonitoring = false;
    this.optimizations = {
      lazyLoading: config.lazyLoading ?? true,
      imageOptimization: config.imageOptimization ?? true,
      cssMinification: config.cssMinification ?? true,
      codeRazor: config.codeRazor ?? false
    };
    // Initialize system
    this.init();
  }

  /**
   * Registers the supported performance observers and applies the configured
   * optimizations. Safe to call in browsers without PerformanceObserver.
   * @returns {this}
   */
  init() {
    if ('PerformanceObserver' in window) {
      this.registerFCPObserver();
      this.registerLCPObserver();
      // BUG FIX: the previous version also called an undefined
      // registerLayoutShiftObserver(); CLS *is* the layout-shift metric, so a
      // single observer below covers it.
      this.registerCLSObserver();
      this.isMonitoring = true;
      console.info('Performance monitoring initialized successfully');
    } else {
      console.warn('PerformanceObserver API not supported');
    }
    if (this.optimizations.lazyLoading) {
      this.enableLazyLoading();
    }
    if (this.optimizations.imageOptimization) {
      this.optimizeImages();
    }
    return this;
  }

  /**
   * Subscribes to metric updates.
   * @param {(update: {metric: string, value: number, withinBudget: boolean}) => void} listener
   * @returns {this}
   */
  onMetric(listener) {
    this.listeners.push(listener);
    return this;
  }

  /** Observes 'paint' entries and records First Contentful Paint. */
  registerFCPObserver() {
    const fcpObserver = new PerformanceObserver(list => {
      // 'paint' entries include 'first-paint' too, so select FCP by name
      // instead of popping the last entry.
      const fcpEntry = list
        .getEntries()
        .find(entry => entry.name === 'first-contentful-paint');
      if (fcpEntry) {
        this.metrics.FCP = fcpEntry.startTime;
        this.evaluateMetric('FCP', fcpEntry.startTime);
      }
    });
    fcpObserver.observe({ type: 'paint', buffered: true });
  }

  /** Observes largest-contentful-paint entries; the latest candidate wins. */
  registerLCPObserver() {
    const lcpObserver = new PerformanceObserver(list => {
      const entries = list.getEntries();
      const lastEntry = entries[entries.length - 1];
      if (lastEntry) {
        this.metrics.LCP = lastEntry.startTime;
        this.evaluateMetric('LCP', lastEntry.startTime);
      }
    });
    lcpObserver.observe({ type: 'largest-contentful-paint', buffered: true });
  }

  /**
   * Accumulates layout-shift entries into CLS, skipping shifts that follow
   * recent user input (per the CLS definition).
   */
  registerCLSObserver() {
    let clsTotal = 0;
    const clsObserver = new PerformanceObserver(list => {
      for (const entry of list.getEntries()) {
        if (!entry.hadRecentInput) {
          clsTotal += entry.value;
        }
      }
      this.metrics.CLS = clsTotal;
      this.evaluateMetric('CLS', clsTotal);
    });
    clsObserver.observe({ type: 'layout-shift', buffered: true });
  }

  /** Records a metric, notifies subscribers, and warns when a budget is blown. */
  evaluateMetric(metric, value) {
    const threshold = this.thresholds[metric];
    const withinBudget = threshold === undefined || value <= threshold;
    this.listeners.forEach(listener => listener({ metric, value, withinBudget }));
    if (!withinBudget) {
      console.warn(`${metric} (${value}) exceeds budget (${threshold})`);
    }
  }

  /**
   * Marks images for native lazy loading where supported, otherwise falls
   * back to an IntersectionObserver-based shim.
   */
  enableLazyLoading() {
    if ('loading' in HTMLImageElement.prototype) {
      document.querySelectorAll('img').forEach(img => {
        // The IDL getter reflects a default state ("eager") even when no
        // attribute is set, so `!img.loading` was always false — test the
        // content attribute instead.
        if (!img.hasAttribute('loading')) {
          img.loading = 'lazy';
        }
      });
    } else {
      // Fallback lazy loading implementation
      this.implementIntersectionObserver();
    }
  }

  /**
   * Fallback lazy loading: swaps data-src into src the first time an image
   * scrolls into view. No-op if IntersectionObserver is also unavailable.
   */
  implementIntersectionObserver() {
    if (!('IntersectionObserver' in window)) {
      return;
    }
    const io = new IntersectionObserver((entries, observer) => {
      entries.forEach(entry => {
        if (entry.isIntersecting) {
          const img = entry.target;
          if (img.dataset.src) {
            img.src = img.dataset.src;
            delete img.dataset.src;
          }
          observer.unobserve(img); // each image only needs to load once
        }
      });
    });
    document.querySelectorAll('img[data-src]').forEach(img => io.observe(img));
  }

  /** Applies cheap, non-destructive image hints (off-main-thread decode). */
  optimizeImages() {
    document.querySelectorAll('img').forEach(img => {
      if (!img.hasAttribute('decoding')) {
        img.decoding = 'async';
      }
    });
  }

  /**
   * Scores every observed metric against its budget and combines them into a
   * weighted overall score.
   * @returns {{scores: Object, overall: number, metrics: Object}}
   */
  evaluatePerformance() {
    const scores = {};
    let overallScore = 0;
    Object.entries(this.metrics).forEach(([metric, value]) => {
      if (value !== null) {
        const score = this.calculateScore(metric, value);
        scores[metric] = score;
        overallScore += score * this.getMetricWeight(metric);
      }
    });
    return {
      scores,
      overall: Math.round(overallScore * 100) / 100,
      metrics: { ...this.metrics }
    };
  }

  /**
   * Maps a metric value to [0, 1]: 1 while within budget, then decaying as
   * threshold/value once the budget is exceeded.
   */
  calculateScore(metric, value) {
    const threshold = this.thresholds[metric];
    if (threshold === undefined || value <= threshold) {
      return 1;
    }
    return threshold / value;
  }

  /**
   * Relative weight of each metric in the overall score. Weights sum to 1 and
   * are loosely modeled on Lighthouse-style weighting — tune per product.
   */
  getMetricWeight(metric) {
    const weights = { FCP: 0.15, LCP: 0.3, CLS: 0.25, TTI: 0.15, TBT: 0.15 };
    return weights[metric] ?? 0;
  }

  // Advanced optimization techniques would be implemented here
}
"""
Advanced Data Processing Module
Author: Ali Eren Bektaş
Version: 1.4.2
"""
import pandas as pd
import numpy as np
from typing import Dict, List, Optional, Union, Tuple
from dataclasses import dataclass
import logging
from concurrent.futures import ThreadPoolExecutor
from functools import lru_cache
# Module-wide logging: timestamped, namespaced records at INFO and above.
logging.basicConfig(
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
    level=logging.INFO,
)
# Named logger so records from this module are distinguishable downstream.
logger = logging.getLogger(__name__)
@dataclass
class ProcessingConfig:
    """Configuration parameters for the data processor"""
    batch_size: int = 10000              # rows per processing batch
    threads: int = 4                     # worker threads for parallel batches
    normalize: bool = True               # z-score numeric columns per batch
    handle_missing: str = 'interpolate'  # 'interpolate', 'drop', or 'fill'
    outlier_detection: bool = True       # clip numeric outliers (1.5 * IQR)
    cache_results: bool = True           # memoize processed batches per instance


class DataProcessor:
    """
    Advanced data processing and analysis class with optimization
    for large datasets and parallel processing capabilities.

    Typical usage::

        processor = DataProcessor()
        processor.load_data("data.csv")
        result = processor.process()
    """

    def __init__(self, config: Optional[ProcessingConfig] = None):
        """Initialize the processor with configuration.

        Args:
            config: Processing options; defaults to ``ProcessingConfig()``.
        """
        self.config = config or ProcessingConfig()
        self.data: Optional[pd.DataFrame] = None
        self.metadata: Dict = {}
        # Per-instance batch cache. BUG FIX: the previous version used
        # @functools.lru_cache on process_batch, which (a) keeps `self` alive
        # forever, (b) returns stale batches after load_data() replaces the
        # dataset, and (c) ignored config.cache_results entirely.
        self._batch_cache: Dict[int, pd.DataFrame] = {}
        logger.info(f"DataProcessor initialized with config: {self.config}")

    def load_data(self, source: Union[str, pd.DataFrame], **kwargs) -> pd.DataFrame:
        """Load data from a file path (csv/xls/xlsx/json/parquet) or DataFrame.

        Args:
            source: File path or an existing DataFrame (copied, not aliased).
            **kwargs: Forwarded to the underlying pandas reader.

        Returns:
            The loaded DataFrame (also stored on ``self.data``).

        Raises:
            ValueError: If a string path has an unsupported extension.
        """
        try:
            if isinstance(source, str):
                # File path handling — dispatch on extension.
                if source.endswith('.csv'):
                    self.data = pd.read_csv(source, **kwargs)
                elif source.endswith(('.xls', '.xlsx')):
                    self.data = pd.read_excel(source, **kwargs)
                elif source.endswith('.json'):
                    self.data = pd.read_json(source, **kwargs)
                elif source.endswith('.parquet'):
                    self.data = pd.read_parquet(source, **kwargs)
                else:
                    raise ValueError(f"Unsupported file format: {source}")
            else:
                # Copy so later batch processing never mutates the caller's frame.
                self.data = source.copy()
            # Cached batches belong to the previous dataset — discard them.
            self._batch_cache.clear()
            self._collect_metadata()
            logger.info(f"Data loaded: {len(self.data)} rows, {len(self.data.columns)} columns")
            return self.data
        except Exception as e:
            logger.error(f"Error loading data: {str(e)}")
            raise

    def _collect_metadata(self) -> Dict:
        """Collect shape, dtype, missing-value and memory metadata for the dataset."""
        if self.data is None:
            return {}
        self.metadata = {
            'shape': self.data.shape,
            'columns': list(self.data.columns),
            'dtypes': self.data.dtypes.to_dict(),
            'missing_values': self.data.isnull().sum().to_dict(),
            'memory_usage': self.data.memory_usage(deep=True).sum()
        }
        # Add describe() statistics for numeric columns only.
        numeric_cols = self.data.select_dtypes(include=['number']).columns
        if len(numeric_cols) > 0:
            self.metadata['numeric_stats'] = self.data[numeric_cols].describe().to_dict()
        return self.metadata

    def process_batch(self, batch_idx: int) -> pd.DataFrame:
        """Process a single batch of data, memoized when ``config.cache_results``.

        Args:
            batch_idx: Zero-based batch index into ``self.data``.

        Returns:
            The processed batch as a new DataFrame.
        """
        if self.config.cache_results and batch_idx in self._batch_cache:
            return self._batch_cache[batch_idx]
        start_idx = batch_idx * self.config.batch_size
        end_idx = min(start_idx + self.config.batch_size, len(self.data))
        batch = self.data.iloc[start_idx:end_idx].copy()
        # Apply processing steps based on configuration.
        if self.config.normalize:
            batch = self._normalize_batch(batch)
        if self.config.outlier_detection:
            batch = self._detect_outliers(batch)
        if self.config.handle_missing == 'drop':
            # BUG FIX: 'drop' was previously accepted by the config but never
            # actually applied anywhere.
            batch = batch.dropna()
        else:
            batch = self._handle_missing_values(batch)
        if self.config.cache_results:
            self._batch_cache[batch_idx] = batch
        return batch

    def _normalize_batch(self, batch: pd.DataFrame) -> pd.DataFrame:
        """Z-score each numeric column (constant columns left untouched).

        NOTE(review): statistics are computed per batch, not over the whole
        dataset — confirm this matches the intended semantics.
        """
        numeric_cols = batch.select_dtypes(include=['number']).columns
        for col in numeric_cols:
            std = batch[col].std()
            if pd.notna(std) and std > 0:
                batch[col] = (batch[col] - batch[col].mean()) / std
        return batch

    def _detect_outliers(self, batch: pd.DataFrame) -> pd.DataFrame:
        """Clip numeric values to the 1.5*IQR whiskers (winsorize-style)."""
        numeric_cols = batch.select_dtypes(include=['number']).columns
        for col in numeric_cols:
            q1, q3 = batch[col].quantile(0.25), batch[col].quantile(0.75)
            iqr = q3 - q1
            if pd.notna(iqr) and iqr > 0:
                batch[col] = batch[col].clip(q1 - 1.5 * iqr, q3 + 1.5 * iqr)
        return batch

    def _handle_missing_values(self, batch: pd.DataFrame) -> pd.DataFrame:
        """Fill gaps per ``config.handle_missing``: 'interpolate' interpolates
        numeric columns (both directions); 'fill' replaces NaN with 0; any
        other value is a no-op ('drop' is handled in process_batch)."""
        if self.config.handle_missing == 'interpolate':
            numeric_cols = batch.select_dtypes(include=['number']).columns
            if len(numeric_cols) > 0:
                batch[numeric_cols] = batch[numeric_cols].interpolate(limit_direction='both')
        elif self.config.handle_missing == 'fill':
            batch = batch.fillna(0)
        return batch

    def process(self) -> pd.DataFrame:
        """Process the entire dataset in parallel batches and concatenate.

        Returns:
            A new DataFrame of all processed batches (empty frame if no data).
        """
        if self.data is None or len(self.data) == 0:
            logger.warning("No data to process")
            return pd.DataFrame()
        # Ceiling division: the final batch may be short.
        total_batches = (len(self.data) + self.config.batch_size - 1) // self.config.batch_size
        logger.info(f"Processing {total_batches} batches using {self.config.threads} threads")
        with ThreadPoolExecutor(max_workers=self.config.threads) as executor:
            # Submit all batches, then gather in submission order so the
            # concatenated result preserves row order.
            futures = [executor.submit(self.process_batch, i) for i in range(total_batches)]
            processed_data = [future.result() for future in futures]
        result = pd.concat(processed_data, ignore_index=True)
        logger.info(f"Processing complete. Result shape: {result.shape}")
        return result
/**
* Advanced API Service Implementation
*
* @author Ali Eren Bektaş
* @version 3.1.0
*/
declare(strict_types=1);
namespace App\Services;
use App\Contracts\ApiServiceInterface;
use App\Exceptions\ApiException;
use App\Models\ApiRequest;
use App\Models\ApiResponse;
use App\Repositories\CacheRepository;
use App\ValueObjects\RequestParameters;
use GuzzleHttp\Client;
use GuzzleHttp\Exception\GuzzleException;
use Psr\Log\LoggerInterface;
use Symfony\Component\HttpFoundation\Response;
class ApiService implements ApiServiceInterface
{
    /**
     * Default HTTP client options; caller-supplied config is merged over these.
     *
     * @var array<string, mixed>
     */
    private const DEFAULT_CONFIG = [
        'timeout' => 30,
        'connect_timeout' => 10,
        'http_errors' => true,
        'verify' => true,
        'headers' => [
            'Accept' => 'application/json',
            'Content-Type' => 'application/json',
        ],
    ];

    // Supported HTTP methods
    private const SUPPORTED_METHODS = [
        'GET', 'POST', 'PUT', 'PATCH', 'DELETE', 'HEAD', 'OPTIONS'
    ];

    /**
     * Effective configuration (defaults overlaid with constructor config).
     *
     * @var array<string, mixed>
     */
    private readonly array $config;

    /**
     * @param Client $httpClient HTTP client for making requests
     * @param LoggerInterface $logger Logger service
     * @param CacheRepository $cache Cache repository for response caching
     * @param array $config Additional configuration options
     */
    public function __construct(
        private readonly Client $httpClient,
        private readonly LoggerInterface $logger,
        private readonly CacheRepository $cache,
        array $config = []
    ) {
        // BUG FIX: the previous version promoted $config as a readonly property
        // (initialized to []) and then reassigned it here, which throws
        // "Cannot modify readonly property" on every instantiation. The merged
        // value is now the single write to the readonly property.
        $this->config = array_merge(self::DEFAULT_CONFIG, $config);
    }

    /**
     * Make an API request with advanced error handling and caching.
     *
     * Only GET requests with caching enabled on the parameters are served
     * from / written to the cache; error responses (>= 400) are never cached.
     *
     * @param string $method HTTP method
     * @param string $endpoint API endpoint
     * @param RequestParameters $parameters Request parameters
     * @return ApiResponse The API response object
     * @throws ApiException If the method is unsupported or the request fails
     */
    public function request(
        string $method,
        string $endpoint,
        RequestParameters $parameters
    ): ApiResponse {
        // Validate HTTP method (case-insensitive for callers).
        $method = strtoupper($method);
        if (!in_array($method, self::SUPPORTED_METHODS, true)) {
            throw new ApiException(
                sprintf('Unsupported HTTP method: %s', $method),
                Response::HTTP_BAD_REQUEST
            );
        }
        // Prepare request options from the configured defaults + parameters.
        $options = $this->prepareRequestOptions($parameters);
        // Generate cache key if caching is enabled (GET only — other verbs
        // are assumed to have side effects).
        $cacheKey = null;
        if ($parameters->shouldCache() && $method === 'GET') {
            $cacheKey = $this->generateCacheKey($method, $endpoint, $options);
            // Try to get from cache
            $cachedResponse = $this->cache->get($cacheKey);
            if ($cachedResponse !== null) {
                $this->logger->info('Retrieved response from cache', [
                    'endpoint' => $endpoint,
                    'cache_key' => $cacheKey
                ]);
                return $cachedResponse;
            }
        }
        // Log the request; headers are stripped so credentials never reach logs.
        $this->logger->info('Making API request', [
            'method' => $method,
            'endpoint' => $endpoint,
            'options' => array_diff_key($options, ['headers' => true])
        ]);
        // Create request model for tracking
        $request = new ApiRequest(
            method: $method,
            url: $endpoint,
            parameters: $parameters
        );
        try {
            // Make the HTTP request, timing it for the response object.
            $startTime = microtime(true);
            $httpResponse = $this->httpClient->request($method, $endpoint, $options);
            $endTime = microtime(true);
            // Parse response body
            $body = $httpResponse->getBody()->getContents();
            $statusCode = $httpResponse->getStatusCode();
            $responseData = $this->parseResponseBody($body);
            // Create response object (executionTime in milliseconds).
            $response = new ApiResponse(
                statusCode: $statusCode,
                data: $responseData,
                headers: $httpResponse->getHeaders(),
                request: $request,
                executionTime: round(($endTime - $startTime) * 1000, 2)
            );
            // Cache only successful responses when caching was requested.
            if ($cacheKey !== null && $statusCode < 400) {
                $this->cache->set(
                    $cacheKey,
                    $response,
                    $parameters->getCacheTtl()
                );
            }
            return $response;
        } catch (GuzzleException $e) {
            $this->logger->error('API request failed', [
                'method' => $method,
                'endpoint' => $endpoint,
                'error' => $e->getMessage(),
                'code' => $e->getCode()
            ]);
            // Guzzle's code is often the HTTP status; fall back to 502 when absent.
            throw new ApiException(
                sprintf('API request failed: %s', $e->getMessage()),
                $e->getCode() ?: Response::HTTP_BAD_GATEWAY,
                $e
            );
        }
    }

    /**
     * Prepare request options from parameters.
     *
     * Starts from the merged service configuration and layers query params,
     * JSON body, extra headers, and authentication on top.
     *
     * @param RequestParameters $parameters
     * @return array
     */
    private function prepareRequestOptions(RequestParameters $parameters): array
    {
        $options = $this->config;
        // Add query parameters
        if ($parameters->hasQueryParams()) {
            $options['query'] = $parameters->getQueryParams();
        }
        // Add request body (sent as JSON).
        if ($parameters->hasBody()) {
            $options['json'] = $parameters->getBody();
        }
        // Add custom headers — per-request headers win over defaults.
        if ($parameters->hasHeaders()) {
            $options['headers'] = array_merge(
                $options['headers'],
                $parameters->getHeaders()
            );
        }
        // Add authentication if provided ('bearer' header or 'basic' tuple).
        if ($parameters->hasAuthentication()) {
            $auth = $parameters->getAuthentication();
            if (isset($auth['type']) && $auth['type'] === 'bearer') {
                $options['headers']['Authorization'] = 'Bearer ' . $auth['token'];
            } elseif (isset($auth['type']) && $auth['type'] === 'basic') {
                $options['auth'] = [$auth['username'], $auth['password']];
            }
        }
        return $options;
    }

    /**
     * Parse response body based on content.
     *
     * Empty bodies become []; scalar JSON is wrapped as ['data' => ...];
     * non-JSON bodies are returned verbatim as a string.
     *
     * @param string $body Response body
     * @return array|string Parsed response
     */
    private function parseResponseBody(string $body): array|string
    {
        if (empty($body)) {
            return [];
        }
        try {
            $decoded = json_decode($body, true, 512, JSON_THROW_ON_ERROR);
            return is_array($decoded) ? $decoded : ['data' => $decoded];
        } catch (\JsonException $e) {
            // Return raw body if it's not valid JSON
            return $body;
        }
    }

    /**
     * Generate a unique cache key for the request.
     *
     * Only the 'query' option participates in the key so that volatile options
     * (timeouts, auth headers) do not fragment the cache. md5 is fine here —
     * the key is an identifier, not a security boundary.
     *
     * @param string $method HTTP method
     * @param string $endpoint API endpoint
     * @param array $options Request options
     * @return string Cache key
     * @throws \JsonException If the query options cannot be JSON-encoded
     */
    private function generateCacheKey(
        string $method,
        string $endpoint,
        array $options
    ): string {
        // Create a stable representation of options for hashing
        $optionsJson = json_encode(
            array_intersect_key($options, ['query' => true]),
            JSON_THROW_ON_ERROR
        );
        return md5($method . $endpoint . $optionsJson);
    }
}