add all files

This commit is contained in:
Rucus
2026-02-17 09:29:34 -06:00
parent b8c8d67c67
commit 782d203799
21925 changed files with 2433086 additions and 0 deletions


@@ -0,0 +1,171 @@
<?php // phpcs:ignoreFile
/**
* Report Data API
*
* Fetches aggregated data from SQL Server archive for reporting
*/
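// Illustrative request (the tag names below are placeholders, not values from this repo):
//   GET <this endpoint>?tags=TANK1_LEVEL,PUMP2_FLOW&start_date=2026-01-01&end_date=2026-01-31&aggregation=daily
// Response envelope: {"success": bool, "error": string|null, "data": {tag => rows|stats}, "params": {...}}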
header('Content-Type: application/json');
header('Cache-Control: no-cache, must-revalidate');
require __DIR__ . '/../config.php';
$response = ['success' => false, 'error' => null, 'data' => []];
try {
$pdo = getReportDbConnection();
if (!$pdo) {
throw new Exception('Database connection failed');
}
// Parameters
$tags = isset($_GET['tags']) ? array_filter(array_map('trim', explode(',', $_GET['tags']))) : []; // drop empty entries so "tags=" is rejected below
$startDate = $_GET['start_date'] ?? null;
$endDate = $_GET['end_date'] ?? null;
$aggregation = $_GET['aggregation'] ?? 'hourly'; // hourly, daily, raw
$statsOnly = isset($_GET['stats_only']); // Return only min/max/avg
if (empty($tags)) {
throw new Exception('No tags specified');
}
if (!$startDate || !$endDate) {
throw new Exception('Start and end dates required');
}
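// Note: the dates are bound directly into "TimeStamp BETWEEN :start_date AND :end_date",
// so they should be strings SQL Server can convert to datetime (e.g. '2026-01-01' or
// '2026-01-01 06:00:00'); BETWEEN is inclusive on both ends.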
// Validate aggregation
$validAggregations = ['hourly', 'daily', 'raw'];
if (!in_array($aggregation, $validAggregations)) {
$aggregation = 'hourly';
}
// Build results for each tag
$results = [];
foreach ($tags as $tagName) {
$tagName = trim($tagName);
if (empty($tagName)) continue;
// Use JOIN pattern like trends/live/realtime_data.php
// No separate lookup - join archive with id_names directly
if ($statsOnly) {
// Return summary statistics only (fast)
// CAST Value to FLOAT since it's stored as VARCHAR
$sql = "SELECT
MIN(CAST(a.Value AS FLOAT)) as min_val,
MAX(CAST(a.Value AS FLOAT)) as max_val,
AVG(CAST(a.Value AS FLOAT)) as avg_val,
COUNT(*) as sample_count,
MIN(a.TimeStamp) as first_sample,
MAX(a.TimeStamp) as last_sample
FROM dbo.archive a
INNER JOIN dbo.id_names n ON a.ID = n.idnumber
WHERE n.name = :tag_name
AND a.TimeStamp BETWEEN :start_date AND :end_date";
$stmt = $pdo->prepare($sql);
$stmt->execute([
':tag_name' => $tagName,
':start_date' => $startDate,
':end_date' => $endDate
]);
$stats = $stmt->fetch();
if (!$stats || $stats['sample_count'] == 0) {
$results[$tagName] = ['error' => 'No data found for this tag/date range'];
continue;
}
$results[$tagName] = [
'min' => $stats['min_val'] !== null ? round((float)$stats['min_val'], 2) : null,
'max' => $stats['max_val'] !== null ? round((float)$stats['max_val'], 2) : null,
'avg' => $stats['avg_val'] !== null ? round((float)$stats['avg_val'], 2) : null,
'samples' => (int)$stats['sample_count'],
'first' => $stats['first_sample'],
'last' => $stats['last_sample']
];
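// Illustrative shape of a stats entry (placeholder tag name and values):
//   "TANK1_LEVEL" => ['min' => 1.23, 'max' => 9.87, 'avg' => 5.5, 'samples' => 1440,
//                     'first' => '2026-01-01 00:00:12', 'last' => '2026-01-01 23:59:48']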
} elseif ($aggregation === 'hourly') {
// Hourly aggregation
$sql = "SELECT
DATEADD(HOUR, DATEDIFF(HOUR, 0, a.TimeStamp), 0) as time_bucket,
MIN(CAST(a.Value AS FLOAT)) as min_val,
MAX(CAST(a.Value AS FLOAT)) as max_val,
AVG(CAST(a.Value AS FLOAT)) as avg_val,
COUNT(*) as samples
FROM dbo.archive a
INNER JOIN dbo.id_names n ON a.ID = n.idnumber
WHERE n.name = :tag_name
AND a.TimeStamp BETWEEN :start_date AND :end_date
GROUP BY DATEADD(HOUR, DATEDIFF(HOUR, 0, a.TimeStamp), 0)
ORDER BY time_bucket";
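// DATEADD(HOUR, DATEDIFF(HOUR, 0, ts), 0) is the standard SQL Server idiom for
// truncating a timestamp to the top of its hour: DATEDIFF counts whole hours
// since the 1900-01-01 epoch (day 0) and DATEADD adds them back, so every row
// in the same clock hour lands in the same GROUP BY bucket.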
$stmt = $pdo->prepare($sql);
$stmt->execute([
':tag_name' => $tagName,
':start_date' => $startDate,
':end_date' => $endDate
]);
$results[$tagName] = $stmt->fetchAll();
} elseif ($aggregation === 'daily') {
// Daily aggregation
$sql = "SELECT
CAST(a.TimeStamp AS DATE) as time_bucket,
MIN(CAST(a.Value AS FLOAT)) as min_val,
MAX(CAST(a.Value AS FLOAT)) as max_val,
AVG(CAST(a.Value AS FLOAT)) as avg_val,
COUNT(*) as samples
FROM dbo.archive a
INNER JOIN dbo.id_names n ON a.ID = n.idnumber
WHERE n.name = :tag_name
AND a.TimeStamp BETWEEN :start_date AND :end_date
GROUP BY CAST(a.TimeStamp AS DATE)
ORDER BY time_bucket";
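// CAST(TimeStamp AS DATE) drops the time-of-day, so each bucket is one calendar day.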
$stmt = $pdo->prepare($sql);
$stmt->execute([
':tag_name' => $tagName,
':start_date' => $startDate,
':end_date' => $endDate
]);
$results[$tagName] = $stmt->fetchAll();
} else {
// Raw data (limited)
$sql = "SELECT a.TimeStamp, a.Value
FROM dbo.archive a
INNER JOIN dbo.id_names n ON a.ID = n.idnumber
WHERE n.name = :tag_name
AND a.TimeStamp BETWEEN :start_date AND :end_date
ORDER BY a.TimeStamp
OFFSET 0 ROWS FETCH NEXT :limit ROWS ONLY";
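// OFFSET/FETCH requires the ORDER BY above; the row cap is bound as an integer
// parameter below and is presumably defined as REPORT_MAX_ROWS in config.php.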
$stmt = $pdo->prepare($sql);
$stmt->bindValue(':tag_name', $tagName);
$stmt->bindValue(':start_date', $startDate);
$stmt->bindValue(':end_date', $endDate);
$stmt->bindValue(':limit', REPORT_MAX_ROWS, PDO::PARAM_INT);
$stmt->execute();
$results[$tagName] = $stmt->fetchAll();
}
}
$response['success'] = true;
$response['data'] = $results;
$response['params'] = [
'start_date' => $startDate,
'end_date' => $endDate,
'aggregation' => $aggregation,
'stats_only' => $statsOnly
];
} catch (Exception $e) {
$response['error'] = $e->getMessage();
}
echo json_encode($response, JSON_PRETTY_PRINT);


@@ -0,0 +1,42 @@
<?php // phpcs:ignoreFile
/**
* Debug endpoint - list tags from id_names table
*/
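// Illustrative usage ('pump' is a placeholder search term):
//   ?search=pump  -> up to 50 tags whose name or description contains "pump"
//   (no search)   -> first 100 tags ordered by name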
header('Content-Type: application/json');
require __DIR__ . '/../config.php';
$pdo = getReportDbConnection();
if (!$pdo) {
echo json_encode(['error' => 'Connection failed']);
exit;
}
$search = $_GET['search'] ?? '';
if ($search) {
// Search for tags containing the search term
// Two distinct placeholders: PDO only allows reusing a named parameter when
// prepare emulation is enabled, which is not the default for pdo_sqlsrv.
$stmt = $pdo->prepare("
SELECT TOP 50 idnumber, name, description
FROM dbo.id_names
WHERE name LIKE :search_name OR description LIKE :search_desc
ORDER BY name
");
$like = '%' . $search . '%';
$stmt->execute([':search_name' => $like, ':search_desc' => $like]);
} else {
// Return first 100 tags
$stmt = $pdo->prepare("
SELECT TOP 100 idnumber, name, description
FROM dbo.id_names
ORDER BY name
");
$stmt->execute();
}
$tags = $stmt->fetchAll();
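// Illustrative response (placeholder values):
//   { "count": 2, "tags": [ { "idnumber": 101, "name": "PUMP1_FLOW", "description": "..." }, ... ] }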
echo json_encode([
'count' => count($tags),
'tags' => $tags
], JSON_PRETTY_PRINT);