@@ -31,7 +31,7 @@ namespace AyaNova.Api.Controllers
private readonly AyContext ct;
private readonly ILogger<LogFilesController> log;
private readonly ApiServerState serverState;

private const int MAX_RECORDS_BEFORE_DOWNSAMPLING = 400;

/// <summary>
/// ctor
@@ -52,11 +52,12 @@ namespace AyaNova.Api.Controllers
/// <summary>
/// Get all server metrics for the time period specified
/// </summary>
/// <param name="hours">Required value, timespan of hours' worth of records to return from the current moment backwards</param>
/// <param name="maxRecords">Optional maximum number of records to return. If there are more records for the selected time period than this value, the result will be downsampled using the Largest-Triangle-Three-Buckets algorithm</param>
/// <param name="tsStart">Start timestamp UTC</param>
/// <param name="tsEnd">End timestamp UTC</param>
/// <param name="maxRecords">Optional maximum number of records to return. If there are more records for the selected time period than this value, the result will be downsampled. The fixed default maximum is 400 records</param>
/// <returns>Snapshot of metrics</returns>
[HttpGet]
public async Task<IActionResult> GetMetrics([FromQuery] int? hours, [FromQuery] int? maxRecords)
public async Task<IActionResult> GetMetrics([FromQuery] DateTime tsStart, [FromQuery] DateTime tsEnd, [FromQuery] int? maxRecords)
{
if (serverState.IsClosed)
return StatusCode(503, new ApiErrorResponse(serverState.ApiErrorCode, null, serverState.Reason));
@@ -66,98 +67,97 @@ namespace AyaNova.Api.Controllers
return StatusCode(403, new ApiNotAuthorizedResponse());
}
//use specified values or just return all
maxRecords ??= int.MaxValue;
List<MetricMM> MinuteMetrics = new List<MetricMM>();
//Query the data and downsample if required
maxRecords ??= MAX_RECORDS_BEFORE_DOWNSAMPLING;

if (hours != null)
{
DateTime maxDate = DateTime.UtcNow.Subtract(new TimeSpan((int)hours, 0, 0, 0));
MinuteMetrics = await ct.MetricMM.AsNoTracking().Where(z => z.t > maxDate).OrderBy(z => z.t).ToListAsync();
}
else
{
MinuteMetrics = await ct.MetricMM.AsNoTracking().OrderBy(z => z.t).ToListAsync();
}
List<MetricMM> MinuteMetrics = new List<MetricMM>();
MinuteMetrics = await ct.MetricMM.AsNoTracking().Where(z => z.t >= tsStart && z.t <= tsEnd).OrderBy(z => z.t).ToListAsync();

var ret = new
{
MetricMM = new
{
labels = MinuteMetrics.Select(z => z.t).ToArray(),
cpu = MinuteMetrics.Select(z => z.CPU).ToArray(),
gen0 = MinuteMetrics.Select(z => z.Gen0).ToArray(),
gen1 = MinuteMetrics.Select(z => z.Gen1).ToArray(),
gen2 = MinuteMetrics.Select(z => z.Gen2).ToArray(),
allocated = MinuteMetrics.Select(z => z.Allocated).ToArray(),
workingSet = MinuteMetrics.Select(z => z.WorkingSet).ToArray(),
privateBytes = MinuteMetrics.Select(z => z.PrivateBytes).ToArray()
}
};
//Log
await EventLogProcessor.LogEventToDatabaseAsync(new Event(UserIdFromContext.Id(HttpContext.Items), 0, AyaType.Metrics, AyaEvent.Retrieved), ct);

return Ok(ApiOkResponse.Response(ret));

//Downsample? This should work for longer time period metrics because the minute ones are the greatest quantity
if (maxRecords < MinuteMetrics.Count)
{
//yes, so need to return individual labels and downsampled data as they won't sync anymore

var dsCPU = MinuteMetrics.Select(z => new Tuple<double, double>(new DateTimeOffset(z.t).ToUnixTimeSeconds(), z.CPU)).ToList();
dsCPU = Util.DataUtil.LargestTriangleThreeBuckets(dsCPU, (int)maxRecords) as List<Tuple<double, double>>;

var dsAllocated = MinuteMetrics.Select(z => new Tuple<double, double>(new DateTimeOffset(z.t).ToUnixTimeSeconds(), z.Allocated)).ToList();
dsAllocated = Util.DataUtil.LargestTriangleThreeBuckets(dsAllocated, (int)maxRecords) as List<Tuple<double, double>>;

var dsWorkingSet = MinuteMetrics.Select(z => new Tuple<double, double>(new DateTimeOffset(z.t).ToUnixTimeSeconds(), z.WorkingSet)).ToList();
dsWorkingSet = Util.DataUtil.LargestTriangleThreeBuckets(dsWorkingSet, (int)maxRecords) as List<Tuple<double, double>>;

var dsPrivateBytes = MinuteMetrics.Select(z => new Tuple<double, double>(new DateTimeOffset(z.t).ToUnixTimeSeconds(), z.PrivateBytes)).ToList();
dsPrivateBytes = Util.DataUtil.LargestTriangleThreeBuckets(dsPrivateBytes, (int)maxRecords) as List<Tuple<double, double>>;

var dsGen0 = MinuteMetrics.Select(z => new Tuple<double, double>(new DateTimeOffset(z.t).ToUnixTimeSeconds(), z.Gen0)).ToList();
dsGen0 = Util.DataUtil.LargestTriangleThreeBuckets(dsGen0, (int)maxRecords) as List<Tuple<double, double>>;

var dsGen1 = MinuteMetrics.Select(z => new Tuple<double, double>(new DateTimeOffset(z.t).ToUnixTimeSeconds(), z.Gen1)).ToList();
dsGen1 = Util.DataUtil.LargestTriangleThreeBuckets(dsGen1, (int)maxRecords) as List<Tuple<double, double>>;

var dsGen2 = MinuteMetrics.Select(z => new Tuple<double, double>(new DateTimeOffset(z.t).ToUnixTimeSeconds(), z.Gen2)).ToList();
dsGen2 = Util.DataUtil.LargestTriangleThreeBuckets(dsGen2, (int)maxRecords) as List<Tuple<double, double>>;

var ret = new
{
DownSampled = true,
MetricMM = new
{
cpuLabels = dsCPU.Select(z => DateTimeOffset.FromUnixTimeSeconds((long)z.Item1)).ToArray(),
cpu = dsCPU.Select(z => z.Item2).ToArray(),
gen0Labels = dsGen0.Select(z => DateTimeOffset.FromUnixTimeSeconds((long)z.Item1)).ToArray(),
gen0 = dsGen0.Select(z => (int)z.Item2).ToArray(),
gen1Labels = dsGen1.Select(z => DateTimeOffset.FromUnixTimeSeconds((long)z.Item1)).ToArray(),
gen1 = dsGen1.Select(z => (int)z.Item2).ToArray(),
gen2Labels = dsGen2.Select(z => DateTimeOffset.FromUnixTimeSeconds((long)z.Item1)).ToArray(),
gen2 = dsGen2.Select(z => (int)z.Item2).ToArray(),
allocatedLabels = dsAllocated.Select(z => DateTimeOffset.FromUnixTimeSeconds((long)z.Item1)).ToArray(),
allocated = dsAllocated.Select(z => (long)z.Item2).ToArray(),
workingSetLabels = dsWorkingSet.Select(z => DateTimeOffset.FromUnixTimeSeconds((long)z.Item1)).ToArray(),
workingSet = dsWorkingSet.Select(z => (long)z.Item2).ToArray(),
privateBytesLabels = dsPrivateBytes.Select(z => DateTimeOffset.FromUnixTimeSeconds((long)z.Item1)).ToArray(),
privateBytes = dsPrivateBytes.Select(z => (long)z.Item2).ToArray()
}
};
return Ok(ApiOkResponse.Response(ret));

}
else
{
var ret = new
{
DownSampled = false,
MetricMM = new
{
labels = MinuteMetrics.Select(z => z.t).ToArray(),
cpu = MinuteMetrics.Select(z => z.CPU).ToArray(),
gen0 = MinuteMetrics.Select(z => z.Gen0).ToArray(),
gen1 = MinuteMetrics.Select(z => z.Gen1).ToArray(),
gen2 = MinuteMetrics.Select(z => z.Gen2).ToArray(),
allocated = MinuteMetrics.Select(z => z.Allocated).ToArray(),
workingSet = MinuteMetrics.Select(z => z.WorkingSet).ToArray(),
privateBytes = MinuteMetrics.Select(z => z.PrivateBytes).ToArray()
}
};
return Ok(ApiOkResponse.Response(ret));
}
}
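// Example query against this action, with hypothetical timestamp values (the controller's
// route attribute is not shown in this diff):
//   GET {controller-route}?tsStart=2024-06-01T00:00:00Z&tsEnd=2024-06-02T00:00:00Z&maxRecords=400
// If maxRecords is omitted, MAX_RECORDS_BEFORE_DOWNSAMPLING (400) applies; a window holding more
// minute samples than that is returned downsampled (DownSampled = true, per-series label arrays),
// otherwise the raw samples come back with a single shared labels array.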

//------------

/*
{
  "chartData": {
    "labels": [
      "sunday",
      "monday",
      "tuesday",
      "wednesday",
      "thursday",
      "friday",
      "saturday"
    ],
    "thisWeek": [
      20000,
      14000,
      12000,
      15000,
      18000,
      19000,
      22000
    ],
    "lastWeek": [
      19000,
      10000,
      14000,
      14000,
      15000,
      22000,
      24000
    ]
  }
}
*/

// //split out into separate arrays
// //10 digits is epoch seconds
// //List<Tuple<double, double>> cpu=new List<Tuple<double, double>>();
// var cpu = MinuteMetrics.Select(z => new Tuple<double, double>(new DateTimeOffset(z.t).ToUnixTimeSeconds(), z.CPU)).ToList();

// bool DownSampled=false;
// if (maxRecords < MinuteMetrics.Count)
// {
// cpu = Util.DataUtil.LargestTriangleThreeBuckets(cpu, (int)maxRecords) as List<Tuple<double, double>>;
// //downsample it here
// ;//https://github.com/sveinn-steinarsson/flot-downsample/
// DownSampled=true;
// }

// //convert to efficient array of double pairs
// // var v = cpu.Select(z => new double[] { z.Item1, z.Item2 }).ToArray();
// var v = cpu.Select(z => new MetricItem { x= DateTimeOffset.FromUnixTimeSeconds((long)z.Item1), y=z.Item2 }).ToArray();

}
}
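The downsampling path above relies on Util.DataUtil.LargestTriangleThreeBuckets, whose implementation is not part of this diff. For reference, a minimal sketch of the Largest-Triangle-Three-Buckets algorithm it names (after Steinarsson's flot-downsample, linked in the commented-out code above) might look like the following; the class name, signature, and edge-case handling are illustrative assumptions, not the repository's actual code.

using System;
using System.Collections.Generic;

public static class LttbSketch
{
    // data: (x, y) pairs ordered by x; threshold: number of points to keep.
    public static List<Tuple<double, double>> LargestTriangleThreeBuckets(
        List<Tuple<double, double>> data, int threshold)
    {
        if (threshold >= data.Count || threshold <= 2)
            return data; // nothing to downsample

        var sampled = new List<Tuple<double, double>>(threshold);
        sampled.Add(data[0]); // the first point is always kept

        // Interior points are split into (threshold - 2) buckets; first and last are excluded.
        double bucketSize = (double)(data.Count - 2) / (threshold - 2);
        int a = 0; // index of the previously selected point

        for (int i = 0; i < threshold - 2; i++)
        {
            // Average x/y of the *next* bucket forms one corner of the triangle.
            int nextStart = (int)Math.Floor((i + 1) * bucketSize) + 1;
            int nextEnd = Math.Min((int)Math.Floor((i + 2) * bucketSize) + 1, data.Count);
            double avgX = 0, avgY = 0;
            for (int j = nextStart; j < nextEnd; j++) { avgX += data[j].Item1; avgY += data[j].Item2; }
            int nextCount = nextEnd - nextStart;
            avgX /= nextCount;
            avgY /= nextCount;

            // From the current bucket, keep the point forming the largest triangle with the
            // previously selected point and the next bucket's average.
            int rangeStart = (int)Math.Floor(i * bucketSize) + 1;
            int rangeEnd = (int)Math.Floor((i + 1) * bucketSize) + 1;
            double maxArea = -1;
            int maxIndex = rangeStart;
            for (int j = rangeStart; j < rangeEnd; j++)
            {
                double area = Math.Abs(
                    (data[a].Item1 - avgX) * (data[j].Item2 - data[a].Item2) -
                    (data[a].Item1 - data[j].Item1) * (avgY - data[a].Item2)) / 2;
                if (area > maxArea) { maxArea = area; maxIndex = j; }
            }

            sampled.Add(data[maxIndex]);
            a = maxIndex;
        }

        sampled.Add(data[data.Count - 1]); // the last point is always kept
        return sampled;
    }
}

The controller feeds in (Unix seconds, value) tuples per metric series and passes the requested maxRecords as the threshold, so the first and last samples of the selected window are always preserved while each interior bucket contributes its visually most significant point.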