using System;
using System.ComponentModel.DataAnnotations;
using System.Linq;
using System.Collections.Generic;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.Routing;
using Microsoft.AspNetCore.Authorization;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging;
using Newtonsoft.Json.Linq;
using AyaNova.Models;
using AyaNova.Api.ControllerHelpers;
using AyaNova.Biz;
namespace AyaNova.Api.Controllers
{
/// <summary>
/// Server metrics: returns the stored per-minute server performance snapshots
/// (CPU, GC collection counts per generation, memory figures) for charting.
/// </summary>
[ApiController]
[ApiVersion("8.0")]
[Route("api/v{version:apiVersion}/server-metric")]
[Authorize]
public class ServerMetricsController : ControllerBase
{
    private readonly AyContext ct;
    private readonly ILogger log;
    private readonly ApiServerState serverState;

    // Fixed ceiling on points returned per series; larger result sets are downsampled.
    private const int MAX_RECORDS_BEFORE_DOWNSAMPLING = 400;
    // Bytes per megabyte; memory series are reported to the client in MB.
    private const long MB = 1024 * 1024;

    /// <summary>
    /// ctor
    /// </summary>
    /// <param name="dbcontext">EF Core database context</param>
    /// <param name="logger">Logger</param>
    /// <param name="apiServerState">Shared server availability state</param>
    public ServerMetricsController(AyContext dbcontext, ILogger logger, ApiServerState apiServerState)
    {
        ct = dbcontext;
        log = logger;
        serverState = apiServerState;
    }

    /// <summary>
    /// Get all server metrics for the time period specified
    /// </summary>
    /// <param name="tsStart">Start timestamp UTC</param>
    /// <param name="tsEnd">End timestamp UTC</param>
    /// <param name="maxRecords">Optional maximum records to return. If there are more records
    /// for the time period selected than this value the result will be downsampled.
    /// There is a 400 record maximum fixed default.</param>
    /// <returns>Snapshot of metrics; <c>DownSampled</c> indicates which payload shape was produced</returns>
    [HttpGet]
    public async Task<IActionResult> GetMetrics([FromQuery, Required] DateTime? tsStart, [FromQuery, Required] DateTime? tsEnd, [FromQuery] int? maxRecords)
    {
        // Note: the timestamps are nullable AND [Required] so that the regular
        // model-state validation reports their absence as a 400 instead of
        // silently binding DateTime.MinValue.
        if (serverState.IsClosed)
            return StatusCode(503, new ApiErrorResponse(serverState.ApiErrorCode, null, serverState.Reason));
        if (!Authorized.HasReadFullRole(HttpContext.Items, AyaType.Metrics))
            return StatusCode(403, new ApiNotAuthorizedResponse());
        if (!ModelState.IsValid)
            return BadRequest(new ApiErrorResponse(ModelState));

        // Fall back to the fixed default when absent; also coerce non-positive
        // requests, which would otherwise feed a degenerate bucket count to the
        // downsampler.
        if (maxRecords == null || maxRecords <= 0)
            maxRecords = MAX_RECORDS_BEFORE_DOWNSAMPLING;

        // The model binder converts the query parameters to local time, but the
        // stored timestamps are UTC, so convert before querying. Hoisted into
        // locals so the EF provider sees simple constants in the predicate.
        var startUtc = ((DateTime)tsStart).ToUniversalTime();
        var endUtc = ((DateTime)tsEnd).ToUniversalTime();
        var MinuteMetrics = await ct.MetricMM.AsNoTracking()
            .Where(z => z.t >= startUtc && z.t <= endUtc)
            .OrderBy(z => z.t)
            .ToListAsync();

        // Audit the retrieval.
        await EventLogProcessor.LogEventToDatabaseAsync(new Event(UserIdFromContext.Id(HttpContext.Items), 0, AyaType.Metrics, AyaEvent.Retrieved), ct);

        if (maxRecords < MinuteMetrics.Count)
        {
            // Each series is downsampled independently with
            // largest-triangle-three-buckets. Points are (OADate, value) pairs
            // because the algorithm operates on doubles.
            List<Tuple<double, double>> Downsample(List<Tuple<double, double>> points)
            {
                return Util.DataUtil.LargestTriangleThreeBuckets(points, (int)maxRecords) as List<Tuple<double, double>>;
            }

            var dsCPU = Downsample(MinuteMetrics.Select(z => new Tuple<double, double>(z.t.ToOADate(), z.CPU)).ToList());
            var dsAllocated = Downsample(MinuteMetrics.Select(z => new Tuple<double, double>(z.t.ToOADate(), z.Allocated)).ToList());
            var dsWorkingSet = Downsample(MinuteMetrics.Select(z => new Tuple<double, double>(z.t.ToOADate(), z.WorkingSet)).ToList());
            var dsPrivateBytes = Downsample(MinuteMetrics.Select(z => new Tuple<double, double>(z.t.ToOADate(), z.PrivateBytes)).ToList());
            var dsGen0 = Downsample(MinuteMetrics.Select(z => new Tuple<double, double>(z.t.ToOADate(), z.Gen0)).ToList());
            var dsGen1 = Downsample(MinuteMetrics.Select(z => new Tuple<double, double>(z.t.ToOADate(), z.Gen1)).ToList());
            var dsGen2 = Downsample(MinuteMetrics.Select(z => new Tuple<double, double>(z.t.ToOADate(), z.Gen2)).ToList());

            var ret = new
            {
                DownSampled = true,
                MetricMM = new
                {
                    cpuLabels = dsCPU.Select(z => DateTime.FromOADate(z.Item1)).ToArray(),
                    cpu = dsCPU.Select(z => z.Item2).ToArray(),
                    // Union merges the (possibly different) sample points the
                    // downsampler chose for each series into one label axis.
                    genAllLabels = dsGen0.Select(z => DateTime.FromOADate(z.Item1)).Union(dsGen1.Select(z => DateTime.FromOADate(z.Item1))).Union(dsGen2.Select(z => DateTime.FromOADate(z.Item1))).ToArray(),
                    gen0 = dsGen0.Select(z => new MetricInt(DateTime.FromOADate(z.Item1), z.Item2)).ToArray(),
                    gen1 = dsGen1.Select(z => new MetricInt(DateTime.FromOADate(z.Item1), z.Item2)).ToArray(),
                    gen2 = dsGen2.Select(z => new MetricInt(DateTime.FromOADate(z.Item1), z.Item2)).ToArray(),
                    memAllLabels = dsAllocated.Select(z => DateTime.FromOADate(z.Item1)).Union(dsWorkingSet.Select(z => DateTime.FromOADate(z.Item1))).Union(dsPrivateBytes.Select(z => DateTime.FromOADate(z.Item1))).ToArray(),
                    allocated = dsAllocated.Select(z => new MetricLong(DateTime.FromOADate(z.Item1), z.Item2 / MB)).ToArray(),
                    workingSet = dsWorkingSet.Select(z => new MetricLong(DateTime.FromOADate(z.Item1), z.Item2 / MB)).ToArray(),
                    privateBytes = dsPrivateBytes.Select(z => new MetricLong(DateTime.FromOADate(z.Item1), z.Item2 / MB)).ToArray()
                }
            };
            return Ok(ApiOkResponse.Response(ret));
        }
        else
        {
            // Few enough rows: return every point verbatim (flat parallel arrays).
            var ret = new
            {
                DownSampled = false,
                MetricMM = new
                {
                    labels = MinuteMetrics.Select(z => z.t).ToArray(),
                    cpu = MinuteMetrics.Select(z => z.CPU).ToArray(),
                    gen0 = MinuteMetrics.Select(z => z.Gen0).ToArray(),
                    gen1 = MinuteMetrics.Select(z => z.Gen1).ToArray(),
                    gen2 = MinuteMetrics.Select(z => z.Gen2).ToArray(),
                    allocated = MinuteMetrics.Select(z => z.Allocated / MB).ToArray(),
                    workingSet = MinuteMetrics.Select(z => z.WorkingSet / MB).ToArray(),
                    privateBytes = MinuteMetrics.Select(z => z.PrivateBytes / MB).ToArray()
                }
            };
            return Ok(ApiOkResponse.Response(ret));
        }
    }

    //------------
    /// <summary>
    /// Chart point with a long y value. The constructor takes a double because
    /// downsampled series values are doubles; the value is truncated to a whole number.
    /// </summary>
    public class MetricLong
    {
        public DateTime x { get; set; }
        public long y { get; set; }
        public MetricLong(DateTime px, double py)
        {
            x = px;
            y = (long)py;
        }
    }

    /// <summary>
    /// Chart point with an int y value. The constructor takes a double because
    /// downsampled series values are doubles; the value is truncated to a whole number.
    /// </summary>
    public class MetricInt
    {
        public DateTime x { get; set; }
        public int y { get; set; }
        public MetricInt(DateTime px, double py)
        {
            x = px;
            y = (int)py;
        }
    }
    //----------
}
}