using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.Routing;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging;
using Newtonsoft.Json.Linq;
using AyaNova.Models;
using AyaNova.Api.ControllerHelpers;
using AyaNova.Biz;
//using StackExchange.Profiling;

namespace AyaNova.Api.Controllers
{
    // NOTE(review): generic type arguments and XML doc tags appear to have been
    // stripped from the copy this was reviewed against (e.g. Task<T>, ILogger<T>,
    // Tuple<T1,T2>). They are reconstructed below — confirm against the original.

    /// <summary>
    /// Server metrics: memory/CPU, file storage and database usage time series.
    /// All endpoints require the ServerMetrics read-full role.
    /// </summary>
    [ApiController]
    [ApiVersion("8.0")]
    [Route("api/v{version:apiVersion}/server-metric")]
    [Authorize]
    public class ServerMetricsController : ControllerBase
    {
        private readonly AyContext ct;
        private readonly ILogger log;
        private readonly ApiServerState serverState;

        // Hard cap on points returned when the caller does not specify maxRecords.
        private const int DEFAULT_MAX_RECORDS = 400;
        private const long MB = 1024 * 1024;
        private const long KB = 1024;

        /// <summary>
        /// ctor
        /// </summary>
        /// <param name="dbcontext">EF Core database context</param>
        /// <param name="logger">logger</param>
        /// <param name="apiServerState">current API server availability state</param>
        public ServerMetricsController(AyContext dbcontext, ILogger<ServerMetricsController> logger, ApiServerState apiServerState)
        {
            ct = dbcontext;
            log = logger;
            serverState = apiServerState;
        }

        /// <summary>
        /// Common request guard shared by all metric endpoints.
        /// Returns a terminal result (503 server closed, 403 not authorized,
        /// 400 invalid model state) or null when the request may proceed.
        /// </summary>
        private IActionResult GuardRequest()
        {
            if (serverState.IsClosed)
                return StatusCode(503, new ApiErrorResponse(serverState.ApiErrorCode, null, serverState.Reason));

            if (!Authorized.HasReadFullRole(HttpContext.Items, AyaType.ServerMetrics))
                return StatusCode(403, new ApiNotAuthorizedResponse());

            if (!ModelState.IsValid)
                return BadRequest(new ApiErrorResponse(ModelState));

            return null;
        }

        /// <summary>
        /// Downsample an (OADate, value) series to at most maxRecords points
        /// using the Largest-Triangle-Three-Buckets algorithm.
        /// </summary>
        private static List<Tuple<double, double>> Downsample(List<Tuple<double, double>> series, int maxRecords)
        {
            return Util.DataUtil.LargestTriangleThreeBuckets(series, maxRecords) as List<Tuple<double, double>>;
        }

        /// <summary>
        /// Get Memory and CPU server metrics for the time period specified
        /// </summary>
        /// <param name="tsStart">Start timestamp UTC</param>
        /// <param name="tsEnd">End timestamp UTC</param>
        /// <param name="maxRecords">Optional maximum records to return (downsampled). There is a 400 record maximum fixed default</param>
        /// <returns>Snapshot of metrics</returns>
        [HttpGet("memcpu")]
        public async Task<IActionResult> GetMemCPUMetrics([FromQuery, Required] DateTime? tsStart, [FromQuery, Required] DateTime? tsEnd, [FromQuery] int? maxRecords)
        {
            // The timestamps are nullable AND [Required] so a missing value surfaces
            // through ModelState validation instead of binding to default(DateTime).
            var guard = GuardRequest();
            if (guard != null)
                return guard;

            maxRecords ??= DEFAULT_MAX_RECORDS;

            // ToUniversalTime: the query-string parameters are bound as local time,
            // but the stored metric timestamps are UTC.
            var minuteMetrics = await ct.MetricMM.AsNoTracking()
                .Where(z => z.t >= ((DateTime)tsStart).ToUniversalTime() && z.t <= ((DateTime)tsEnd).ToUniversalTime())
                .OrderBy(z => z.t)
                .ToListAsync();

            var dsCPU = Downsample(minuteMetrics.Select(z => new Tuple<double, double>(z.t.ToOADate(), z.CPU)).ToList(), (int)maxRecords);
            var dsAllocated = Downsample(minuteMetrics.Select(z => new Tuple<double, double>(z.t.ToOADate(), z.Allocated)).ToList(), (int)maxRecords);
            var dsWorkingSet = Downsample(minuteMetrics.Select(z => new Tuple<double, double>(z.t.ToOADate(), z.WorkingSet)).ToList(), (int)maxRecords);
            var dsPrivateBytes = Downsample(minuteMetrics.Select(z => new Tuple<double, double>(z.t.ToOADate(), z.PrivateBytes)).ToList(), (int)maxRecords);

            // Memory series are reported in MB; CPU is reported as-is.
            var ret = new
            {
                cpu = dsCPU.Select(z => new MetricDouble(DateTime.FromOADate(z.Item1), z.Item2)).ToArray(),
                allocated = dsAllocated.Select(z => new MetricLong(DateTime.FromOADate(z.Item1), z.Item2 / MB)).ToArray(),
                workingSet = dsWorkingSet.Select(z => new MetricLong(DateTime.FromOADate(z.Item1), z.Item2 / MB)).ToArray(),
                privateBytes = dsPrivateBytes.Select(z => new MetricLong(DateTime.FromOADate(z.Item1), z.Item2 / MB)).ToArray()
            };

            await EventLogProcessor.LogEventToDatabaseAsync(new Event(UserIdFromContext.Id(HttpContext.Items), 0, AyaType.ServerMetrics, AyaEvent.Retrieved), ct);
            return Ok(ApiOkResponse.Response(ret));
        }

        /// <summary>
        /// Get storage server metrics for the time period specified
        /// </summary>
        /// <param name="tsStart">Start timestamp UTC</param>
        /// <param name="tsEnd">End timestamp UTC</param>
        /// <param name="maxRecords">Optional maximum records to return (downsampled). There is a 400 record maximum fixed default</param>
        /// <returns>Snapshot of metrics</returns>
        [HttpGet("storage")]
        public async Task<IActionResult> GetStorageMetrics([FromQuery, Required] DateTime? tsStart, [FromQuery, Required] DateTime? tsEnd, [FromQuery] int? maxRecords)
        {
            var guard = GuardRequest();
            if (guard != null)
                return guard;

            maxRecords ??= DEFAULT_MAX_RECORDS;

            // ToUniversalTime: see GetMemCPUMetrics — parameters bind as local time.
            var dailyMetrics = await ct.MetricDD.AsNoTracking()
                .Where(z => z.t >= ((DateTime)tsStart).ToUniversalTime() && z.t <= ((DateTime)tsEnd).ToUniversalTime())
                .OrderBy(z => z.t)
                .ToListAsync();

            var dsAttachmentFileCount = Downsample(dailyMetrics.Select(z => new Tuple<double, double>(z.t.ToOADate(), z.AttachmentFileCount)).ToList(), (int)maxRecords);
            var dsAttachmentFileSize = Downsample(dailyMetrics.Select(z => new Tuple<double, double>(z.t.ToOADate(), z.AttachmentFileSize)).ToList(), (int)maxRecords);
            var dsAttachmentFilesAvailableSpace = Downsample(dailyMetrics.Select(z => new Tuple<double, double>(z.t.ToOADate(), z.AttachmentFilesAvailableSpace)).ToList(), (int)maxRecords);
            var dsUtilityFileCount = Downsample(dailyMetrics.Select(z => new Tuple<double, double>(z.t.ToOADate(), z.UtilityFileCount)).ToList(), (int)maxRecords);
            var dsUtilityFileSize = Downsample(dailyMetrics.Select(z => new Tuple<double, double>(z.t.ToOADate(), z.UtilityFileSize)).ToList(), (int)maxRecords);
            var dsUtilityFilesAvailableSpace = Downsample(dailyMetrics.Select(z => new Tuple<double, double>(z.t.ToOADate(), z.UtilityFilesAvailableSpace)).ToList(), (int)maxRecords);

            // Counts are reported as-is; sizes/space are reported in MB.
            var ret = new
            {
                attachmentFileCount = dsAttachmentFileCount.Select(z => new MetricLong(DateTime.FromOADate(z.Item1), z.Item2)).ToArray(),
                attachmentFileSize = dsAttachmentFileSize.Select(z => new MetricLong(DateTime.FromOADate(z.Item1), z.Item2 / MB)).ToArray(),
                attachmentFilesAvailableSpace = dsAttachmentFilesAvailableSpace.Select(z => new MetricLong(DateTime.FromOADate(z.Item1), z.Item2 / MB)).ToArray(),
                utilityFileCount = dsUtilityFileCount.Select(z => new MetricLong(DateTime.FromOADate(z.Item1), z.Item2)).ToArray(),
                utilityFileSize = dsUtilityFileSize.Select(z => new MetricLong(DateTime.FromOADate(z.Item1), z.Item2 / MB)).ToArray(),
                utilityFilesAvailableSpace = dsUtilityFilesAvailableSpace.Select(z => new MetricLong(DateTime.FromOADate(z.Item1), z.Item2 / MB)).ToArray()
            };

            await EventLogProcessor.LogEventToDatabaseAsync(new Event(UserIdFromContext.Id(HttpContext.Items), 0, AyaType.ServerMetrics, AyaEvent.Retrieved), ct);
            return Ok(ApiOkResponse.Response(ret));
        }

        /// <summary>
        /// Get database related metrics for the time period specified
        /// </summary>
        /// <param name="tsStart">Start timestamp UTC</param>
        /// <param name="tsEnd">End timestamp UTC</param>
        /// <param name="maxRecords">Optional maximum records to return (downsampled). There is a 400 record maximum fixed default</param>
        /// <returns>Snapshot of metrics</returns>
        [HttpGet("db")]
        public async Task<IActionResult> GetDBMetrics([FromQuery, Required] DateTime? tsStart, [FromQuery, Required] DateTime? tsEnd, [FromQuery] int? maxRecords)
        {
            var guard = GuardRequest();
            if (guard != null)
                return guard;

            maxRecords ??= DEFAULT_MAX_RECORDS;

            //############ DB SIZE TIME SERIES MB
            // ToUniversalTime: see GetMemCPUMetrics — parameters bind as local time.
            var dbMetrics = await ct.MetricDD.AsNoTracking()
                .Where(z => z.t >= ((DateTime)tsStart).ToUniversalTime() && z.t <= ((DateTime)tsEnd).ToUniversalTime())
                .OrderBy(z => z.t)
                .ToListAsync();

            var dsDBTotalSize = Downsample(dbMetrics.Select(z => new Tuple<double, double>(z.t.ToOADate(), z.DBTotalSize)).ToList(), (int)maxRecords);

            //############# TOP TABLES KB
            // PostgreSQL-specific: pg_total_relation_size needs the quoted
            // "schema"."table" identifier, hence the string concatenation in SQL.
            var topTables = new List<MetricNameLongValue>();
            using (var command = ct.Database.GetDbConnection().CreateCommand())
            {
                command.CommandText = @"SELECT table_name, pg_total_relation_size(table_name) AS total_size FROM ( SELECT ('""' || table_schema || '"".""' || table_name || '""') AS table_name FROM information_schema.tables where table_schema not in('pg_catalog','information_schema') ) AS all_tables ORDER BY total_size DESC";

                await ct.Database.OpenConnectionAsync();
                try
                {
                    using (var dr = await command.ExecuteReaderAsync())
                    {
                        while (await dr.ReadAsync())
                        {
                            long tableSize = dr.GetInt64(1);
                            // Strip the quoting and the "public.a" table-name prefix
                            // so the client sees the bare logical table name.
                            string tableName = dr.GetString(0).Replace("\"", "").Replace("public.a", "");
                            if (tableSize > 0)
                            {
                                tableSize = tableSize / KB; // bytes -> KB (integer division)
                            }
                            topTables.Add(new MetricNameLongValue() { name = tableName, value = tableSize });
                        }
                    }
                }
                finally
                {
                    // Always release the connection, even if the reader throws.
                    ct.Database.CloseConnection();
                }
            }

            // NOTE: empty tables all report ~48kb, so anything at or below that is
            // treated as empty and trimmed out.
            topTables = topTables.Where(z => z.value > 48).ToList();

            // NOTE(review): a previous revision also queried pg_database_size() and
            // accumulated an "N others" rollup row; that code path was dead
            // (commented out) and cost an extra DB round-trip per request, so it
            // has been removed.

            var ret = new
            {
                TopTables = topTables.OrderByDescending(z => z.value).ToList(),
                totalSize = dsDBTotalSize.Select(z => new MetricLong(DateTime.FromOADate(z.Item1), z.Item2 / MB)).ToArray()
            };

            await EventLogProcessor.LogEventToDatabaseAsync(new Event(UserIdFromContext.Id(HttpContext.Items), 0, AyaType.ServerMetrics, AyaEvent.Retrieved), ct);
            return Ok(ApiOkResponse.Response(ret));
        }

        //------------

        /// <summary>Chart point with a long y value (truncated from double).</summary>
        public class MetricLong
        {
            public DateTime x { get; set; }
            public long y { get; set; }
            public MetricLong(DateTime px, double py) { x = px; y = (long)py; }
        }

        /// <summary>Chart point with an int y value (truncated from double).</summary>
        public class MetricInt
        {
            public DateTime x { get; set; }
            public int y { get; set; }
            public MetricInt(DateTime px, double py) { x = px; y = (int)py; }
        }

        /// <summary>Chart point with a double y value.</summary>
        public class MetricDouble
        {
            public DateTime x { get; set; }
            public double y { get; set; }
            public MetricDouble(DateTime px, double py) { x = px; y = py; }
        }

        /// <summary>Named long value (e.g. table name + size in KB).</summary>
        public class MetricNameLongValue
        {
            public string name { get; set; }
            public long value { get; set; }
        }

        //----------
    }
}