@@ -35,13 +35,56 @@ namespace AyaNova.Biz
//Gather stats, output to database but only every minute or more
/*
//TODO: figure out the best format to store it in based on what I need at the client end
// TODO: Use one data table per interval, it's more efficient for all values
i.e. for one-minute data use a single table, for 10-minute data use another, otherwise some tables will have empty entries
(tested: storing null in two columns results in the same size, so no saving)

// todo: store data using the Postgres REAL / C# float datatype; it is 38 MB vs 55 MB for double precision with one year of 10-column test data
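A minimal sketch of how that comparison could be run, assuming the ExecQueryAsync helper used for table creation further down; the ametric_double / ametric_real table names are illustrative only:

// Sketch (assumption): create both column-type variants, load identical test rows into each,
// then compare on-disk sizes with pg_total_relation_size
await ExecQueryAsync("CREATE TABLE ametric_double (t timestamp not null, v double precision not null default 0)");
await ExecQueryAsync("CREATE TABLE ametric_real (t timestamp not null, v real not null default 0)");
// after loading the same year of test data into both tables:
// SELECT pg_size_pretty(pg_total_relation_size('ametric_double')), pg_size_pretty(pg_total_relation_size('ametric_real'));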

Make a chart at the client with test data from Digital Ocean to play with
try to replicate their charts to learn how best to do it
Downsampling: should I convert old data to a downsampled form so I can keep more of it, or is this presentation only?
depends on storage space I guess
separate tables per metric?
Seems likely since there would be less data to move around, but if I'm fetching all of it anyway??
hmm... something to consider / experiment with (see the downsampling sketch below)
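A minimal downsampling sketch to experiment with, assuming the one-minute ametriccpu (t, v) layout from the size test below; the ametriccpu_hourly target table is an assumption, not an existing table:

// Sketch (assumption): roll one-minute samples older than 30 days up to hourly averages, then drop the raw rows
await ExecQueryAsync(
    "INSERT INTO ametriccpu_hourly (t, v) " +
    "SELECT date_trunc('hour', t), avg(v) FROM ametriccpu " +
    "WHERE t < now() - interval '30 days' GROUP BY 1");
await ExecQueryAsync("DELETE FROM ametriccpu WHERE t < now() - interval '30 days'");

Whether the rolled-up rows replace the raw ones or only feed the chart is exactly the open question above.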

Some D.O. charts have a max value at the top of the left (y?) axis that changes with the range and values, and some are fixed at 100% or 1.0 and never change that axis

////////////////////////////////////////////////////////////////
TESTING / SCRATCH PAD:

// retention setting defaults to 1 year?
Generate sample data, see how large it would be under various scenarios
i.e. if I gather every minute, how much can I practically store?
Shortest time frame shown for DO is 1-minute intervals (6 hours / 360 entries)

RESULT:
525600 entries (every minute for 1 year) results in 22 MB of space used
Same but with 10 different data columns results in 59 MB used
####### USE ONE TABLE: with individual single-value tables it would be roughly 10 x 22 MB = 220 MB used, so it's worth using one table for all values

query: insert into ametriccpu (t,v) select CURRENT_TIMESTAMP, 58.43239007949476 from generate_series(1, 525600) s(i)

insert into ametriccpu (
  t,v1,v2,v3,v4,v5,v6,v7,v8,v9,v10
)
select
  LOCALTIMESTAMP,
  58.43239007949476,
  0.33006058073955513,
  102.44723488288768,
  46.078341513002755,
  30.23570573933185,
  0.000136518543824419,
  65.8400891412282,
  0.01,
  58.43239007949476,
  58.43239007949476
from generate_series(1, 525600) s(i)
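Both test inserts above stamp every row with the same timestamp; a variant that spaces the rows one minute apart using the series index (a sketch only, kept as raw SQL like the query it follows) would be:

-- sketch (assumption): same load, but with rows spaced one minute apart
insert into ametriccpu (t,v)
select LOCALTIMESTAMP - (s.i * interval '1 minute'), 58.43239007949476
from generate_series(1, 525600) s(i)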

Timestamp, decimal/float value (need to determine this number type)
Number types to handle look like this:
@@ -52,15 +95,8 @@ namespace AyaNova.Biz
disk sectors written: 0.000136518543824419
memory avg: 65.8400891412282
load: 0.01
@@ -5,7 +5,7 @@ namespace AyaNova.Models
{
    public partial class AyContext : DbContext
    {
        public virtual DbSet<MetricCPU> MetricCPU { get; set; }
        public virtual DbSet<MetricMM> MetricMM { get; set; }
        public virtual DbSet<User> User { get; set; }
        public virtual DbSet<UserOptions> UserOptions { get; set; }
        public virtual DbSet<Widget> Widget { get; set; }
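For reference, the MetricCPU entity behind that DbSet presumably looks something like the sketch below, matching the t/v columns created for ametriccpu further down; the property names and types are an inference, not the actual class:

// Sketch (assumption): keyless row shape matching ametriccpu (t timestamp, v double precision)
public partial class MetricCPU
{
    public System.DateTime t { get; set; }
    public double v { get; set; }
}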
@@ -127,7 +127,7 @@ namespace AyaNova.Models
//modelBuilder.Entity<PurchaseOrder>().Property(z => z.Serial).UseIdentityByDefaultColumn();


modelBuilder.Entity<MetricCPU>().HasNoKey();
//modelBuilder.Entity<MetricCPU>().HasNoKey();

///////////////////////////////////////////////////////////////////////
//TODO: this entire block is almost certainly wrong or not required
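If the keyless mapping stays, tying it explicitly to the hand-created table would presumably look like the following; HasNoKey/ToTable/HasColumnName are standard EF Core calls, but treating this as the intended mapping is an assumption:

// Sketch (assumption): map the keyless entity onto the ametriccpu table and its columns
modelBuilder.Entity<MetricCPU>(e =>
{
    e.HasNoKey();
    e.ToTable("ametriccpu");
    e.Property(x => x.t).HasColumnName("t");
    e.Property(x => x.v).HasColumnName("v");
});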
@@ -245,7 +245,7 @@ namespace AyaNova.Util

//METRICS TABLES
//CPU
await ExecQueryAsync("CREATE TABLE ametriccpu (t timestamp not null, v double precision not null default 0)");
await ExecQueryAsync("CREATE TABLE ametricmm (t timestamp not null, v real not null default 0)");

//SEARCH TABLES
@@ -91,10 +91,11 @@ namespace AyaNova.Util
//TEST METRICS SIZE
using (var cct = ServiceProviderProvider.DBContext)
{
    Faker Fake = new Faker();
    var TestCount = 365 * 24 * 60;//525600 minutes in a year
    for (int i = 0; i < TestCount; i++)
    {
        cct.MetricCPU.Add(new MetricCPU() { v = 33.33333 });
        cct.MetricMM.Add(new MetricMM() { v = Fake.Finance.Random.Float() });
    }
    cct.SaveChanges();
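Note that the loop above never assigns a timestamp to the generated rows and stages every entity before a single SaveChanges call. A rough variant for the size test, assuming the entity exposes the t column as a DateTime property and that EF Core 5+ is available for ChangeTracker.Clear():

// Sketch (assumption): minute-spaced timestamps plus periodic flushes so the change tracker stays bounded
var start = System.DateTime.UtcNow.AddMinutes(-TestCount);
for (int i = 0; i < TestCount; i++)
{
    cct.MetricCPU.Add(new MetricCPU() { t = start.AddMinutes(i), v = 33.33333 });
    if ((i + 1) % 10000 == 0)
    {
        cct.SaveChanges();
        cct.ChangeTracker.Clear(); // EF Core 5+
    }
}
cct.SaveChanges();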