This commit is contained in:
2022-12-16 06:01:23 +00:00
parent 26c2ae5cc9
commit effd96143f
310 changed files with 48715 additions and 0 deletions

View File

@@ -0,0 +1,16 @@
using Microsoft.Extensions.Logging;
using NLog.Extensions.Logging;
namespace Sockeye.Util
{
/// <summary>
/// Central access point for the application's shared logging plumbing.
/// Holds the process-wide <see cref="NLogLoggerProvider"/> and hands out
/// category-scoped <see cref="ILogger"/> instances on demand.
/// NOTE(review): <see cref="LoggerProvider"/> must be assigned during startup
/// before any CreateLogger call, otherwise a NullReferenceException results —
/// confirm initialization order against the composition root.
/// </summary>
internal static class ApplicationLogging
{
    // Shared logger instance (naming predates PascalCase convention; kept for caller compatibility).
    internal static ILogger theLogger { get; set; }

    // Provider that actually manufactures loggers; expected to be set once at startup.
    internal static NLogLoggerProvider LoggerProvider { get; set; }

    /// <summary>Creates a logger whose category is the full name of <typeparamref name="T"/>.</summary>
    internal static ILogger CreateLogger<T>()
    {
        var category = typeof(T).FullName;
        return LoggerProvider.CreateLogger(category);
    }

    /// <summary>Creates a logger for an explicit category name.</summary>
    internal static ILogger CreateLogger(string categoryName)
    {
        return LoggerProvider.CreateLogger(categoryName);
    }
}
}

930
server/util/AySchema.cs Normal file
View File

@@ -0,0 +1,930 @@
using System;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using Microsoft.EntityFrameworkCore;
using Sockeye.Models;
namespace Sockeye.Util
{
//Key generator controller
public static class AySchema
{
private static ILogger log;
private static AyContext ct;
/////////////////////////////////////////////////////////////////
/////////// CHANGE THIS ON NEW SCHEMA UPDATE ////////////////////
//!!!!WARNING: BE SURE TO UPDATE THE DbUtil::EmptyBizDataFromDatabaseForSeedingOrImportingAsync WHEN NEW TABLES ADDED!!!!
private const int DESIRED_SCHEMA_LEVEL = 14;
internal const long EXPECTED_COLUMN_COUNT = 385;
internal const long EXPECTED_INDEX_COUNT = 63;
internal const long EXPECTED_CHECK_CONSTRAINTS = 193;
internal const long EXPECTED_FOREIGN_KEY_CONSTRAINTS = 26;
internal const long EXPECTED_VIEWS = 0;
internal const long EXPECTED_ROUTINES = 2;
//!!!!WARNING: BE SURE TO UPDATE THE DbUtil::EmptyBizDataFromDatabaseForSeedingOrImportingAsync WHEN NEW TABLES ADDED!!!!
///////////////////////////////////////// (C385:I63:CC193:FC26:V0:R2)
/*
MAXIMUM POSTGRES OBJECT NAME LENGTH: 63 CHARACTERS
DECIMALS:
=-=-=-=--
DECIMAL(PRECISION,SCALE)
DECIMAL([MAXIMUM DIGITS TOTAL],[DIGITS TO THE RIGHT OF DECIMAL POINT])
(left digits max is precision minus scale)
CURRENCY: DECIMAL(38,18) (to support the potential of cryptocurrencies); the largest Ethereum value fits in this (36 bytes)
TAX/PERCENTAGES/PDF PAGE SCALE: DECIMAL(10,5) largest tax I could find would fit in this, (was 8,5 but Joyce had an item that was 8000% so changed to allow up to 10000%)
Taxes are in face value not fractional value, i.e. "7" not .07 in db
Inventory/incidents/service rate quantity etc general numbers (19,5)
Latitude/longitude 9,6
//DATA TYPES .net to postgres map
//http://www.npgsql.org/doc/types/basic.html
//if need to query a bit field: https://www.ehfeng.com/querying-bitfield-with-sql/
HOW TO INDEX
https://www.postgresqltutorial.com/postgresql-indexes/postgresql-create-index/
Other indexes should be created with care and after a huge load and integration test periodically look for unused indexes and see how they are performing
HOW TO FIND SHITTY INDEXES: https://gist.github.com/jberkus/6b1bcaf7724dfc2a54f3
see core-performance.txt for the relevant queries to view this info
***************************** WARNING: Be careful here, if a standard field is hideable and also its DB SCHEMA is set to NON NULLABLE then the CLIENT end needs to set a default
***************************** Otherwise the hidden field can't be set and the object can't be saved EVER
Official guidance on index analysis
https://www.postgresql.org/docs/13/monitoring-stats.html
*/
#region unused index query
/*
COPY taken 2020-05-21 from link above "jerkus" :)
WITH table_scans as (
SELECT relid,
tables.idx_scan + tables.seq_scan as all_scans,
( tables.n_tup_ins + tables.n_tup_upd + tables.n_tup_del ) as writes,
pg_relation_size(relid) as table_size
FROM pg_stat_user_tables as tables
),
all_writes as (
SELECT sum(writes) as total_writes
FROM table_scans
),
indexes as (
SELECT idx_stat.relid, idx_stat.indexrelid,
idx_stat.schemaname, idx_stat.relname as tablename,
idx_stat.indexrelname as indexname,
idx_stat.idx_scan,
pg_relation_size(idx_stat.indexrelid) as index_bytes,
indexdef ~* 'USING btree' AS idx_is_btree
FROM pg_stat_user_indexes as idx_stat
JOIN pg_index
USING (indexrelid)
JOIN pg_indexes as indexes
ON idx_stat.schemaname = indexes.schemaname
AND idx_stat.relname = indexes.tablename
AND idx_stat.indexrelname = indexes.indexname
WHERE pg_index.indisunique = FALSE
),
index_ratios AS (
SELECT schemaname, tablename, indexname,
idx_scan, all_scans,
round(( CASE WHEN all_scans = 0 THEN 0.0::NUMERIC
ELSE idx_scan::NUMERIC/all_scans * 100 END),2) as index_scan_pct,
writes,
round((CASE WHEN writes = 0 THEN idx_scan::NUMERIC ELSE idx_scan::NUMERIC/writes END),2)
as scans_per_write,
pg_size_pretty(index_bytes) as index_size,
pg_size_pretty(table_size) as table_size,
idx_is_btree, index_bytes
FROM indexes
JOIN table_scans
USING (relid)
),
index_groups AS (
SELECT 'Never Used Indexes' as reason, *, 1 as grp
FROM index_ratios
WHERE
idx_scan = 0
and idx_is_btree
UNION ALL
SELECT 'Low Scans, High Writes' as reason, *, 2 as grp
FROM index_ratios
WHERE
scans_per_write <= 1
and index_scan_pct < 10
and idx_scan > 0
and writes > 100
and idx_is_btree
UNION ALL
SELECT 'Seldom Used Large Indexes' as reason, *, 3 as grp
FROM index_ratios
WHERE
index_scan_pct < 5
and scans_per_write > 1
and idx_scan > 0
and idx_is_btree
and index_bytes > 100000000
UNION ALL
SELECT 'High-Write Large Non-Btree' as reason, index_ratios.*, 4 as grp
FROM index_ratios, all_writes
WHERE
( writes::NUMERIC / ( total_writes + 1 ) ) > 0.02
AND NOT idx_is_btree
AND index_bytes > 100000000
ORDER BY grp, index_bytes DESC )
SELECT reason, schemaname, tablename, indexname,
index_scan_pct, scans_per_write, index_size, table_size
FROM index_groups;
*/
#endregion
static int startingSchema = -1;
public static int currentSchema = -1;
//check and update schema
public static async Task CheckAndUpdateAsync(AyContext context, ILogger logger)
{
ct = context;
log = logger;
//Check if the aschemaversion table exists
bool aySchemaVersionExists = false;
using (var command = ct.Database.GetDbConnection().CreateCommand())
{
command.CommandText = "SELECT * FROM information_schema.tables WHERE table_name = 'aschemaversion'";
await ct.Database.OpenConnectionAsync();
using (var result = await command.ExecuteReaderAsync())
{
if (result.HasRows)
{
aySchemaVersionExists = true;
}
await ct.Database.CloseConnectionAsync();
}
}
//Create schema table (v0)
if (!aySchemaVersionExists)
{
log.LogDebug("aschemaversion table not found, creating now");
//nope, no schema table, ADD it now and set to v1
using (var cm = ct.Database.GetDbConnection().CreateCommand())
{
await ct.Database.OpenConnectionAsync();
cm.CommandText = "CREATE TABLE aschemaversion (schema INTEGER NOT NULL, id TEXT NOT NULL);";
await cm.ExecuteNonQueryAsync();
cm.CommandText = $"insert into aschemaversion (schema, id) values (0,'{Sockeye.Util.Hasher.GenerateSalt()}');";//NOTE: this is where the dbid comes from originally
await cm.ExecuteNonQueryAsync();
await ct.Database.CloseConnectionAsync();
startingSchema = 0;
currentSchema = 0;
}
}
else
{
//get current schema level
using (var cm = ct.Database.GetDbConnection().CreateCommand())
{
log.LogDebug("Fetching current schema version");
cm.CommandText = "SELECT schema FROM aschemaversion;";
await ct.Database.OpenConnectionAsync();
using (var result = await cm.ExecuteReaderAsync())
{
if (result.HasRows)
{
await result.ReadAsync();
currentSchema = startingSchema = result.GetInt32(0);
await ct.Database.CloseConnectionAsync();
log.LogDebug("Sockeye schema version is " + currentSchema.ToString());
}
else
{
await ct.Database.CloseConnectionAsync();
throw new System.Exception("Sockeye->AySchema->CheckAndUpdate: Error reading schema version");
}
}
}
}
//Bail early no update?
if (currentSchema == DESIRED_SCHEMA_LEVEL)
{
log.LogDebug("Current schema is at required schema version " + currentSchema.ToString());
return;
}
log.LogInformation("Sockeye database needs to be updated from schema version {0} to version {1}", currentSchema, DESIRED_SCHEMA_LEVEL);
//************* SCHEMA UPDATES ******************
bool PrimeEmptyDB = false;
//////////////////////////////////////////////////
// v8 initial release TABLES
//
if (currentSchema < 1)
{
LogUpdateMessage(log);
PrimeEmptyDB = true;
//create global biz settings table
await ExecQueryAsync("CREATE TABLE aglobalbizsettings (id INTEGER NOT NULL PRIMARY KEY, "
+ "webaddress TEXT, phone1 TEXT, phone2 TEXT, emailaddress TEXT, postaddress TEXT, postcity TEXT, postregion TEXT, postcountry TEXT, postcode TEXT, "
+ "address TEXT, city TEXT, region TEXT, country TEXT, latitude DECIMAL(9,6), longitude DECIMAL(9,6), "
+ "filtercasesensitive BOOL DEFAULT FALSE, "
+ "customerallowusersettings BOOL DEFAULT FALSE, customerallowusersettingsintags VARCHAR(255) ARRAY "
+ ")");
//create global ops BACKUP settings table
await ExecQueryAsync("CREATE TABLE aglobalopsbackupsettings (id INTEGER NOT NULL PRIMARY KEY, active BOOL NOT NULL, "
+ "backuptime TIMESTAMPTZ, backupsetstokeep int, backupattachments BOOL)");
await ExecQueryAsync("CREATE TABLE aglobalopsnotificationsettings (id INTEGER NOT NULL PRIMARY KEY, smtpdeliveryactive BOOL NOT NULL, "
+ "smtpserveraddress TEXT, smtpaccount TEXT, smtppassword TEXT, connectionsecurity INTEGER NOT NULL default 0, smtpserverport INTEGER, notifyfromaddress TEXT, sockeyeserverurl TEXT)");
//create aevent biz event log table
await ExecQueryAsync("CREATE TABLE aevent (id BIGINT GENERATED ALWAYS AS IDENTITY PRIMARY KEY, created TIMESTAMPTZ NOT NULL, userid BIGINT NOT NULL,"
+ "sockid BIGINT NOT NULL, socktype INTEGER NOT NULL, sockevent INTEGER NOT NULL, textra VARCHAR(255))");
//INDEX: Most selective first as there is more UNIQUE ID's than UNIQUE types
await ExecQueryAsync("CREATE INDEX idx_aevent_sockid_sockType ON aevent (sockid, socktype);");
//TODO: this may be a very low used index, revisit it down the road
await ExecQueryAsync("CREATE INDEX idx_aevent_userid ON aevent (userid);");
//METRICS TABLES
//One minute metrics
await ExecQueryAsync("CREATE TABLE ametricmm (t TIMESTAMPTZ NOT NULL, allocated BIGINT,workingset BIGINT,privatebytes BIGINT,cpu double precision)");
//One day metrics
await ExecQueryAsync("CREATE TABLE ametricdd (t TIMESTAMPTZ NOT NULL, dbtotalsize BIGINT, attachmentfilesize BIGINT, attachmentfilecount BIGINT, attachmentfilesavailablespace BIGINT, utilityfilesize BIGINT, utilityfilecount BIGINT, utilityfilesavailablespace BIGINT)");
//SEARCH TABLES
await ExecQueryAsync("CREATE TABLE asearchdictionary (id BIGINT GENERATED ALWAYS AS IDENTITY PRIMARY KEY, word VARCHAR(255) NOT NULL UNIQUE)");
//Must be UNIQUE and also this is hit a *lot* during searches and also indexing
//On actual testing this index is never used so for now removing it, perhaps it is a case of bad data but I tested with Huge dataset
//await ExecQueryAsync("CREATE UNIQUE INDEX asearchdictionary_word_idx ON asearchdictionary (word);");
//search key
await ExecQueryAsync("CREATE TABLE asearchkey (id BIGINT GENERATED ALWAYS AS IDENTITY PRIMARY KEY, wordid BIGINT NOT NULL REFERENCES asearchdictionary (id), objectid BIGINT NOT NULL, sockType INTEGER NOT NULL)");
//INDEX: Most selective first as there is more UNIQUE ID's than UNIQUE types
//to take advantage of this always query with where objectid=xx and sockType=yy order
//Only delete would use this, but, likely not needed as it's not in a loop
//await ExecQueryAsync("CREATE INDEX asearchkey_typeid_idx ON asearchkey (objectid, sockType );");
//This is what is needed during Searching
//search does a lot of hits on searchkey looking for the wordid and optionally sockType
//In testing this did not pan out, in fact it was much faster to search both with and without a sockType specified to simply have an index on wordid
// await ExecQueryAsync("CREATE INDEX asearchkey_wordid_otype_idx ON asearchkey (wordid, sockType);");
await ExecQueryAsync("CREATE INDEX idx_asearchkey_wordid ON asearchkey (wordid);");
//Search indexing stored procedure
await ExecQueryAsync(@"
CREATE OR REPLACE PROCEDURE public.aydosearchindex(
wordlist TEXT[],
ayobjectid BIGINT,
socktype INTEGER,
cleanfirst boolean)
LANGUAGE 'plpgsql'
AS $BODY$DECLARE
s TEXT;
wordid BIGINT;
BEGIN
IF ayobjectid=0 THEN
RAISE EXCEPTION 'Bad object id --> %', ayobjectid;
END IF;
IF socktype=0 THEN
RAISE EXCEPTION 'Bad object type --> %', socktype;
END IF;
IF cleanfirst=true THEN
delete from asearchkey where objectid=ayobjectid and sockType=socktype;
END IF;
FOREACH s IN ARRAY wordlist
LOOP
SELECT id INTO wordid FROM asearchdictionary WHERE word = s;
IF wordid IS NULL THEN
insert into asearchdictionary (word) values(s) on conflict (word) do update set word=excluded.word returning id into wordid;
insert into asearchkey (wordid,objectid,sockType) values(wordid,ayobjectid,socktype);
ELSE
insert into asearchkey (wordid,objectid,sockType) values(wordid,ayobjectid,socktype);
END IF;
END LOOP;
END;
$BODY$;
");
//Original Name fetcher function, superseded by later updates
await ExecQueryAsync(@"
CREATE OR REPLACE FUNCTION PUBLIC.AYGETNAME(IN AYOBJECTID BIGINT, IN AYATYPE INTEGER,TRANSLATIONID integer) RETURNS TEXT AS $BODY$
DECLARE
aytable TEXT DEFAULT '';
aynamecolumn TEXT DEFAULT 'name';
aytkey TEXT DEFAULT 'no';
returnstr TEXT DEFAULT '';
BEGIN
case socktype
when 0 then aytkey= 'NoType';
when 1 then aytkey= 'Global';
when 2 then return 'FormUserOptions';
when 3 then aytable = 'auser';
when 4 then aytkey= 'ServerState';
when 5 then aytkey= 'License';
when 6 then aytkey= 'LogFile';
when 7 then aytkey= 'PickListTemplate';
when 8 then aytable = 'acustomer';
when 9 then aytkey= 'ServerJob';
when 12 then aytkey= 'ServerMetrics';
when 13 then aytable = 'atranslation';
when 14 then aytkey= 'UserOptions';
when 15 then aytable = 'aheadoffice';
when 17 then aytable = 'afileattachment'; aynamecolumn ='displayfilename';
when 18 then aytable = 'adatalistsavedfilter';
when 19 then aytable = 'aformcustom'; aynamecolumn = 'formkey';
when 47 then aytkey= 'GlobalOps';
when 48 then aytkey= 'BizMetrics';
when 49 then aytkey= 'Backup';
when 50 then aytable = 'ainappnotification';
when 51 then aytkey= 'NotifySubscription';
when 52 then aytable = 'areminder';
when 56 then aytkey= 'OpsNotificationSettings';
when 57 then aytable = 'areport';
when 58 then aytkey= 'DashBoardView';
when 59 then aytable = 'acustomernote'; aynamecolumn = 'notedate';
when 60 then aytable = 'amemo';
when 61 then aytable = 'areview';
when 68 then return format('DataListColumnView %L', ayobjectid);
when 84 then aytkey= 'CustomerNotifySubscription';
else
RETURN returnstr;
end case;
IF aytkey='no' then
EXECUTE format('SELECT %I FROM %I WHERE id = %L', aynamecolumn, aytable, ayobjectid) INTO returnstr;
else
EXECUTE format('select display from atranslationitem where translationid=%L and key=%L', TRANSLATIONID, aytkey) INTO returnstr;
END if;
RETURN returnstr;
END;
$BODY$ LANGUAGE PLPGSQL STABLE");
//Usage: select created, textra, AYGETNAME(aevent.ayid, aevent.socktype) as name from aevent order by created
//create translation TEXT tables
await ExecQueryAsync("CREATE TABLE atranslation (id BIGINT GENERATED ALWAYS AS IDENTITY PRIMARY KEY, name TEXT NOT NULL UNIQUE, baselanguage TEXT NOT NULL, stock BOOL, cjkindex BOOL default false)");
await ExecQueryAsync("CREATE TABLE atranslationitem (id BIGINT GENERATED ALWAYS AS IDENTITY PRIMARY KEY, translationid BIGINT NOT NULL REFERENCES atranslation (id), key TEXT NOT NULL, display TEXT NOT NULL)");
//a lot of queries for subsets of translations
await ExecQueryAsync("CREATE INDEX idx_atranslationitem_key ON atranslationitem (key)");
//Load the default TRANSLATIONS
await Sockeye.Biz.PrimeData.PrimeTranslations();
//Add user table
//!!WARNING: changes here need to be reflected in dbutil::EmptyBizDataFromDatabaseForSeedingOrImportingAsync auser_backup
await ExecQueryAsync("CREATE TABLE auser (id BIGINT GENERATED ALWAYS AS IDENTITY PRIMARY KEY, active BOOL NOT NULL, name TEXT NOT NULL, "
+ "lastlogin TIMESTAMPTZ, login TEXT NOT NULL UNIQUE, password TEXT NOT NULL, salt TEXT NOT NULL, roles INTEGER NOT NULL, currentauthtoken TEXT, "
+ "dlkey TEXT, dlkeyexpire TIMESTAMPTZ, totpsecret TEXT, temptoken TEXT, twofactorenabled BOOL, passwordresetcode TEXT, passwordresetcodeexpire TIMESTAMPTZ, usertype INTEGER NOT NULL, "
+ "employeenumber TEXT, notes TEXT, customerid BIGINT, "
+ "headofficeid BIGINT, vendorid BIGINT, wiki TEXT, customfields TEXT, tags VARCHAR(255) ARRAY)");
//Add user options table
//!!WARNING: changes here need to be reflected in dbutil::EmptyBizDataFromDatabaseForSeedingOrImportingAsync auseroptions_backup
await ExecQueryAsync("CREATE TABLE auseroptions (id BIGINT GENERATED ALWAYS AS IDENTITY PRIMARY KEY, "
+ "userid BIGINT NOT NULL UNIQUE REFERENCES auser (id) ON DELETE CASCADE, translationid BIGINT NOT NULL REFERENCES atranslation (id), languageoverride TEXT, timezoneoverride TEXT, "
+ "currencyname TEXT, hour12 BOOL NOT NULL, emailaddress TEXT, phone1 TEXT, phone2 TEXT, phone3 TEXT, mapurltemplate TEXT, uicolor VARCHAR(12) NOT NULL default '#ffffff')");
// //Prime the db with the default SuperUser account
// await Sockeye.Biz.PrimeData.PrimeSuperUserAccount(ct);
await ExecQueryAsync("CREATE TABLE afileattachment (id BIGINT GENERATED ALWAYS AS IDENTITY PRIMARY KEY, "
+ "attachtoobjectid BIGINT NOT NULL, attachtosockType INTEGER NOT NULL, attachedbyuserid BIGINT NOT NULL REFERENCES auser (id), "
+ "storedfilename TEXT NOT NULL, displayfilename TEXT NOT NULL, contenttype TEXT, lastmodified TIMESTAMPTZ NOT NULL, notes TEXT, exists BOOL NOT NULL, size BIGINT NOT NULL)");
//index required for ops that need to check if file already in db (delete, count refs etc)
//LOOKAT: isn't this useless without the ID as well or is that not fetched?
await ExecQueryAsync("CREATE INDEX idx_afileattachment_storedfilename ON afileattachment (storedfilename);");
//index for the common issue of checking if an object has an attachment and retrieving them
//note always query (where clause) in this same order for best performance
await ExecQueryAsync("CREATE INDEX idx_afileattachment_attachtoobjectid_attachtosockType ON afileattachment (attachtoobjectid, attachtosockType );");
await ExecQueryAsync("CREATE TABLE aopsjob (gid uuid PRIMARY KEY, name TEXT NOT NULL, created TIMESTAMPTZ NOT NULL, exclusive BOOL NOT NULL, "
+ "startafter TIMESTAMPTZ NOT NULL, jobtype INTEGER NOT NULL, subtype INTEGER, objectid BIGINT, sockType INTEGER, jobstatus INTEGER NOT NULL, jobinfo TEXT)");
await ExecQueryAsync("CREATE TABLE aopsjoblog (gid uuid PRIMARY KEY, jobid uuid NOT NULL, created TIMESTAMPTZ NOT NULL, statustext TEXT NOT NULL)");
await ExecQueryAsync("CREATE TABLE adatalistsavedfilter (id BIGINT GENERATED ALWAYS AS IDENTITY PRIMARY KEY, userid BIGINT NOT NULL, name TEXT NOT NULL, public BOOL NOT NULL, "
+ "defaultfilter BOOL NOT NULL, listkey VARCHAR(255) NOT NULL, filter TEXT)");
await ExecQueryAsync("CREATE TABLE adatalistcolumnview (id BIGINT GENERATED ALWAYS AS IDENTITY PRIMARY KEY, userid BIGINT NOT NULL, "
+ "listkey VARCHAR(255) NOT NULL, columns TEXT, sort TEXT)");
await ExecQueryAsync("CREATE TABLE atag (id BIGINT GENERATED ALWAYS AS IDENTITY PRIMARY KEY, name TEXT NOT NULL UNIQUE, refcount BIGINT NOT NULL)");
await ExecQueryAsync("CREATE TABLE aformuseroptions (id BIGINT GENERATED ALWAYS AS IDENTITY PRIMARY KEY, "
+ "userid BIGINT NOT NULL REFERENCES auser ON DELETE CASCADE, formkey VARCHAR(255) NOT NULL, options TEXT NOT NULL)");
await ExecQueryAsync("CREATE TABLE aformcustom (id BIGINT GENERATED ALWAYS AS IDENTITY PRIMARY KEY, "
+ "formkey VARCHAR(255) NOT NULL, template TEXT, UNIQUE(formkey))");
await ExecQueryAsync("CREATE TABLE apicklisttemplate (id INTEGER NOT NULL PRIMARY KEY, "
+ "template TEXT)");
//MEMO
await ExecQueryAsync("CREATE TABLE amemo (id BIGINT GENERATED ALWAYS AS IDENTITY PRIMARY KEY, name TEXT NOT NULL, "
+ "notes TEXT, wiki TEXT, customfields TEXT, tags VARCHAR(255) ARRAY, "
+ "sent TIMESTAMPTZ NOT NULL, viewed BOOL default false, replied BOOL default false, fromid BIGINT NOT NULL REFERENCES auser(id), toid BIGINT NOT NULL REFERENCES auser(id) )");
//REMINDER
await ExecQueryAsync("CREATE TABLE areminder (id BIGINT GENERATED ALWAYS AS IDENTITY PRIMARY KEY, name TEXT NOT NULL, "
+ "notes TEXT, wiki TEXT, customfields TEXT, tags VARCHAR(255) ARRAY, "
+ "startdate TIMESTAMPTZ NOT NULL, stopdate TIMESTAMPTZ NOT NULL, userid BIGINT NOT NULL REFERENCES auser(id) ON DELETE CASCADE, color VARCHAR(12) NOT NULL default '#ffffff')");
await ExecQueryAsync("CREATE INDEX idx_areminder_userid ON areminder (userid);");
await ExecQueryAsync("CREATE INDEX idx_areminder_startdate ON areminder (startdate);");
await ExecQueryAsync("CREATE INDEX idx_areminder_stopdate ON areminder (stopdate);");
//REVIEW
await ExecQueryAsync("CREATE TABLE areview (id BIGINT GENERATED ALWAYS AS IDENTITY PRIMARY KEY, name TEXT NOT NULL, "
+ "notes TEXT, wiki TEXT, customfields TEXT, tags VARCHAR(255) ARRAY, "
+ "reviewdate TIMESTAMPTZ NOT NULL, completeddate TIMESTAMPTZ NULL, completionnotes TEXT, userid BIGINT NOT NULL REFERENCES auser(id) ON DELETE CASCADE, "
+ "assignedbyuserid BIGINT NOT NULL REFERENCES auser(id), sockType INTEGER NOT NULL, objectid BIGINT NOT NULL)");
await ExecQueryAsync("CREATE INDEX idx_areview_objectid_sockType ON areview (objectid, sockType );");
await ExecQueryAsync("CREATE INDEX idx_areview_userid ON areview (userid);");
await ExecQueryAsync("CREATE INDEX idx_areview_reviewdate ON areview (reviewdate);");
await ExecQueryAsync("CREATE INDEX idx_areview_completeddate ON areview (completeddate);");
//CUSTOMER
await ExecQueryAsync("CREATE TABLE acustomer (id BIGINT GENERATED ALWAYS AS IDENTITY PRIMARY KEY, name TEXT NOT NULL UNIQUE, active BOOL NOT NULL, "
+ "notes TEXT, wiki TEXT, customfields TEXT, tags VARCHAR(255) ARRAY, "
+ "webaddress TEXT, alertnotes TEXT, billheadoffice BOOL, technotes TEXT, accountnumber TEXT, "
+ "phone1 TEXT, phone2 TEXT, phone3 TEXT, phone4 TEXT, phone5 TEXT, emailaddress TEXT, "
+ "postaddress TEXT, postcity TEXT, postregion TEXT, postcountry TEXT, postcode TEXT, address TEXT, city TEXT, region TEXT, country TEXT, latitude DECIMAL(9,6), longitude DECIMAL(9,6) "
+ ")");
await ExecQueryAsync("ALTER TABLE auser ADD FOREIGN KEY (customerid) REFERENCES acustomer(id)");
//CUSTOMER NOTES
await ExecQueryAsync("CREATE TABLE acustomernote (id BIGINT GENERATED ALWAYS AS IDENTITY PRIMARY KEY, "
+ "customerid BIGINT NOT NULL REFERENCES acustomer(id), userid BIGINT NOT NULL REFERENCES auser(id), "
+ "notedate TIMESTAMPTZ NOT NULL, notes TEXT, tags VARCHAR(255) ARRAY )");
//HEADOFFICE
await ExecQueryAsync("CREATE TABLE aheadoffice (id BIGINT GENERATED ALWAYS AS IDENTITY PRIMARY KEY, name TEXT NOT NULL UNIQUE, active BOOL NOT NULL, "
+ "notes TEXT, wiki TEXT, customfields TEXT, tags VARCHAR(255) ARRAY,"
+ "webaddress TEXT, accountnumber TEXT, "
+ "phone1 TEXT, phone2 TEXT, phone3 TEXT, phone4 TEXT, phone5 TEXT, emailaddress TEXT, "
+ "postaddress TEXT, postcity TEXT, postregion TEXT, postcountry TEXT, postcode TEXT, address TEXT, city TEXT, region TEXT, country TEXT, latitude DECIMAL(9,6), longitude DECIMAL(9,6) "
+ " )");
await ExecQueryAsync("ALTER TABLE acustomer ADD column headofficeid BIGINT NULL REFERENCES aheadoffice");
await ExecQueryAsync("ALTER TABLE auser ADD FOREIGN KEY (headofficeid) REFERENCES aheadoffice(id)");
//NOTIFICATION
await ExecQueryAsync("CREATE TABLE anotifysubscription (id BIGINT GENERATED ALWAYS AS IDENTITY PRIMARY KEY, "
+ "userid BIGINT NOT NULL REFERENCES auser (id) ON DELETE CASCADE, socktype INTEGER NOT NULL, eventtype INTEGER NOT NULL, advancenotice INTERVAL NOT NULL, "
+ "idvalue BIGINT NOT NULL, decvalue DECIMAL(38,18) NOT NULL, agevalue INTERVAL NOT NULL, deliverymethod INTEGER NOT NULL, "
+ "deliveryaddress TEXT, linkreportid BIGINT, tags VARCHAR(255) ARRAY)");
await ExecQueryAsync("CREATE TABLE anotifyevent (id BIGINT GENERATED ALWAYS AS IDENTITY PRIMARY KEY, created TIMESTAMPTZ NOT NULL, "
+ "socktype INTEGER NOT NULL, objectid BIGINT NOT NULL, name TEXT NOT NULL, eventtype INTEGER NOT NULL, notifysubscriptionid BIGINT NOT NULL REFERENCES anotifysubscription(id) ON DELETE CASCADE, "
+ "userid BIGINT NOT NULL REFERENCES auser (id) ON DELETE CASCADE, eventdate TIMESTAMPTZ NOT NULL, decvalue DECIMAL(38,18) NULL, message TEXT)");
await ExecQueryAsync("CREATE TABLE ainappnotification (id BIGINT GENERATED ALWAYS AS IDENTITY PRIMARY KEY, userid BIGINT NOT NULL REFERENCES auser (id) ON DELETE CASCADE, "
+ "created TIMESTAMPTZ NOT NULL, socktype INTEGER NOT NULL, objectid BIGINT NOT NULL, name TEXT NOT NULL, agevalue INTERVAL, eventtype INTEGER NOT NULL, "
+ "decvalue DECIMAL(38,18) NULL, notifysubscriptionid BIGINT NOT NULL REFERENCES anotifysubscription(id) ON DELETE CASCADE, message TEXT, fetched BOOL NOT NULL)");
await ExecQueryAsync("CREATE TABLE anotifydeliverylog (id BIGINT GENERATED ALWAYS AS IDENTITY PRIMARY KEY, processed TIMESTAMPTZ NOT NULL, "
+ "objectid BIGINT NOT NULL, notifysubscriptionid BIGINT NOT NULL, fail BOOL NOT NULL, error TEXT)");
//CUSTOMER "proxy" NOTIFICATION
await ExecQueryAsync("CREATE TABLE acustomernotifysubscription (id BIGINT GENERATED ALWAYS AS IDENTITY PRIMARY KEY, "
+ "translationid BIGINT NOT NULL REFERENCES atranslation (id) NOT NULL, languageoverride TEXT NOT NULL, timezoneoverride TEXT NOT NULL, "
+ "currencyname TEXT NOT NULL, hour12 BOOL NOT NULL, "
+ "customertags VARCHAR(255) ARRAY, socktype INTEGER NOT NULL, eventtype INTEGER NOT NULL, advancenotice INTERVAL NOT NULL, "
+ "idvalue BIGINT NOT NULL, decvalue DECIMAL(38,18) NOT NULL, agevalue INTERVAL NOT NULL, "
+ "linkreportid BIGINT, template TEXT NOT NULL, subject TEXT NOT NULL, tags VARCHAR(255) ARRAY)");
await ExecQueryAsync("CREATE TABLE acustomernotifyevent (id BIGINT GENERATED ALWAYS AS IDENTITY PRIMARY KEY, created TIMESTAMPTZ NOT NULL, "
+ "socktype INTEGER NOT NULL, objectid BIGINT NOT NULL, name TEXT NOT NULL, eventtype INTEGER NOT NULL, customernotifysubscriptionid BIGINT NOT NULL REFERENCES acustomernotifysubscription(id) ON DELETE CASCADE, "
+ "customerid BIGINT NOT NULL REFERENCES acustomer (id) ON DELETE CASCADE, eventdate TIMESTAMPTZ NOT NULL, decvalue DECIMAL(38,18) NULL)");
await ExecQueryAsync("CREATE TABLE acustomernotifydeliverylog (id BIGINT GENERATED ALWAYS AS IDENTITY PRIMARY KEY, processed TIMESTAMPTZ NOT NULL, "
+ "objectid BIGINT NOT NULL, customernotifysubscriptionid BIGINT NOT NULL, fail BOOL NOT NULL, error TEXT)");
//LOGO
await ExecQueryAsync("CREATE TABLE alogo (id BIGINT GENERATED ALWAYS AS IDENTITY PRIMARY KEY, "
+ "large bytea, largetype TEXT, medium bytea, mediumtype TEXT, small bytea, smalltype TEXT)");
//REPORTS
await ExecQueryAsync("CREATE TABLE areport (id BIGINT GENERATED ALWAYS AS IDENTITY PRIMARY KEY, name TEXT NOT NULL, active BOOL NOT NULL, "
+ "notes TEXT, roles INTEGER NOT NULL, sockType INTEGER NOT NULL, includewoitemdescendants BOOL, template TEXT, style TEXT, jsprerender TEXT, jshelpers TEXT, rendertype INTEGER NOT NULL, "
+ "headertemplate TEXT, footertemplate TEXT, displayheaderfooter BOOL, paperformat INTEGER NOT NULL, landscape BOOL, marginoptionsbottom TEXT, "
+ "marginoptionsleft TEXT, marginoptionsright TEXT, marginoptionstop TEXT, pageranges TEXT, prefercsspagesize BOOL, printbackground BOOL, scale DECIMAL(10,5), UNIQUE(name,sockType))");
//Load the stock REPORT TEMPLATES
await Sockeye.Biz.PrimeData.PrimeReportTemplates();
//DASHBOARD
await ExecQueryAsync("CREATE TABLE adashboardview (id BIGINT GENERATED ALWAYS AS IDENTITY PRIMARY KEY, userid BIGINT NOT NULL UNIQUE, view TEXT NOT NULL)");
await SetSchemaLevelAsync(++currentSchema);
}
//////////////////////////////////////////////////
//
// Beta .2 new translation keys for import feature
//
if (currentSchema < 2)
{
LogUpdateMessage(log);
//english translations
await ExecQueryAsync("INSERT INTO atranslationitem(translationid,key,display) SELECT t.id, 'ImportNewRecords', 'Import new records' FROM atranslation t where t.baselanguage = 'en'");
await ExecQueryAsync("INSERT INTO atranslationitem(translationid,key,display) SELECT t.id, 'UpdateExistingRecords', 'Update existing records' FROM atranslation t where t.baselanguage = 'en'");
await ExecQueryAsync("INSERT INTO atranslationitem(translationid,key,display) SELECT t.id, 'AdminImportUpdateWarning', 'Warning: you are about to permanently change multiple objects.\r\nAre you sure?' FROM atranslation t where t.baselanguage = 'en'");
await ExecQueryAsync("INSERT INTO atranslationitem(translationid,key,display) SELECT t.id, 'FileToImport', 'File to import' FROM atranslation t where t.baselanguage = 'en'");
await ExecQueryAsync("INSERT INTO atranslationitem(translationid,key,display) SELECT t.id, 'ProcessCompleted', 'Process completed' FROM atranslation t where t.baselanguage = 'en'");
//spanish translations
await ExecQueryAsync("INSERT INTO atranslationitem(translationid,key,display) SELECT t.id, 'ImportNewRecords', 'Importar nuevos registros' FROM atranslation t where t.baselanguage = 'es'");
await ExecQueryAsync("INSERT INTO atranslationitem(translationid,key,display) SELECT t.id, 'UpdateExistingRecords', 'Actualizar registros existentes' FROM atranslation t where t.baselanguage = 'es'");
await ExecQueryAsync("INSERT INTO atranslationitem(translationid,key,display) SELECT t.id, 'AdminImportUpdateWarning', 'Advertencia: está a punto de cambiar varios objetos de forma permanente.\r\n¿Está seguro?' FROM atranslation t where t.baselanguage = 'es'");
await ExecQueryAsync("INSERT INTO atranslationitem(translationid,key,display) SELECT t.id, 'FileToImport', 'Archivo a importar' FROM atranslation t where t.baselanguage = 'es'");
await ExecQueryAsync("INSERT INTO atranslationitem(translationid,key,display) SELECT t.id, 'ProcessCompleted', 'Proceso completado' FROM atranslation t where t.baselanguage = 'es'");
//french translations
await ExecQueryAsync("INSERT INTO atranslationitem(translationid,key,display) SELECT t.id, 'ImportNewRecords', 'Importer de nouveaux enregistrements' FROM atranslation t where t.baselanguage = 'fr'");
await ExecQueryAsync("INSERT INTO atranslationitem(translationid,key,display) SELECT t.id, 'UpdateExistingRecords', 'Mettre à jour les enregistrements existants' FROM atranslation t where t.baselanguage = 'fr'");
await ExecQueryAsync("INSERT INTO atranslationitem(translationid,key,display) SELECT t.id, 'AdminImportUpdateWarning', 'Avertissement: vous êtes sur le point de modifier définitivement plusieurs objets.\r\nÊtes-vous sûr?' FROM atranslation t where t.baselanguage = 'fr'");
await ExecQueryAsync("INSERT INTO atranslationitem(translationid,key,display) SELECT t.id, 'FileToImport', 'Fichier à importer' FROM atranslation t where t.baselanguage = 'fr'");
await ExecQueryAsync("INSERT INTO atranslationitem(translationid,key,display) SELECT t.id, 'ProcessCompleted', 'Processus terminé' FROM atranslation t where t.baselanguage = 'fr'");
//german translations
await ExecQueryAsync("INSERT INTO atranslationitem(translationid,key,display) SELECT t.id, 'ImportNewRecords', 'Importieren Sie neue Datensätze' FROM atranslation t where t.baselanguage = 'de'");
await ExecQueryAsync("INSERT INTO atranslationitem(translationid,key,display) SELECT t.id, 'UpdateExistingRecords', 'Aktualisieren Sie vorhandene Datensätze' FROM atranslation t where t.baselanguage = 'de'");
await ExecQueryAsync("INSERT INTO atranslationitem(translationid,key,display) SELECT t.id, 'AdminImportUpdateWarning', 'Warnung: Sie sind dabei, mehrere Objekte dauerhaft zu ändern.\r\nSind Sie sicher?' FROM atranslation t where t.baselanguage = 'de'");
await ExecQueryAsync("INSERT INTO atranslationitem(translationid,key,display) SELECT t.id, 'FileToImport', 'Zu importierende Datei' FROM atranslation t where t.baselanguage = 'de'");
await ExecQueryAsync("INSERT INTO atranslationitem(translationid,key,display) SELECT t.id, 'ProcessCompleted', 'Prozess abgeschlossen' FROM atranslation t where t.baselanguage = 'de'");
await SetSchemaLevelAsync(2);
}
////////////////////////////////////////////////
// rc 1 remove default rate from loan unit
//
if (currentSchema < 3)
{
LogUpdateMessage(log);
await ExecQueryAsync("DELETE FROM atranslationitem where key = 'LoanUnitDefaultRate'");
await SetSchemaLevelAsync(3);
}
////////////////////////////////////////////////
// rc 2 integration objects for QBI etc
//
if (currentSchema < 4)
{
LogUpdateMessage(log);
//INTEGRATION
await ExecQueryAsync("CREATE TABLE aintegration (id BIGINT GENERATED ALWAYS AS IDENTITY PRIMARY KEY, integrationappid uuid NOT NULL UNIQUE, name TEXT NOT NULL UNIQUE, active BOOL NOT NULL, "
+ "integrationdata TEXT )");
//INTEGRATIONITEM
await ExecQueryAsync("CREATE TABLE aintegrationitem (id BIGINT GENERATED ALWAYS AS IDENTITY PRIMARY KEY, integrationid BIGINT NOT NULL REFERENCES aintegration ON DELETE CASCADE, "
+ "sockType INTEGER NOT NULL, objectid BIGINT NOT NULL, integrationitemid TEXT NOT NULL, integrationitemname TEXT, lastsync TIMESTAMPTZ, integrationitemdata TEXT "
+ ")");
//INTEGRATIONLOG
await ExecQueryAsync("CREATE TABLE aintegrationlog (id BIGINT GENERATED ALWAYS AS IDENTITY PRIMARY KEY, integrationid BIGINT NOT NULL REFERENCES aintegration ON DELETE CASCADE, created TIMESTAMPTZ NOT NULL, statustext TEXT NOT NULL)");
//english translations
await ExecQueryAsync("INSERT INTO atranslationitem(translationid,key,display) SELECT t.id, 'IntegrationList', 'Integrated applications' FROM atranslation t where t.baselanguage = 'en'");
await ExecQueryAsync("INSERT INTO atranslationitem(translationid,key,display) SELECT t.id, 'Integration', 'Integrated application' FROM atranslation t where t.baselanguage = 'en'");
await ExecQueryAsync("INSERT INTO atranslationitem(translationid,key,display) SELECT t.id, 'IntegrationName', 'Name' FROM atranslation t where t.baselanguage = 'en'");
//spanish translations
await ExecQueryAsync("INSERT INTO atranslationitem(translationid,key,display) SELECT t.id, 'IntegrationList', 'Aplicaciones integradas' FROM atranslation t where t.baselanguage = 'es'");
await ExecQueryAsync("INSERT INTO atranslationitem(translationid,key,display) SELECT t.id, 'Integration', 'Aplicación integrada' FROM atranslation t where t.baselanguage = 'es'");
await ExecQueryAsync("INSERT INTO atranslationitem(translationid,key,display) SELECT t.id, 'IntegrationName', 'Nombre' FROM atranslation t where t.baselanguage = 'es'");
//french translations
await ExecQueryAsync("INSERT INTO atranslationitem(translationid,key,display) SELECT t.id, 'IntegrationList', 'Applications intégrées' FROM atranslation t where t.baselanguage = 'fr'");
await ExecQueryAsync("INSERT INTO atranslationitem(translationid,key,display) SELECT t.id, 'Integration', 'Application intégrée' FROM atranslation t where t.baselanguage = 'fr'");
await ExecQueryAsync("INSERT INTO atranslationitem(translationid,key,display) SELECT t.id, 'IntegrationName', 'Nom' FROM atranslation t where t.baselanguage = 'fr'");
//german translations
await ExecQueryAsync("INSERT INTO atranslationitem(translationid,key,display) SELECT t.id, 'IntegrationList', 'Integrierte Anwendungen' FROM atranslation t where t.baselanguage = 'de'");
await ExecQueryAsync("INSERT INTO atranslationitem(translationid,key,display) SELECT t.id, 'Integration', 'Integrierte Anwendung' FROM atranslation t where t.baselanguage = 'de'");
await ExecQueryAsync("INSERT INTO atranslationitem(translationid,key,display) SELECT t.id, 'IntegrationName', 'Name' FROM atranslation t where t.baselanguage = 'de'");
//UPDATED NAME FETCHER FOR INTEGRATION
await ExecQueryAsync(@"
CREATE OR REPLACE FUNCTION PUBLIC.AYGETNAME(IN AYOBJECTID BIGINT, IN AYATYPE INTEGER,TRANSLATIONID integer) RETURNS TEXT AS $BODY$
DECLARE
aytable TEXT DEFAULT '';
aynamecolumn TEXT DEFAULT 'name';
aytkey TEXT DEFAULT 'no';
returnstr TEXT DEFAULT '';
BEGIN
case socktype
when 0 then aytkey= 'NoType';
when 1 then aytkey= 'Global';
when 2 then return 'FormUserOptions';
when 3 then aytable = 'auser';
when 4 then aytkey= 'ServerState';
when 6 then aytkey= 'LogFile';
when 7 then aytkey= 'PickListTemplate';
when 8 then aytable = 'acustomer';
when 9 then aytkey= 'ServerJob';
when 12 then aytkey= 'ServerMetrics';
when 13 then aytable = 'atranslation';
when 14 then aytkey= 'UserOptions';
when 15 then aytable = 'aheadoffice';
when 17 then aytable = 'afileattachment'; aynamecolumn ='displayfilename';
when 18 then aytable = 'adatalistsavedfilter';
when 19 then aytable = 'aformcustom'; aynamecolumn = 'formkey';
when 47 then aytkey= 'GlobalOps';
when 48 then aytkey= 'BizMetrics';
when 49 then aytkey= 'Backup';
when 50 then aytable = 'ainappnotification';
when 51 then aytkey= 'NotifySubscription';
when 52 then aytable = 'areminder';
when 56 then aytkey= 'OpsNotificationSettings';
when 57 then aytable = 'areport';
when 58 then aytkey= 'DashBoardView';
when 59 then aytable = 'acustomernote'; aynamecolumn = 'notedate';
when 60 then aytable = 'amemo';
when 61 then aytable = 'areview';
when 68 then return format('DataListColumnView %L', ayobjectid);
when 84 then aytkey= 'CustomerNotifySubscription';
when 92 then aytable = 'aintegration';
else
RETURN returnstr;
end case;
IF aytkey='no' then
EXECUTE format('SELECT %I FROM %I WHERE id = %L', aynamecolumn, aytable, ayobjectid) INTO returnstr;
else
EXECUTE format('select display from atranslationitem where translationid=%L and key=%L', TRANSLATIONID, aytkey) INTO returnstr;
END if;
RETURN returnstr;
END;
$BODY$ LANGUAGE PLPGSQL STABLE");
await SetSchemaLevelAsync(4);
}
//////////////////////////////////////////////////
//
// 8.0.7 additions for customer contact licensing (allowlogin)
//
if (currentSchema < 6)
{
LogUpdateMessage(log);
//!!WARNING: changes TO AUSER need to be reflected in dbutil::EmptyBizDataFromDatabaseForSeedingOrImportingAsync auser_backup
await ExecQueryAsync("ALTER TABLE auser ADD column allowlogin BOOL");
await ExecQueryAsync("UPDATE auser SET allowlogin=true WHERE active=true");
//english translations
await ExecQueryAsync("INSERT INTO atranslationitem(translationid,key,display) SELECT t.id, 'AllowLogin', 'Allow login' FROM atranslation t where t.baselanguage = 'en'");
//spanish translations
await ExecQueryAsync("INSERT INTO atranslationitem(translationid,key,display) SELECT t.id, 'AllowLogin', 'Permitir acceso' FROM atranslation t where t.baselanguage = 'es'");
//french translations
await ExecQueryAsync("INSERT INTO atranslationitem(translationid,key,display) SELECT t.id, 'AllowLogin', 'Autoriser la connexion' FROM atranslation t where t.baselanguage = 'fr'");
//german translations
await ExecQueryAsync("INSERT INTO atranslationitem(translationid,key,display) SELECT t.id, 'AllowLogin', 'Login erlauben' FROM atranslation t where t.baselanguage = 'de'");
currentSchema = 6;
await SetSchemaLevelAsync(currentSchema);
}
//////////////////////////////////////////////////
//
// 8.0.14 additions for job feedback
//
if (currentSchema < 8)
{
LogUpdateMessage(log);
await ExecQueryAsync("ALTER TABLE aopsjob ADD column progress TEXT");
currentSchema = 8;
await SetSchemaLevelAsync(currentSchema);
}
//////////////////////////////////////////////////
//
// case 4242
//
if (currentSchema < 11)
{
LogUpdateMessage(log);
//english translations
await ExecQueryAsync("INSERT INTO atranslationitem(translationid,key,display) SELECT t.id, 'NewCustomer', 'New customer' FROM atranslation t where t.baselanguage = 'en'");
//spanish translations
await ExecQueryAsync("INSERT INTO atranslationitem(translationid,key,display) SELECT t.id, 'NewCustomer', 'Nuevo cliente' FROM atranslation t where t.baselanguage = 'es'");
//french translations
await ExecQueryAsync("INSERT INTO atranslationitem(translationid,key,display) SELECT t.id, 'NewCustomer', 'Nouveau client' FROM atranslation t where t.baselanguage = 'fr'");
//german translations
await ExecQueryAsync("INSERT INTO atranslationitem(translationid,key,display) SELECT t.id, 'NewCustomer', 'Neukunde' FROM atranslation t where t.baselanguage = 'de'");
currentSchema = 11;
await SetSchemaLevelAsync(currentSchema);
}
//////////////////////////////////////////////////
//
// case 4173
//
if (currentSchema < 12)
{
LogUpdateMessage(log);
//english translations
await ExecQueryAsync("INSERT INTO atranslationitem(translationid,key,display) SELECT t.id, 'NotifyEventDirectSMTPMessage', 'On request SMTP' FROM atranslation t where t.baselanguage = 'en'");
//spanish translations
await ExecQueryAsync("INSERT INTO atranslationitem(translationid,key,display) SELECT t.id, 'NotifyEventDirectSMTPMessage', 'Bajo petición SMTP' FROM atranslation t where t.baselanguage = 'es'");
//french translations
await ExecQueryAsync("INSERT INTO atranslationitem(translationid,key,display) SELECT t.id, 'NotifyEventDirectSMTPMessage', 'SMTP demandé' FROM atranslation t where t.baselanguage = 'fr'");
//german translations
await ExecQueryAsync("INSERT INTO atranslationitem(translationid,key,display) SELECT t.id, 'NotifyEventDirectSMTPMessage', 'Auf Anfrage SMTP' FROM atranslation t where t.baselanguage = 'de'");
currentSchema = 12;
await SetSchemaLevelAsync(currentSchema);
}
//////////////////////////////////////////////////
//
// case 4341
//
if (currentSchema < 14)
{
LogUpdateMessage(log);
await ExecQueryAsync("ALTER TABLE aglobalbizsettings ADD COLUMN addresspostal TEXT;");
await ExecQueryAsync("ALTER TABLE acustomer ADD COLUMN addresspostal TEXT;");
await ExecQueryAsync("ALTER TABLE aheadoffice ADD COLUMN addresspostal TEXT;");
//english translations
await ExecQueryAsync("INSERT INTO atranslationitem(translationid,key,display) SELECT t.id, 'AddressPostal', 'Postal / ZIP code' FROM atranslation t where t.baselanguage = 'en'");
//spanish translations
await ExecQueryAsync("INSERT INTO atranslationitem(translationid,key,display) SELECT t.id, 'AddressPostal', 'Código postal' FROM atranslation t where t.baselanguage = 'es'");
//french translations
await ExecQueryAsync("INSERT INTO atranslationitem(translationid,key,display) SELECT t.id, 'AddressPostal', 'Code postal' FROM atranslation t where t.baselanguage = 'fr'");
//german translations
await ExecQueryAsync("INSERT INTO atranslationitem(translationid,key,display) SELECT t.id, 'AddressPostal', 'Postleitzahl' FROM atranslation t where t.baselanguage = 'de'");
currentSchema = 14;
await SetSchemaLevelAsync(currentSchema);
}
//#########################################
//!!!!WARNING: BE SURE TO UPDATE THE DbUtil::EmptyBizDataFromDatabaseForSeedingOrImporting WHEN NEW TABLES ADDED!!!!
//////////////////////////////////////////////////
// FUTURE
// if (currentSchema < xx)
// {
// LogUpdateMessage(log);
// exec queries here to do updates
// currentSchema=xx;
// await SetSchemaLevelAsync(currentSchema);
// }
/*AdminImportUpdateWarning
INSERT INTO atranslationitem(translationid,key,display)
SELECT t.id, 'mytestkey', 'mytestdisplay'
FROM atranslation t
where t.baselanguage = 'en'
*/
//Handle newer schema than expected (user ran a new version then downgraded ayanova)
if (currentSchema > DESIRED_SCHEMA_LEVEL)
throw new ArgumentOutOfRangeException($"DB Schema error: the database has a newer schema version {currentSchema} than this version of Sockeye expects {DESIRED_SCHEMA_LEVEL}\nThis version of Sockeye is older than a previous version that was used with this database and is not compatible with the new database format.\nUpgrade Sockeye or restore a backup of the database from before the newer version of Sockeye was used.");
log.LogInformation("Completed updating database schema to version {0}", currentSchema);
//*************************************************************************************
if (PrimeEmptyDB)
{
// //Load the default TRANSLATIONS
// await Sockeye.Biz.PrimeData.PrimeTranslations();
//Prime the db with the default SuperUser account
await Sockeye.Biz.PrimeData.PrimeSuperUserAccount(ct);
}
}//eofunction
private static async Task SetSchemaLevelAsync(int nCurrentSchema)
{
    // Persist the schema-level marker so future boots know which migrations have already run.
    var updateSql = $"UPDATE aschemaversion SET schema={nCurrentSchema}";
    await ExecQueryAsync(updateSql);
}
//execute command query
/// <summary>
/// Executes a single non-query SQL command against the shared context's connection.
/// </summary>
/// <param name="q">Raw SQL to execute (migration DDL/DML; never user input)</param>
private static async Task ExecQueryAsync(string q)
{
    using (var cm = ct.Database.GetDbConnection().CreateCommand())
    {
        await ct.Database.OpenConnectionAsync();
        try
        {
            cm.CommandText = q;
            await cm.ExecuteNonQueryAsync();
        }
        finally
        {
            // BUGFIX: previously a failing command skipped CloseConnectionAsync,
            // leaving the EF-managed connection open; always close it.
            await ct.Database.CloseConnectionAsync();
        }
    }
}
/// <summary>
/// Logs that a migration step is about to bump the schema from its current level.
/// </summary>
/// <param name="log">Logger to write the debug message to</param>
private static void LogUpdateMessage(ILogger log)
{
    // Use a structured-logging template (consistent with the other Log* calls in
    // this file) instead of eager interpolation, so the string is only formatted
    // when Debug level is actually enabled.
    log.LogDebug("Updating database to schema version {0}", currentSchema + 1);
}
//eoclass
}
//eons
}

68
server/util/CopyObject.cs Normal file
View File

@@ -0,0 +1,68 @@
using System;
using System.Reflection;
using System.Linq;
namespace Sockeye.Util
{
internal static class CopyObject
{
    /// <summary>
    /// Copies the data of one object to another. The target object 'pulls' properties of the source;
    /// thus any matching properties are written to the target.
    ///
    /// The object copy is a shallow copy only. Any nested types will be copied as
    /// whole values rather than individual property assignments (ie. via assignment)
    /// </summary>
    /// <param name="source">The source object to copy from</param>
    /// <param name="target">The object to copy to</param>
    /// <param name="excludedProperties">A comma delimited list of properties that should not be copied</param>
    /// <param name="memberAccess">Reflection binding access</param>
    public static void Copy(object source, object target, string excludedProperties = "", BindingFlags memberAccess = BindingFlags.Public | BindingFlags.Instance)
    {
        string[] excluded = null;
        if (!string.IsNullOrEmpty(excludedProperties))
        {
            // Normalize "a, b" / "a ,b" to "a,b" before splitting
            excludedProperties = excludedProperties.Replace(", ", ",").Replace(" ,", ",").Trim();
            excluded = excludedProperties.Split(new char[1] { ',' }, StringSplitOptions.RemoveEmptyEntries);
        }
        MemberInfo[] miT = target.GetType().GetMembers(memberAccess);
        foreach (MemberInfo Field in miT)
        {
            string name = Field.Name;
            // Skip over any excluded member names
            if (excluded != null && excluded.Contains(name))
                continue;
            if (Field.MemberType == MemberTypes.Field)
            {
                // BUGFIX: pass the caller's binding flags so non-default access
                // requests (e.g. NonPublic) locate the matching source field;
                // the old GetField(name) always used the public-only default.
                FieldInfo SourceField = source.GetType().GetField(name, memberAccess);
                if (SourceField == null)
                    continue;
                object SourceValue = SourceField.GetValue(source);
                ((FieldInfo)Field).SetValue(target, SourceValue);
            }
            else if (Field.MemberType == MemberTypes.Property)
            {
                PropertyInfo piTarget = Field as PropertyInfo;
                // BUGFIX: indexed properties (this[...]) cannot be copied by a
                // parameterless Get/SetValue and previously threw; skip them.
                if (piTarget.GetIndexParameters().Length > 0)
                    continue;
                PropertyInfo SourceField = source.GetType().GetProperty(name, memberAccess);
                if (SourceField == null)
                    continue;
                if (piTarget.CanWrite && SourceField.CanRead)
                {
                    object SourceValue = SourceField.GetValue(source, null);
                    piTarget.SetValue(target, SourceValue, null);
                }
            }
        }
    }
}//eoc
}//eons

80
server/util/DataUtil.cs Normal file
View File

@@ -0,0 +1,80 @@
using System;
using System.Collections.Generic;
namespace Sockeye.Util
{
internal static class DataUtil
{
    /// <summary>
    /// Largest-Triangle-Three-Buckets downsampling of an (x,y) series.
    /// Keeps the first and last points and picks, from each intermediate bucket,
    /// the point forming the largest triangle with the previously selected point
    /// and the average of the following bucket. When the requested threshold is 0
    /// or covers the whole series, the input is returned unchanged.
    /// </summary>
    /// <param name="data">Ordered series of (x,y) pairs</param>
    /// <param name="threshold">Number of points desired in the output</param>
    /// <returns>The downsampled series (or the original when no reduction applies)</returns>
    public static IEnumerable<Tuple<double, double>> LargestTriangleThreeBuckets(List<Tuple<double, double>> data, int threshold)
    {
        int total = data.Count;
        if (threshold >= total || threshold == 0)
            return data; // nothing to reduce
        var result = new List<Tuple<double, double>>(threshold);
        // Bucket width; the first and last points occupy their own buckets
        double bucketSize = (double)(total - 2) / (threshold - 2);
        int anchor = 0;
        var chosen = new Tuple<double, double>(0, 0);
        int nextAnchor = 0;
        result.Add(data[anchor]); // first point is always kept
        for (int bucket = 0; bucket < threshold - 2; bucket++)
        {
            // Average of the NEXT bucket: the "c" corner of the triangle
            double sumX = 0;
            double sumY = 0;
            int winStart = (int)(Math.Floor((bucket + 1) * bucketSize) + 1);
            int winEnd = Math.Min((int)(Math.Floor((bucket + 2) * bucketSize) + 1), total);
            int winLen = winEnd - winStart;
            while (winStart < winEnd)
            {
                sumX += data[winStart].Item1;
                sumY += data[winStart].Item2;
                winStart++;
            }
            double avgX = sumX / winLen;
            double avgY = sumY / winLen;
            // Candidate "b" corners: every point of the current bucket
            int candidate = (int)(Math.Floor((bucket + 0) * bucketSize) + 1);
            int candidateEnd = (int)(Math.Floor((bucket + 1) * bucketSize) + 1);
            double ax = data[anchor].Item1;
            double ay = data[anchor].Item2;
            double best = -1;
            while (candidate < candidateEnd)
            {
                double area = Math.Abs((ax - avgX) * (data[candidate].Item2 - ay) -
                                       (ax - data[candidate].Item1) * (avgY - ay)
                              ) * 0.5;
                if (area > best)
                {
                    best = area;
                    chosen = data[candidate];
                    nextAnchor = candidate; // next anchor is this b
                }
                candidate++;
            }
            result.Add(chosen); // winning point of this bucket
            anchor = nextAnchor;
        }
        result.Add(data[total - 1]); // last point is always kept
        return result;
    }
}//eoc
}//eons

159
server/util/DateUtil.cs Normal file
View File

@@ -0,0 +1,159 @@
using System;
using System.Collections.Generic;
using System.Linq;
namespace Sockeye.Util
{
internal static class DateUtil
{
    /// <summary>
    /// Is the current UTC time at least the given duration past the referenced date?
    /// </summary>
    /// <param name="startDate">UTC start point to compare to current UTC date</param>
    /// <param name="Hours"></param>
    /// <param name="Minutes"></param>
    /// <param name="Seconds"></param>
    /// <returns></returns>
    public static bool IsAfterDuration(DateTime startDate, int Hours, int Minutes = 0, int Seconds = 0)
        => IsAfterDuration(startDate, new TimeSpan(Hours, Minutes, Seconds));
    /// <summary>
    /// Is the current UTC time at least the given timespan past the referenced date?
    /// </summary>
    /// <param name="startDate">UTC start point to compare to current UTC date</param>
    /// <param name="tspan"></param>
    /// <returns></returns>
    public static bool IsAfterDuration(DateTime startDate, TimeSpan tspan)
        => DateTime.UtcNow - startDate >= tspan;
    /// <summary>
    /// An internally consistent empty or not-relevant date marker: January 1st 5555.
    /// A fixed sentinel is used instead of DateTime.MaxValue/MinValue because those
    /// interact badly with culture differences and PostgreSQL year-1 handling.
    /// </summary>
    public static DateTime EmptyDateValue => new DateTime(5555, 1, 1);
    /// <summary>
    /// Short date/short time ("g") string in server-local time, for logs, errors etc
    /// at the server level. (Not related to UI display of dates and times.)
    /// </summary>
    /// <param name="DateToDisplay"></param>
    /// <returns></returns>
    public static string ServerDateTimeString(DateTime DateToDisplay)
        => DateToDisplay.ToLocalTime().ToString("g");
    /// <summary>
    /// Current local date/time in sortable ("s") format
    /// (used for duplicate names by StringUtil and others).
    /// </summary>
    public static string SortableShortCurrentDateTimeValue => DateTime.Now.ToString("s");
    /// <summary>
    /// Returns the passed-in date formatted as an ISO 8601 round-trip ("o") UTC string.
    /// No conversion is performed; the value is assumed to already be UTC.
    /// </summary>
    /// <param name="DateToDisplay"></param>
    /// <returns></returns>
    public static string UniversalISO8661Format(DateTime DateToDisplay)
        => DateTime.SpecifyKind(DateToDisplay, DateTimeKind.Utc).ToString("o");
    /// <summary>
    /// Renders a timespan as English prose, e.g. "1 hour, 2 minutes and 3 seconds".
    /// Zero-valued components are omitted.
    /// </summary>
    /// <param name="timeSpan"></param>
    /// <returns></returns>
    public static string FormatTimeSpan(TimeSpan timeSpan)
    {
        // Renders one component, pluralizing unless the value is exactly 1.
        string Render(Tuple<int, string> part) => $"{part.Item1} {part.Item2}{(part.Item1 == 1 ? string.Empty : "s")}";
        var parts = new List<Tuple<int, string>>
        {
            Tuple.Create((int) timeSpan.TotalDays, "day"),
            Tuple.Create(timeSpan.Hours, "hour"),
            Tuple.Create(timeSpan.Minutes, "minute"),
            Tuple.Create(timeSpan.Seconds, "second"),
        };
        parts.RemoveAll(p => p.Item1 == 0);
        string tail = "";
        if (parts.Count > 1)
        {
            // The final component is joined with " and " instead of a comma
            var last = parts[parts.Count - 1];
            parts.RemoveAt(parts.Count - 1);
            tail = $" and {Render(last)}";
        }
        return $"{string.Join(", ", parts.Select(Render))}{tail}";
    }
    /// <summary>
    /// Renders a timespan as compactly as possible using the caller-supplied
    /// (typically translated) unit labels, e.g. "2 h 30 s". Zero components are omitted;
    /// a zero timespan renders as an empty string.
    /// </summary>
    /// <returns></returns>
    public static string FormatTimeSpan(TimeSpan timeSpan, string dayz = "days", string hourz = "hours", string minutez = "minutes", string secondz = "seconds")
    {
        if (timeSpan == TimeSpan.Zero)
            return "";
        var parts = new List<Tuple<int, string>>
        {
            Tuple.Create((int) timeSpan.TotalDays, dayz),
            Tuple.Create(timeSpan.Hours, hourz),
            Tuple.Create(timeSpan.Minutes, minutez),
            Tuple.Create(timeSpan.Seconds, secondz),
        };
        return string.Join(" ", parts.Where(p => p.Item1 != 0).Select(p => $"{p.Item1} {p.Item2}"));
    }
}//eoc
}//eons

875
server/util/DbUtil.cs Normal file
View File

@@ -0,0 +1,875 @@
using System;
using Microsoft.Extensions.Logging;
using Sockeye.Models;
using System.Collections.Generic;
using System.Threading.Tasks;
using Microsoft.EntityFrameworkCore;
using System.Linq;
namespace Sockeye.Util
{
internal static class DbUtil
{
//Connection string exactly as supplied by configuration (before defaults are applied)
private static string _RawSockeyeConnectionString;
//Effective connection string after defaults (e.g. database name) have been filled in
private static string _dbConnectionString;
//Individual components parsed out of the connection string by ParseConnectionString
private static string _dbName;
private static string _dbUserName;
private static string _dbPassword;
private static string _dbServer;
#region parse connection string
/// <summary>
/// Parses the configured connection string, applying localhost/sockeye defaults
/// when it or the database name is missing, and caches the individual parts
/// (_dbName, _dbUserName, _dbPassword, _dbServer) for later reuse.
/// </summary>
internal static void ParseConnectionString(ILogger _log, string SockeyeConnectionString)
{
    // Fall back to a sane localhost default when nothing was supplied
    if (string.IsNullOrWhiteSpace(SockeyeConnectionString))
    {
        _log.LogDebug("There is no database server connection string set, SOCKEYE_DB_CONNECTION is missing or empty. Will use default: \"Server=localhost;Username=postgres;Database=sockeye;\"");
        SockeyeConnectionString = "Server=localhost;Username=postgres;Database=sockeye;";
    }
    _RawSockeyeConnectionString = SockeyeConnectionString;
    var csBuilder = new System.Data.Common.DbConnectionStringBuilder
    {
        ConnectionString = SockeyeConnectionString
    };
    // Default the database name when it is absent
    if (!csBuilder.ContainsKey("database"))
    {
        _log.LogDebug("There is no database name specified (\"Database=<NAME>\") in connection string. Will use default: \"Database=sockeye;\"");
        csBuilder.Add("database", "sockeye");
    }
    //Keep track of default values
    _dbConnectionString = csBuilder.ConnectionString;
    if (csBuilder.ContainsKey("database"))
        _dbName = csBuilder["database"].ToString();
    if (csBuilder.ContainsKey("username"))
        _dbUserName = csBuilder["username"].ToString();
    if (csBuilder.ContainsKey("password"))
        _dbPassword = csBuilder["password"].ToString();
    if (csBuilder.ContainsKey("server"))
        _dbServer = csBuilder["server"].ToString();
    _log.LogDebug("Sockeye will use the following connection string: {0}", PasswordRedactedConnectionString(_dbConnectionString));
}
///////////////////////////////////////////
//clean out password from connection string
//for log purposes
/// <summary>
/// Returns the connection string with the (first) password value replaced by
/// "[redacted]" so it can be written safely to logs.
/// </summary>
/// <param name="cs">Connection string that may contain a "Password=..." entry</param>
internal static string PasswordRedactedConnectionString(string cs)
{
    const string keyToken = "password";
    // Case-insensitive search without allocating a lowered copy of the string
    int nStart = cs.IndexOf(keyToken, StringComparison.OrdinalIgnoreCase);
    if (nStart == -1)
    {
        //no password, just return it
        return cs;
    }
    // Index just past "password=". Clamped so a malformed/truncated string
    // (e.g. ending right at "password") cannot throw; the old hard-coded
    // nStart + 9 substring could go out of range in that case.
    int valueStart = Math.Min(nStart + keyToken.Length + 1, cs.Length);
    //find terminating semicolon of the password value
    int nStop = cs.IndexOf(';', valueStart);
    if (nStop == -1)
    {
        //no terminating semicolon: password runs to the end of the string
        return cs.Substring(0, valueStart) + "[redacted];";
    }
    //keep everything after the password entry
    return cs.Substring(0, valueStart) + "[redacted];" + cs.Substring(nStop + 1);
}
#endregion
#region Connection utilities
///////////////////////////////////////////
//Connection string pointing at the stock
//"postgres" maintenance database
//
private static string AdminConnectionString
{
    get
    {
        // BUGFIX: the old string.Replace(_dbName, "postgres") replaced EVERY
        // occurrence of the db name in the connection string, corrupting it when
        // the username/server/password happened to contain the same text. Rebuild
        // via the builder and swap only the "database" key (which
        // ParseConnectionString guarantees exists).
        var b = new System.Data.Common.DbConnectionStringBuilder
        {
            ConnectionString = _dbConnectionString
        };
        b["database"] = "postgres";
        return b.ConnectionString;
    }
}
///////////////////////////////////////////
//Connection string without password
//(safe for logs and error messages)
//
internal static string DisplayableConnectionString => PasswordRedactedConnectionString(_dbConnectionString);
#endregion
#region DB verification
///////////////////////////////////////////
// Get database server version
// (runs "select version();" against PostgreSQL)
//
internal static string DBServerVersion(Sockeye.Models.AyContext ct)
{
    const string noResult = "Unknown / no results";
    using (var cmd = ct.Database.GetDbConnection().CreateCommand())
    {
        ct.Database.OpenConnection();
        cmd.CommandText = $"select version();";
        using (var dr = cmd.ExecuteReader())
        {
            // No row or a NULL value both mean we cannot report a version
            if (!dr.Read() || dr.IsDBNull(0))
                return noResult;
            return dr.GetString(0);
        }
    }
}
///////////////////////////////////////////
// Get database runtime parameter settings
// (name -> setting pairs from "SHOW ALL;")
//
internal static Dictionary<string, string> DBServerRunTimeParameters(Sockeye.Models.AyContext ct)
{
    var settings = new Dictionary<string, string>();
    using (var cmd = ct.Database.GetDbConnection().CreateCommand())
    {
        ct.Database.OpenConnection();
        cmd.CommandText = $"SHOW ALL;";
        using (var dr = cmd.ExecuteReader())
        {
            while (dr.Read())
            {
                // Treat NULLs as empty strings so every row still yields an entry
                var name = dr.IsDBNull(0) ? string.Empty : dr.GetString(0);
                var setting = dr.IsDBNull(1) ? string.Empty : dr.GetString(1);
                settings.Add(name, setting);
            }
        }
    }
    return settings;
}
///////////////////////////////////////////
//Verify that server exists
// spend up to 5 minutes waiting for it to come up before bailing
//
internal static bool DatabaseServerExists(ILogger log, string logPrepend)
{
    //Try every 5 seconds for 60 tries before giving up (5 minutes total)
    const int maxRetryAttempts = 60;
    var pauseBetweenFailures = TimeSpan.FromSeconds(5);
    try
    {
        RetryHelper.RetryOnException(maxRetryAttempts, pauseBetweenFailures, log, logPrepend + DisplayableConnectionString, () =>
        {
            // A successful open/close against the admin database proves the server is up
            using (var conn = new Npgsql.NpgsqlConnection(AdminConnectionString))
            {
                conn.Open();
                conn.Close();
            }
        });
        return true;
    }
    catch
    {
        // Retries exhausted: server never became reachable
        return false;
    }
}
///////////////////////////////////////////////
// Set global flag if db server is connectable
//
// Probes the database (a trivial GlobalBizSettings read) and, on success, flips
// ServerGlobalOpsSettingsCache.DBAVAILABLE back to true so job processing resumes.
// Throttled by CHECK_DB_AVAILABLE_EVERY_INTERVAL and guarded against overlapping
// probes via CHECKING_DB_AVAILABLE.
// NOTE(review): the guard flags are plain (non-volatile) statics — assumed to be
// touched by a single probing caller; confirm if multiple threads can enter here.
internal static void CheckDatabaseServerAvailable(ILogger log)
{
    //Called by generator when db is down to check if it can connect
    if (CHECKING_DB_AVAILABLE) return;
    //don't check too often just fills log files for no reason
    if (DateTime.UtcNow - CHECKED_DB_AVAILABLE_LAST < CHECK_DB_AVAILABLE_EVERY_INTERVAL)
        return;
    CHECKING_DB_AVAILABLE = true;
    // Log the "down" state only once per outage (flag reset when the db comes back)
    if (CHECKING_DB_LOG_DOWN_STATUS)
    {
        log.LogInformation("Database server unreachable; pausing Job processing");
        CHECKING_DB_LOG_DOWN_STATUS = false;
    }
    try
    {
        log.LogTrace("Database Down - checking if up yet");
        // Cheap probe query: any exception here means the server is still down
        using (AyContext ct = ServiceProviderProvider.DBContext)
        {
            var dummy = ct.GlobalBizSettings.FirstOrDefault(z => z.Id == 1);
        }
    }
    catch
    {
        // Still unreachable; the finally block updates the throttle timestamp
        return;
    }
    finally
    {
        CHECKED_DB_AVAILABLE_LAST = DateTime.UtcNow;
        CHECKING_DB_AVAILABLE = false;
    }
    //We have db available
    log.LogInformation("Database server has become available; resuming Job processing");
    ServerGlobalOpsSettingsCache.DBAVAILABLE = true;
    CHECKING_DB_LOG_DOWN_STATUS = true;
}
// True while a probe is in flight (re-entrancy guard)
private static bool CHECKING_DB_AVAILABLE = false;
// True when the next failed probe should log the "unreachable" message
private static bool CHECKING_DB_LOG_DOWN_STATUS = true;
// Timestamp of the last probe attempt (UTC), used for throttling
private static DateTime CHECKED_DB_AVAILABLE_LAST = DateTime.MinValue;
// Minimum interval between probes (30 seconds)
private static TimeSpan CHECK_DB_AVAILABLE_EVERY_INTERVAL = new TimeSpan(0, 0, 30);
///////////////////////////////////////////////////////////
// Check if exception means db server is unavailable
// if so, flag global flag indicating it isn't
//
internal static void HandleIfDatabaseUnavailableTypeException(Exception ex)
{
    if (ex == null) return;
    // BUGFIX: Exception.Source can legitimately be null (it is only populated once
    // an exception has been thrown from a frame), so the old unconditional
    // ex.Source.Contains(...) could itself throw a NullReferenceException here.
    if (ex.Message.Contains("transient failure") && ex.Source?.Contains("PostgreSQL") == true)
        ServerGlobalOpsSettingsCache.DBAVAILABLE = false;
}
///////////////////////////////////////////
//Verify that database exists, if not, then create it
//
/// <summary>
/// Opens a test connection to the configured database; when PostgreSQL reports
/// "database does not exist" (SqlState 3D000) the database is created via the
/// admin connection. Any other connection failure is logged and rethrown.
/// </summary>
internal static bool EnsureDatabaseExists(ILogger _log)
{
    _log.LogDebug("Ensuring database exists. Connection string is: \"{0}\"", DisplayableConnectionString);
    using (var conn = new Npgsql.NpgsqlConnection(_dbConnectionString))
    {
        try
        {
            conn.Open();
            conn.Close();
        }
        catch (Exception e)
        {
            //if it's a db doesn't exist that's ok, we'll create it, not an error
            if (e is Npgsql.PostgresException pge && pge.SqlState == "3D000")
            {
                //create the db here
                using (var cnCreate = new Npgsql.NpgsqlConnection(AdminConnectionString))
                {
                    cnCreate.Open();
                    // Create the database desired
                    using (var cmd = new Npgsql.NpgsqlCommand())
                    {
                        cmd.Connection = cnCreate;
                        // Escape embedded double quotes so an unusual db name cannot
                        // break out of the quoted identifier
                        cmd.CommandText = "CREATE DATABASE \"" + _dbName.Replace("\"", "\"\"") + "\" WITH ENCODING \"UTF8\" TEMPLATE=template0;";
                        cmd.ExecuteNonQuery();
                        _log.LogInformation("Database \"{0}\" created successfully!", _dbName);
                    }
                    cnCreate.Close();
                }
            }
            else
            {
                // BUGFIX: previously only non-3D000 *PostgresExceptions* were reported;
                // any other exception type (timeouts, socket/auth errors) was silently
                // swallowed and the method returned true as if all was well.
                var err = string.Format("Database server connection failed. Connection string is: \"{0}\"", DisplayableConnectionString);
                _log.LogCritical(e, "BOOT: E1000 - " + err);
                err = err + "\nError reported was: " + e.Message;
                throw new ApplicationException(err);
            }
        }
    }
    return true;
}
#endregion
#region DB utilities
///////////////////////////////////////////
// Drop and re-create db
// This is the NUCLEAR option and
// completely ditches the DB and all user uploaded files
//
internal static async Task DropAndRecreateDbAsync(ILogger _log)
{
    _log.LogInformation("Dropping and creating Database \"{0}\"", _dbName);
    //clear all connections so that the database can be dropped
    Npgsql.NpgsqlConnection.ClearAllPools();
    // Escape embedded double quotes so an unusual db name cannot break out of
    // the quoted identifier in the DROP/CREATE statements
    var quotedName = "\"" + _dbName.Replace("\"", "\"\"") + "\"";
    using (var conn = new Npgsql.NpgsqlConnection(AdminConnectionString))
    {
        await conn.OpenAsync();
        using (var cmd = new Npgsql.NpgsqlCommand())
        {
            cmd.Connection = conn;
            // Drop the old database, then create a fresh empty one
            cmd.CommandText = "DROP DATABASE " + quotedName + ";";
            await cmd.ExecuteNonQueryAsync();
            cmd.CommandText = "CREATE DATABASE " + quotedName + " WITH ENCODING \"UTF8\" TEMPLATE=template0;";
            await cmd.ExecuteNonQueryAsync();
            _log.LogDebug("Database created");
        }
        await conn.CloseAsync();
    }
    //final cleanup step is to erase user uploaded files
    FileUtil.EraseEntireContentsOfAttachmentFilesFolder();
}
/////////////////////////////////////////////////////////
// Erase all user entered data from the db
// This is called by seeder for trial seeding purposes
// and by v8 migrate and by license controller when erasing db
/// <summary>
/// Erase all user-entered ("biz") data from the database while preserving the
/// superuser account (id 1), its options, the stock translations (ids 1-4) and
/// a handful of configuration tables listed below. Closes the API to clients
/// for the duration, erases the attachment files folder on success, then
/// re-primes stock report templates and restores the prior server state.
/// Called by the seeder (trial seeding), the v8 migration, and the license
/// controller's erase-database operation.
/// </summary>
/// <param name="_log">logger for progress/diagnostics</param>
/// <param name="keepTaxCodes">NOTE(review): not referenced anywhere in this
/// method body — confirm whether tax-code preservation is handled elsewhere
/// or this parameter is vestigial.</param>
internal static async Task EmptyBizDataFromDatabaseForSeedingOrImportingAsync(ILogger _log, bool keepTaxCodes = false)
{
    //case 4221
    //erase plan to use truncate table instead of slower delete method
    //- truncate table cascade deletes which removes all user accounts including superuser
    //- duplicate the auser and auseroption tables to a temporary table to preserve them
    //- do the full truncate
    //- select the superuser account back to the auser and useroptions
    //- discover where else this needs to be done
    //- $profit
    _log.LogInformation("Erasing Database \"{0}\"", _dbName);
    //take the API offline so no client can mutate data mid-erase
    Sockeye.Api.ControllerHelpers.ApiServerState apiServerState = (Sockeye.Api.ControllerHelpers.ApiServerState)ServiceProviderProvider.Provider.GetService(typeof(Sockeye.Api.ControllerHelpers.ApiServerState));
    apiServerState.SetClosed("Erasing database");
    //clear all connections so that the database can be dropped
    Npgsql.NpgsqlConnection.ClearAllPools();
    using (var conn = new Npgsql.NpgsqlConnection(_dbConnectionString))
    {
        await conn.OpenAsync();
        //prepare to delete by removing foreign keys
        //(null out auser references so the later truncates don't cascade into auser)
        using (var cmd = new Npgsql.NpgsqlCommand())
        {
            cmd.Connection = conn;
            cmd.CommandText = "update auser set customerid=null;";
            await cmd.ExecuteNonQueryAsync();
            cmd.CommandText = "update auser set headofficeid=null;";
            await cmd.ExecuteNonQueryAsync();
            cmd.CommandText = "update auser set vendorid=null;";
            await cmd.ExecuteNonQueryAsync();
            //DELIBERATELY NOT ERASED:
            //NOTE: these tables commented below are deliberately not affected as it's assumed for a **COMPLETE** erase users will erase manually or use the config setting to completely drop the database
            //For anything less than that it's assumed users are testing things out and want to keep these tables but erase the biz data
            //aglobalopsbackupsettings
            //aglobalopsnotificationsettings
            //aglobalbizsettings
            //alogo
            //areport
            //await EraseTableAsync("aopsjoblog", conn, true);
            // await EraseTableAsync("aopsjob", conn, true);
        }
        //Delete non stock translations (ids 1-4 are the stock ones and are kept)
        using (var cmd = new Npgsql.NpgsqlCommand())
        {
            cmd.Connection = conn;
            //set to default translation so can delete all non default ones
            cmd.CommandText = "update auseroptions set translationid=1;";
            await cmd.ExecuteNonQueryAsync();
            cmd.CommandText = "delete from atranslationitem where translationid > 4;";
            await cmd.ExecuteNonQueryAsync();
            cmd.CommandText = "delete from atranslation where id > 4;";
            await cmd.ExecuteNonQueryAsync();
        }
        //case 4221 truncate support
        //BACKUP USER AND DATA TO BE PRESERVED THAT TRUNCATE WILL CASCADE DELETE
        using (var cmd = new Npgsql.NpgsqlCommand())
        {
            cmd.Connection = conn;
            cmd.CommandText = "CREATE TABLE auser_backup AS TABLE auser;";
            await cmd.ExecuteNonQueryAsync();
            cmd.CommandText = "CREATE TABLE auseroptions_backup AS TABLE auseroptions;";
            await cmd.ExecuteNonQueryAsync();
        }
        //REMOVE ALL REMAINING DATA
        //(TRUNCATE ... CASCADE also clears dependent tables, incl. auser/auseroptions — restored below)
        await TruncateTableAsync("afileattachment", conn);
        await TruncateTableAsync("aevent", conn);
        await TruncateTableAsync("adatalistsavedfilter", conn);
        await TruncateTableAsync("adatalistcolumnview", conn);
        await TruncateTableAsync("apicklisttemplate", conn, true);
        await TruncateTableAsync("aformcustom", conn);
        await TruncateTableAsync("asearchkey", conn);
        await TruncateTableAsync("asearchdictionary", conn);
        await TruncateTableAsync("atag", conn);
        await TruncateTableAsync("acustomernote", conn);
        await TruncateTableAsync("acustomer", conn);
        await TruncateTableAsync("aheadoffice", conn);
        //----- NOTIFICATION
        await TruncateTableAsync("ainappnotification", conn);
        await TruncateTableAsync("anotifyevent", conn);
        await TruncateTableAsync("anotifydeliverylog", conn);
        await TruncateTableAsync("anotifysubscription", conn);
        await TruncateTableAsync("acustomernotifyevent", conn);
        await TruncateTableAsync("acustomernotifydeliverylog", conn);
        await TruncateTableAsync("acustomernotifysubscription", conn);
        await TruncateTableAsync("amemo", conn);
        await TruncateTableAsync("areminder", conn);//depends on User
        await TruncateTableAsync("areview", conn);//depends on User
        await TruncateTableAsync("ametricmm", conn, true);
        await TruncateTableAsync("ametricdd", conn, true);
        await TruncateTableAsync("adashboardview", conn);
        await TruncateTableAsync("aintegration", conn);
        //############# WARNING: there can be unintended consequences easily if new tables or fields are added that REFERENCE other tables triggering a cascade delete unexpectedly
        //be sure about that before making changes and test thoroughly anything that calls this method:
        // the seeding and manual erase and v8-migrate code when making such changes
        //case 4221 truncate support
        //COPY BACK USER AND DATA TO BE PRESERVED THAT TRUNCATE WILL CASCADE DELETE
        using (var cmd = new Npgsql.NpgsqlCommand())
        {
            //AT this point the truncate commands in erasetable above have caused all user and useroptions to be deleted
            //so no need to clean out those tables, instead put our backup superuser back in again
            cmd.Connection = conn;
            cmd.CommandText = "INSERT INTO auser (active, name, lastlogin, login, password, salt , roles, currentauthtoken, "
            + "dlkey, dlkeyexpire, totpsecret, temptoken, twofactorenabled, passwordresetcode, passwordresetcodeexpire, usertype, "
            + "employeenumber, notes, wiki, customfields, tags, allowlogin) "
            + "SELECT active, name, lastlogin, login, password, salt , roles, currentauthtoken, "
            + "dlkey, dlkeyexpire, totpsecret, temptoken, twofactorenabled, passwordresetcode, passwordresetcodeexpire, usertype, "
            + "employeenumber, notes, wiki, customfields, tags, allowlogin "
            + "FROM auser_backup where id = 1;";
            await cmd.ExecuteNonQueryAsync();
            cmd.CommandText = "INSERT INTO auseroptions (userid, translationid, languageoverride, timezoneoverride, "
            + "currencyname, hour12, emailaddress, phone1, phone2, phone3, mapurltemplate, uicolor) "
            + "SELECT userid, translationid, languageoverride, timezoneoverride, "
            + "currencyname, hour12, emailaddress, phone1, phone2, phone3, mapurltemplate, uicolor "
            + "FROM auseroptions_backup where userid = 1;";
            await cmd.ExecuteNonQueryAsync();
            cmd.CommandText = "DROP TABLE IF EXISTS AUSEROPTIONS_BACKUP, AUSER_BACKUP;";
            await cmd.ExecuteNonQueryAsync();
        }
        await conn.CloseAsync();
    }
    //If we got here then it's safe to erase the attachment files
    FileUtil.EraseEntireContentsOfAttachmentFilesFolder();
    _log.LogInformation("Importing any missing stock Report templates");
    await Sockeye.Biz.PrimeData.PrimeReportTemplates();
    apiServerState.ResumePriorState();
    _log.LogInformation("Database erase completed");
}
///////////////////////////////////////////
/// <summary>
/// Remove every row from <paramref name="sTable"/> via TRUNCATE, resetting its
/// identity sequence and cascading to dependent tables.
/// </summary>
/// <param name="sTable">table name (trusted, internal caller-supplied)</param>
/// <param name="conn">an already-open connection</param>
/// <param name="tableHasNoSequence">not referenced here — kept only so call sites
/// that pass it keep compiling</param>
private static async Task TruncateTableAsync(string sTable, Npgsql.NpgsqlConnection conn, bool tableHasNoSequence = false)
{
    using (var truncateCmd = new Npgsql.NpgsqlCommand())
    {
        truncateCmd.Connection = conn;
        truncateCmd.CommandText = $"TRUNCATE \"{sTable}\" RESTART IDENTITY CASCADE;";
        await truncateCmd.ExecuteNonQueryAsync();
    }
}
///////////////////////////////////////////
/// <summary>
/// Delete every row from <paramref name="sTable"/> with a plain DELETE and, unless
/// the table has no sequence, reset its id sequence back to 1.
/// Used for small tables where TRUNCATE's cascade into referencing tables
/// would be unwanted.
/// </summary>
private static async Task EraseTableAsync(string sTable, Npgsql.NpgsqlConnection conn, bool tableHasNoSequence = false)
{
    using (var cmd = new Npgsql.NpgsqlCommand())
    {
        cmd.Connection = conn;
        cmd.CommandText = $"delete from {sTable};";
        await cmd.ExecuteNonQueryAsync();
        if (tableHasNoSequence)
            return; // nothing further to reset
        cmd.CommandText = $"ALTER SEQUENCE {sTable}_id_seq RESTART WITH 1;";
        await cmd.ExecuteNonQueryAsync();
    }
}
///////////////////////////////////////////
/// <summary>
/// Cheap "is the database empty?" probe: only a few key tables are inspected
/// (users beyond the superuser, customers, head offices) because nearly all
/// other data requires those to exist first.
/// CALLED BY the license controller and License.cs for the trial-request check.
/// </summary>
internal static async Task<bool> DBIsEmptyAsync(AyContext ct, ILogger _log)
{
    _log.LogDebug("DB empty check");
    // An empty db contains exactly one User (the superuser); short-circuits
    // in the same order as the original checks.
    bool hasBizData =
        await ct.User.LongCountAsync() > 1
        || await ct.Customer.AnyAsync()
        || await ct.HeadOffice.AnyAsync();
    return !hasBizData;
}
///////////////////////////////////////////
/// <summary>
/// Heuristic check for whether the database contains the evaluation (trial)
/// user accounts: at least 22 users overall and all five well-known trial
/// login names present.
/// CALLED BY the login ping from the client via the notify controller.
/// </summary>
internal static async Task<bool> DBHasTrialUsersAsync(AyContext ct, ILogger _log)
{
    _log.LogDebug("DB trial users presence check for pre-login ping");
    //There are 22 trial users (more but for internal use) in a trial database
    if (await ct.User.LongCountAsync() < 22)
        return false;
    //probe a handful of the well-known trial logins
    var matchCount = await ct.User.AsNoTracking()
        .Where(z =>
            z.Login == "BizAdmin" ||
            z.Login == "Service" ||
            z.Login == "Inventory" ||
            z.Login == "Accounting" ||
            z.Login == "Tech")
        .LongCountAsync();
    return matchCount >= 5;
}
///////////////////////////////////////////
/// <summary>
/// Verify the database schema has not been modified by comparing counts of
/// columns, indexes, check constraints, foreign-key constraints, views and
/// routines in the public schema against the expected fingerprint values.
/// Throws <see cref="ApplicationException"/> (E1030) on any mismatch or if a
/// count query unexpectedly returns no rows.
/// </summary>
/// <remarks>
/// Refactored: the six previously copy-pasted reader blocks are folded into
/// <see cref="ReadSchemaCountAsync"/>; queries and error strings are unchanged.
/// </remarks>
internal static async Task CheckFingerPrintAsync(
    long ExpectedColumns,
    long ExpectedIndexes,
    long ExpectedCheckConstraints,
    long ExpectedForeignKeyConstraints,
    long ExpectedViews,
    long ExpectedRoutines,
    ILogger _log)
{
    _log.LogDebug("Checking DB integrity");
    using (var conn = new Npgsql.NpgsqlConnection(_dbConnectionString))
    {
        await conn.OpenAsync();
        //COLUMNS: count all columns in all our tables
        long actualColumns = await ReadSchemaCountAsync(conn,
            "SELECT count(*) FROM information_schema.columns where table_schema='public'", "COLUMN", _log);
        //INDEXES: count all indexes in all our tables
        long actualIndexes = await ReadSchemaCountAsync(conn,
            "select Count(*) from pg_indexes where schemaname='public'", "INDEX", _log);
        //CHECK CONSTRAINTS
        long actualCheckConstraints = await ReadSchemaCountAsync(conn,
            "SELECT count(*) FROM information_schema.check_constraints where constraint_schema='public'", "CHECK CONSTRAINT", _log);
        //FOREIGN KEY CONSTRAINTS
        long actualForeignKeyConstraints = await ReadSchemaCountAsync(conn,
            "SELECT count(*) FROM information_schema.referential_constraints where constraint_schema='public'", "FOREIGN KEY CONSTRAINT", _log);
        //VIEWS
        long actualViews = await ReadSchemaCountAsync(conn,
            "SELECT count(*) FROM information_schema.views where table_schema='public'", "VIEW", _log);
        //ROUTINES
        long actualRoutines = await ReadSchemaCountAsync(conn,
            "SELECT count(*) FROM information_schema.routines where routine_schema='public'", "ROUTINE", _log);
        await conn.CloseAsync();
        if (ExpectedColumns != actualColumns
            || ExpectedIndexes != actualIndexes
            || ExpectedCheckConstraints != actualCheckConstraints
            || ExpectedForeignKeyConstraints != actualForeignKeyConstraints
            || ExpectedRoutines != actualRoutines
            || ExpectedViews != actualViews)
        {
            //same fingerprint format as the (Cnnn:Innn:...) comment at the top of the file
            var err = $"E1030 - Database integrity check failed (C{actualColumns}:I{actualIndexes}:CC{actualCheckConstraints}:FC{actualForeignKeyConstraints}:V{actualViews}:R{actualRoutines})";
            _log.LogCritical(err);
            throw new ApplicationException(err);
        }
        return;
    }
}
/// <summary>
/// Run a single scalar count query for the fingerprint check and return its
/// value. Logs and throws the standard E1030 error (mentioning
/// <paramref name="objectKind"/>) if the query returns no rows.
/// </summary>
private static async Task<long> ReadSchemaCountAsync(Npgsql.NpgsqlConnection conn, string countSql, string objectKind, ILogger _log)
{
    using (var command = conn.CreateCommand())
    {
        command.CommandText = countSql;
        using (var result = await command.ExecuteReaderAsync())
        {
            if (!result.HasRows)
            {
                var err = $"E1030 - Database integrity check failed, could not obtain {objectKind} data. Contact support.";
                _log.LogCritical(err);
                throw new ApplicationException(err);
            }
            await result.ReadAsync();
            return result.GetInt64(0);
        }
    }
}
///////////////////////////////////////////
/// <summary>
/// Given a table name return the count of records in that table.
/// Used for metrics.
/// </summary>
/// <param name="TableName">name of an existing table; must be a plain
/// lower-case PostgreSQL identifier</param>
/// <returns>row count, or 0 if the query returned no rows</returns>
/// <exception cref="ArgumentException">if TableName is not a simple identifier</exception>
internal static async Task<long> CountOfRecordsAsync(string TableName)
{
    // Fix: TableName is interpolated directly into SQL below, so restrict it to a
    // plain (lower-case, unquoted) PostgreSQL identifier to rule out injection.
    // All of our table names (auser, acustomer, ...) match this pattern.
    if (string.IsNullOrWhiteSpace(TableName)
        || !System.Text.RegularExpressions.Regex.IsMatch(TableName, @"^[a-z_][a-z0-9_]*$"))
    {
        throw new ArgumentException($"Invalid table name '{TableName}'", nameof(TableName));
    }
    long ret = 0;
    using (var conn = new Npgsql.NpgsqlConnection(_dbConnectionString))
    {
        await conn.OpenAsync();
        using (var command = conn.CreateCommand())
        {
            command.CommandText = $"SELECT count(*) FROM {TableName}";
            using (var result = await command.ExecuteReaderAsync())
            {
                if (result.HasRows)
                {
                    await result.ReadAsync();
                    ret = result.GetInt64(0);
                }
            }
        }
        await conn.CloseAsync();
    }
    return ret;
}
///////////////////////////////////////////
/// <summary>
/// Return the names of all base tables in the public schema of the current
/// database (i.e. our tables, excluding views and system objects).
/// </summary>
internal static async Task<List<string>> GetAllTablenamesAsync()
{
    var tableNames = new List<string>();
    using (var conn = new Npgsql.NpgsqlConnection(_dbConnectionString))
    {
        await conn.OpenAsync();
        using (var command = conn.CreateCommand())
        {
            command.CommandText = "SELECT table_name FROM information_schema.tables WHERE table_schema='public' AND table_type='BASE TABLE';";
            using (var reader = await command.ExecuteReaderAsync())
            {
                // ReadAsync simply returns false when there are no rows,
                // so no separate HasRows gate is needed.
                while (await reader.ReadAsync())
                {
                    tableNames.Add(reader.GetString(0));
                }
            }
        }
        await conn.CloseAsync();
    }
    return tableNames;
}
#endregion
}//eoc
}//eons

View File

@@ -0,0 +1,36 @@
using System;
/// <summary>
/// Extension helpers for querying custom attributes on enum members.
/// </summary>
public static class EnumExtension
{
    /// <summary>
    /// True if the enum member's declaration carries an attribute of type
    /// <paramref name="t"/>; false for undefined enum values.
    /// Example: bool c = Biz.SockType.License.HasAttribute(typeof(Biz.AttachableAttribute));
    /// </summary>
    /// <param name="value">the enum member to inspect</param>
    /// <param name="t">the attribute type to look for</param>
    public static bool HasAttribute(this Enum value, Type t)
    {
        var enumType = value.GetType();
        // GetName returns null for values not declared in the enum
        var memberName = Enum.GetName(enumType, value);
        if (memberName == null)
            return false;
        var field = enumType.GetField(memberName);
        if (field == null)
            return false;
        return Attribute.GetCustomAttribute(field, t) != null;
    }
}

View File

@@ -0,0 +1,32 @@
using System;
using System.Text;
namespace Sockeye.Util
{
internal static class ExceptionUtil
{
    /// <summary>
    /// Build a newline-separated summary of an exception chain, one
    /// "Source -> Message" line per level from the outermost exception
    /// down through every InnerException. Returns an empty string for null.
    /// </summary>
    /// <param name="ex">outermost exception (may be null)</param>
    public static string ExtractAllExceptionMessages(Exception ex)
    {
        var messages = new StringBuilder();
        for (var current = ex; current != null; current = current.InnerException)
        {
            messages.AppendLine($"{current.Source} -> {current.Message}");
        }
        return messages.ToString();
    }
}
}//eons

27
server/util/FileHash.cs Normal file
View File

@@ -0,0 +1,27 @@
using System.IO;
using System.Security.Cryptography;
using System;
namespace Sockeye.Util
{
internal static class FileHash
{
    /// <summary>
    /// Compute the SHA-256 checksum of a file and return it as an upper-case
    /// hexadecimal string with no separators.
    /// </summary>
    /// <param name="filePath">path of the file to hash</param>
    internal static string GetChecksum(string filePath)
    {
        using (FileStream stream = File.OpenRead(filePath))
        //fix: the hash algorithm instance is IDisposable and was previously never disposed
        using (var sha = SHA256.Create())
        {
            byte[] checksum = sha.ComputeHash(stream);
            return BitConverter.ToString(checksum).Replace("-", String.Empty);
        }
    }
}
}//eons

879
server/util/FileUtil.cs Normal file
View File

@@ -0,0 +1,879 @@
using System;
using System.Linq;
using System.Threading.Tasks;
using System.IO;
using System.IO.Compression;
using System.Collections.Generic;
using Microsoft.Extensions.Logging;
using Microsoft.EntityFrameworkCore;
using Sockeye.Models;
using Sockeye.Biz;
using System.Reflection;
namespace Sockeye.Util
{
/*
- Quickly generate large files in windows: http://tweaks.com/windows/62755/quickly-generate-large-test-files-in-windows/
*/
internal static class FileUtil
{
#region Folder ensurance
/// <summary>
/// Ensure the attachment, backup and temporary folders all exist and are three
/// distinct locations. Throws NotSupportedException (E1040, aborting startup)
/// if any two resolve to the same path. The distinctness requirement exists so
/// that a future erase-database operation (which wipes all attachment files)
/// can never destroy backups needed to recover from an accidental erase.
/// </summary>
internal static void EnsureUserAndUtilityFoldersExistAndAreNotIdentical()
{
    var attachmentsPath = Path.GetFullPath(ServerBootConfig.SOCKEYE_ATTACHMENT_FILES_PATH);
    var backupsPath = Path.GetFullPath(ServerBootConfig.SOCKEYE_BACKUP_FILES_PATH);
    var tempPath = Path.GetFullPath(ServerBootConfig.SOCKEYE_TEMP_FILES_PATH);
    bool anyIdentical =
        string.Equals(attachmentsPath, backupsPath, StringComparison.OrdinalIgnoreCase) ||
        string.Equals(attachmentsPath, tempPath, StringComparison.OrdinalIgnoreCase) ||
        string.Equals(backupsPath, tempPath, StringComparison.OrdinalIgnoreCase);
    if (anyIdentical)
    {
        throw new System.NotSupportedException("E1040: The configuration settings SOCKEYE_ATTACHMENT_FILES_PATH, SOCKEYE_BACKUP_FILES_PATH and SOCKEYE_FOLDER_TEMPORARY_SYSTEM_FILES must all be different locations");
    }
    EnsurePath(ServerBootConfig.SOCKEYE_ATTACHMENT_FILES_PATH);
    EnsurePath(ServerBootConfig.SOCKEYE_BACKUP_FILES_PATH);
    EnsurePath(ServerBootConfig.SOCKEYE_TEMP_FILES_PATH);
}
/// <summary>Create the directory at <paramref name="path"/> if it does not already exist.</summary>
private static void EnsurePath(string path)
{
    if (Directory.Exists(path))
        return;
    Directory.CreateDirectory(path);
}
#endregion folder ensurance
#region Temporary files handling
/// <summary>
/// Combine a bare file name with the temporary-files folder into a full path.
/// </summary>
/// <param name="fileName">file name (no directory component expected)</param>
internal static string GetFullPathForTemporaryFile(string fileName)
{
    var folder = TemporaryFilesFolder;
    return Path.Combine(folder, fileName);
}
/// <summary>
/// The configured temporary system files folder (reports etc),
/// i.e. ServerBootConfig.SOCKEYE_TEMP_FILES_PATH.
/// </summary>
internal static string TemporaryFilesFolder
{
    get { return ServerBootConfig.SOCKEYE_TEMP_FILES_PATH; }
}
/// <summary>
/// A freshly generated random file name, rooted in the temporary files folder.
/// </summary>
internal static string NewRandomTempFilesFolderFileName
{
    get { return Path.Combine(TemporaryFilesFolder, NewRandomFileName); }
}
/// <summary>
/// Confirm whether a file with the given name exists in the temporary files
/// folder. Blank/null names are treated as non-existent.
/// </summary>
/// <param name="fileName">name of the temp-folder file</param>
internal static bool TemporaryFileExists(string fileName)
{
    return !string.IsNullOrWhiteSpace(fileName)
        && File.Exists(GetFullPathForTemporaryFile(fileName));
}
/// <summary>
/// Best-effort sweep of the temporary files folder: delete every file whose
/// creation time (UTC) is older than <paramref name="age"/>.
/// </summary>
/// <param name="age">files older than this are removed</param>
internal static void CleanTemporaryFilesFolder(TimeSpan age)
{
    DateTime eraseIfOlderThan = DateTime.UtcNow - age;
    System.IO.DirectoryInfo di = new DirectoryInfo(TemporaryFilesFolder);
    foreach (FileInfo file in di.EnumerateFiles())
    {
        if (file.CreationTimeUtc >= eraseIfOlderThan)
            continue;
        try
        {
            file.Delete();
        }
        catch (IOException)
        {
            // Fix: a file still open elsewhere previously threw out of this
            // method and aborted the whole cleanup job, e.g.:
            //   2022-03-08 ... ERROR ... System.IO.IOException: The process cannot
            //   access the file '...\vrimbqp2lia.pdf' because it is being used by
            //   another process. (at FileUtil.CleanTemporaryFilesFolder)
            // Cleanup is best-effort, so skip the locked file and let the next
            // scheduled sweep retry it.
        }
    }
}
#endregion
#region Utility (BACKUP) file handling
/// <summary>
/// Combine a bare file name with the backup files folder into a full path.
/// </summary>
/// <param name="fileName">file name (no directory component expected)</param>
internal static string GetFullPathForBackupFile(string fileName)
{
    var backupFolder = ServerBootConfig.SOCKEYE_BACKUP_FILES_PATH;
    return Path.Combine(backupFolder, fileName);
}
// /// <summary>
// /// Get backup folder
// /// </summary>
// /// <returns></returns>
// internal static string BackupFilesFolder
// {
// get
// {
// return ServerBootConfig.SOCKEYE_BACKUP_FILES_PATH;
// }
// }
/// <summary>One backup file as reported to the ops UI.</summary>
public class BackupFileInfo
{
    // Human-readable size string (see GetBytesReadable). NOTE(review): the
    // lower-case property name is kept as-is — presumably part of the
    // serialized contract consumed by the UI; confirm before renaming.
    public string length { get; set; }
    // Bare file name (no path)
    public string Name { get; set; }
    // File creation time, UTC
    public DateTime Created { get; set; }
}
/// <summary>Backup folder status report returned to the ops UI.</summary>
public class BackupStatus
{
    /// <summary>Human-readable free space on the backup drive; null until populated, "ERROR" on failure.</summary>
    public string AvailableFreeSpace { get; set; } = null;
    /// <summary>One entry per file found in the backup folder.</summary>
    public List<BackupFileInfo> BackupFiles { get; set; } = new List<BackupFileInfo>();
}
/// <summary>
/// Build a status report of the backup folder (free drive space plus every file
/// present, newest-name first) for display to the ops user in the UI.
/// A failure to read drive space is logged and reported as "ERROR" rather than thrown.
/// </summary>
internal static BackupStatus BackupStatusReport()
{
    var statusReport = new BackupStatus();
    try
    {
        var driveRoot = Path.GetPathRoot(ServerBootConfig.SOCKEYE_BACKUP_FILES_PATH);
        statusReport.AvailableFreeSpace = GetBytesReadable(new System.IO.DriveInfo(driveRoot).AvailableFreeSpace);
    }
    catch (Exception ex)
    {
        statusReport.AvailableFreeSpace = "ERROR";
        ILogger log = Sockeye.Util.ApplicationLogging.CreateLogger("FileUtil::BackupStatus");
        log.LogError(ex, "FileUtil::BackupStatusReport error getting available space");
    }
    // enumerate every file, reverse-alphabetical (date-stamped names => newest first)
    var orderedFiles = Directory.EnumerateFiles(ServerBootConfig.SOCKEYE_BACKUP_FILES_PATH, "*")
        .OrderByDescending(z => z);
    foreach (string file in orderedFiles)
    {
        var info = new FileInfo(file);
        statusReport.BackupFiles.Add(new BackupFileInfo()
        {
            Name = Path.GetFileName(file),
            length = GetBytesReadable(info.Length),
            Created = info.CreationTimeUtc
        });
    }
    return statusReport;
}
// List<string> returnList = new List<string>();
// foreach (string file in Directory.EnumerateFiles(UtilityFilesFolder, "*"))
// {
// var fi = new FileInfo(file);
// returnList.Add(fi.Length);
// }
// returnList.Sort();
// return returnList;
/// <summary>
/// Return the UTC creation time of the newest automatic database backup file
/// ("db-*.backup" in the backup folder), or DateTime.MinValue if none exist.
/// </summary>
internal static DateTime MostRecentAutomatedBackupFileDate()
{
    DateTime newest = DateTime.MinValue;
    foreach (string file in Directory.EnumerateFiles(ServerBootConfig.SOCKEYE_BACKUP_FILES_PATH, "db-*.backup"))
    {
        var created = File.GetCreationTimeUtc(file);
        if (created > newest)
            newest = created;
    }
    return newest;
}
/// <summary>
/// Confirm whether a file with the given name exists in the backup folder.
/// Blank/null names are treated as non-existent.
/// </summary>
/// <param name="fileName">name of the backup file</param>
internal static bool BackupFileExists(string fileName)
{
    return !string.IsNullOrWhiteSpace(fileName)
        && File.Exists(GetFullPathForBackupFile(fileName));
}
// /// <summary>
// /// DANGER: Erases all Utility files including backups etc
// /// </summary>
// internal static void EraseEntireContentsOfBackupFilesFolder()
// {
// System.IO.DirectoryInfo di = new DirectoryInfo(BackupFilesFolder);
// foreach (FileInfo file in di.EnumerateFiles())
// {
// file.Delete();
// }
// foreach (DirectoryInfo dir in di.EnumerateDirectories())
// {
// dir.Delete(true);
// }
// }
/// <summary>
/// DANGER: delete the named file from the backup folder if present.
/// Any directory component is stripped first so callers cannot escape the
/// backup folder. Deliberately silent when the file does not exist, so a
/// caller probing for files cannot learn anything from the response.
/// </summary>
internal static void EraseBackupFile(string name)
{
    //ensure no directory shenanigans, only a file name is allowed
    var targetPath = Path.Combine(ServerBootConfig.SOCKEYE_BACKUP_FILES_PATH, Path.GetFileName(name));
    if (!File.Exists(targetPath))
        return; //never report an error — that would leak file existence
    File.Delete(targetPath);
}
/// <summary>
/// Prune excess backups in the backup folder, keeping only the newest
/// <paramref name="keepCount"/> (minimum 1) of each kind: database backups
/// ("*db-*.backup") and attachment backups ("*at-*.zip").
/// </summary>
/// <param name="keepCount">how many of each kind to keep; values below 1 are clamped to 1</param>
internal static void DatabaseBackupCleanUp(int keepCount)
{
    if (keepCount < 1) keepCount = 1;
    //case 4204 prepended db and at with * to accomodate manual backups also being pruned
    //(refactored: the two previously duplicated prune loops share one helper)
    PruneBackupSet("*db-*.backup", keepCount);  //database backups
    PruneBackupSet("*at-*.zip", keepCount);     //attachment backups
}
/// <summary>
/// Delete all but the newest <paramref name="keepCount"/> files matching
/// <paramref name="searchPattern"/> in the backup folder ("newest" by
/// reverse-alphabetical name, which for date-stamped names is newest first).
/// </summary>
private static void PruneBackupSet(string searchPattern, int keepCount)
{
    var backupFiles = Directory.EnumerateFiles(ServerBootConfig.SOCKEYE_BACKUP_FILES_PATH, searchPattern).ToList();
    if (backupFiles.Count <= keepCount)
        return;
    //sort, skip newest keepCount, delete the rest
    foreach (string extraBackupFile in backupFiles.OrderByDescending(m => m).Skip(keepCount))
    {
        File.Delete(extraBackupFile);
    }
}
/// <summary>
/// Free space, in bytes, on the drive that hosts the backup files folder.
/// </summary>
internal static long BackupFilesDriveAvailableSpace()
{
    var driveRoot = Path.GetPathRoot(ServerBootConfig.SOCKEYE_BACKUP_FILES_PATH);
    return new System.IO.DriveInfo(driveRoot).AvailableFreeSpace;
}
#endregion Utility file handling
#region Zip handling
////////////////////////////////////////////////////////////////////////////////////////
//ZIP handling
/// <summary>
/// List the zip entries of a file that lives in the backup folder,
/// given only its file name.
/// </summary>
/// <param name="zipFileName">name of the zip file within the backup folder</param>
internal static List<string> ZipGetUtilityFileEntries(string zipFileName)
{
    var fullPath = GetFullPathForBackupFile(zipFileName);
    return ZipGetEntries(fullPath);
}
/// <summary>
/// List the entry full names of the zip archive at <paramref name="zipPath"/>,
/// sorted alphabetically so entries within the same folder stay together.
/// </summary>
/// <param name="zipPath">full path to the zip file</param>
internal static List<string> ZipGetEntries(string zipPath)
{
    var entryNames = new List<string>();
    using (ZipArchive archive = ZipFile.OpenRead(zipPath))
    {
        foreach (ZipArchiveEntry entry in archive.Entries)
        {
            entryNames.Add(entry.FullName);
        }
    }
    entryNames.Sort();
    return entryNames;
}
#endregion Zip handling
#region Attachment file handling
// /// <summary>
// /// Get user folder
// /// </summary>
// /// <returns></returns>
// internal static string AttachmentFilesFolder
// {
// get
// {
// return ServerBootConfig.SOCKEYE_ATTACHMENT_FILES_PATH;
// }
// }
/// <summary>
/// A freshly generated random file name, rooted in the attachment files folder.
/// </summary>
internal static string NewRandomAttachmentFilesFolderFileName
{
    get { return Path.Combine(ServerBootConfig.SOCKEYE_ATTACHMENT_FILES_PATH, NewRandomFileName); }
}
/// <summary>
/// Store an uploaded file as a permanent content-addressed attachment and
/// record a FileAttachment row for it. The temp file is hashed (SHA-256),
/// moved into the hash-sharded attachment folder (or discarded if an identical
/// file is already stored — attachments are deduplicated by content), and a
/// FileAttachment row linking it to the target object is saved.
/// </summary>
/// <param name="tempFilePath">path of the uploaded temp file; this file is consumed (moved or deleted)</param>
/// <param name="contentType">MIME content type as supplied by the uploader</param>
/// <param name="fileName">original display file name</param>
/// <param name="lastModified">last-modified timestamp supplied by the uploader</param>
/// <param name="attachToObject">type + id of the object the file attaches to</param>
/// <param name="notes">user notes; the literal text "null" is normalized to empty</param>
/// <param name="attachedByUserId">id of the uploading user</param>
/// <param name="ct">db context used to persist the FileAttachment row</param>
/// <returns>the persisted FileAttachment record</returns>
internal static async Task<FileAttachment> StoreFileAttachmentAsync(
string tempFilePath,
string contentType,
string fileName,
DateTime lastModified,
SockTypeId attachToObject,
string notes,
long attachedByUserId,
AyContext ct)
{
    //calculate hash (becomes the stored file name — content addressing)
    var hash = FileHash.GetChecksum(tempFilePath);
    //Move to folder based on hash
    var permanentPath = GetPermanentAttachmentPath(hash);
    EnsurePath(permanentPath);
    var permanentFilePath = Path.Combine(permanentPath, hash);
    //size must be read before the temp file is moved/deleted below
    var FileSize = new FileInfo(tempFilePath).Length;
    //See if the file was already uploaded, if so then ignore it for now
    if (File.Exists(permanentFilePath))
    {
        //delete the temp file, it's already stored
        File.Delete(tempFilePath);
    }
    else
    {
        System.IO.File.Move(tempFilePath, permanentFilePath);
    }
    //seems to be uploaded with the text null
    if (notes == "null") notes = string.Empty;
    //Build AyFileInfo
    FileAttachment fi = new FileAttachment()
    {
        StoredFileName = hash,
        DisplayFileName = fileName,
        Notes = notes,
        ContentType = contentType,
        AttachToObjectId = attachToObject.ObjectId,
        AttachToAType = attachToObject.SockType,
        LastModified = lastModified,
        Size = FileSize,
        AttachedByUserId = attachedByUserId
    };
    //Store in DB
    await ct.FileAttachment.AddAsync(fi);
    await ct.SaveChangesAsync();
    //Return AyFileInfo object
    return fi;
}
/// <summary>
/// Folder (no file name) where an attachment with the given hash is stored.
/// The first three characters of the checksum form one nested folder each, e.g.
/// hash f6a5b1236dbba1647257cc4646308326 lives under userfiles/f/6/a/.
/// </summary>
/// <param name="hash">content checksum used as the stored file name</param>
/// <returns>Path without the file</returns>
internal static string GetPermanentAttachmentPath(string hash)
{
    var shard1 = hash[0].ToString();
    var shard2 = hash[1].ToString();
    var shard3 = hash[2].ToString();
    return Path.Combine(ServerBootConfig.SOCKEYE_ATTACHMENT_FILES_PATH, shard1, shard2, shard3);
}
/// <summary>
/// Full path, including the file name, of the stored attachment with the given
/// hash. Delegates to <see cref="GetPermanentAttachmentPath"/> for the folder
/// so the hash-sharding scheme lives in exactly one place.
/// </summary>
/// <param name="hash">content checksum used as the stored file name</param>
internal static string GetPermanentAttachmentFilePath(string hash)
{
    return Path.Combine(GetPermanentAttachmentPath(hash), hash);
}
//////////////////////////////////////////////////////////
/// <summary>
/// Delete every file attachment belonging to the given object
/// (db rows and, where ref-counting allows, the stored files themselves).
/// </summary>
internal static async Task DeleteAttachmentsForObjectAsync(SockType sockType, long sockId, AyContext ct)
{
    var attachmentsToDelete = await ct.FileAttachment
        .Where(z => z.AttachToObjectId == sockId && z.AttachToAType == sockType)
        .ToListAsync();
    foreach (var attachment in attachmentsToDelete)
    {
        await DeleteFileAttachmentAsync(attachment, ct);
    }
}
/// <summary>
/// Delete a file attachment row; if no other attachment rows reference the
/// same stored file (attachments are deduplicated by content hash), the
/// physical file is deleted too, otherwise only the db pointer is removed.
/// </summary>
/// <param name="fileAttachmentToBeDeleted">the attachment row to remove</param>
/// <param name="ct">db context</param>
internal static async Task DeleteFileAttachmentAsync(FileAttachment fileAttachmentToBeDeleted, AyContext ct)
{
    //reference count must be taken BEFORE the row itself is removed
    var referenceCount = await ct.FileAttachment.LongCountAsync(z => z.StoredFileName == fileAttachmentToBeDeleted.StoredFileName);
    //Remove from the DB
    ct.FileAttachment.Remove(fileAttachmentToBeDeleted);
    await ct.SaveChangesAsync();
    if (referenceCount >= 2)
        return; //other rows still point at the stored file — keep it on disk
    //this was the last reference, remove the stored file completely
    var storedFilePath = Path.Combine(
        GetPermanentAttachmentPath(fileAttachmentToBeDeleted.StoredFileName),
        fileAttachmentToBeDeleted.StoredFileName);
    if (File.Exists(storedFilePath))
    {
        File.Delete(storedFilePath);
    }
}
/// <summary>
/// DANGER: erase all user attachment files — every file and subdirectory
/// beneath the attachment files folder.
/// </summary>
internal static void EraseEntireContentsOfAttachmentFilesFolder()
{
    var attachmentsRoot = new DirectoryInfo(ServerBootConfig.SOCKEYE_ATTACHMENT_FILES_PATH);
    foreach (FileInfo file in attachmentsRoot.EnumerateFiles())
    {
        file.Delete();
    }
    foreach (DirectoryInfo dir in attachmentsRoot.EnumerateDirectories())
    {
        dir.Delete(true);
    }
}
/// <summary>
/// Zip the entire attachment files folder into a date-stamped archive
/// ("{prepend}at-{date}.zip") in the backup folder. Errors are logged
/// (if a logger was supplied) and rethrown.
/// </summary>
/// <param name="demandFileNamePrepend">prefix for on-demand backups (may be empty)</param>
/// <param name="log">optional logger for failure reporting</param>
internal static void BackupAttachments(string demandFileNamePrepend, ILogger log = null)
{
    try
    {
        var zipName = $"{demandFileNamePrepend}at-{FileUtil.GetSafeDateFileName()}.zip";//presentation issue so don't use UTC for this one
        var zipPath = GetFullPathForBackupFile(zipName);
        System.IO.Compression.ZipFile.CreateFromDirectory(ServerBootConfig.SOCKEYE_ATTACHMENT_FILES_PATH, zipPath);
    }
    catch (Exception ex)
    {
        log?.LogError(ex, "FileUtil::BackupAttachments");
        throw;
    }
}
/// <summary>
/// Free space, in bytes, on the drive that hosts the attachment files folder.
/// </summary>
internal static long AttachmentFilesDriveAvailableSpace()
{
    var driveRoot = Path.GetPathRoot(ServerBootConfig.SOCKEYE_ATTACHMENT_FILES_PATH);
    return new System.IO.DriveInfo(driveRoot).AvailableFreeSpace;
}
/// <summary>
/// Lazily enumerate the full paths of every file anywhere beneath the
/// attachment files folder (all shard subdirectories included).
/// </summary>
internal static IEnumerable<string> GetAllAttachmentFilePaths()
{
    return Directory.EnumerateFiles(
        ServerBootConfig.SOCKEYE_ATTACHMENT_FILES_PATH,
        "*",
        SearchOption.AllDirectories);
}
/// <summary>
/// Confirm if a file exists in the attachment folder
/// </summary>
/// <param name="fileName">name of attachment file </param>
internal static bool AttachmentFileExists(string fileName)
{
if (string.IsNullOrWhiteSpace(fileName))
return false;
// var utilityFilePath = GetFullPathForBackupFile(fileName);
return File.Exists(GetPermanentAttachmentFilePath(fileName));
}
/// <summary>
/// Heuristic: does this full path look like one of our stored attachment
/// files? Stored attachments have no extension, a hash-style name of at least
/// 32 characters, and live in the shard folder derived from their own name.
/// Example stored names:
///   BA7816BF8F01CFEA414140DE5DAE2223B00361A396177A9CB410FF61F20015AD
///   40CE02D157C845E42AA4EF7DCC93A74B0179649C8D0A806B2F985D34AA7385CE
/// </summary>
/// <param name="fullPathName">full path of a candidate file</param>
internal static bool AppearsToBeAnOrphanedAttachment(string fullPathName)
{
    // none of our stored files carry an extension
    if (Path.HasExtension(fullPathName))
        return false;
    var fileName = Path.GetFileName(fullPathName);
    // hash names are at least 32 chars (likely 64, but don't rely on that)
    if (fileName.Length < 32)
        return false;
    // a genuine stored attachment of this name would live at exactly this path
    var expectedFullPath = GetPermanentAttachmentFilePath(fileName);
    return string.Equals(fullPathName, expectedFullPath, StringComparison.Ordinal);
}
#endregion attachment stuff
#region General utilities
/// <summary>
/// Get a random file name (as produced by Path.GetRandomFileName).
/// </summary>
internal static string NewRandomFileName => Path.GetRandomFileName();
//https://stackoverflow.com/a/11124118/8939
// Returns the human-readable file size for an arbitrary, 64-bit file size
// The default format is "0.### XB", e.g. "4.2 KiB" or "1.434 GiB"
public static string GetBytesReadable(long i)
{
    // Magnitude used only for threshold comparison; the sign of i flows through
    // the shift/divide so negative inputs format with a leading minus.
    long magnitude = (i < 0 ? -i : i);
    // Threshold table, largest unit first: (lower bound on |i|, suffix,
    // right-shift down to the unit *above* the target before the final /1024).
    (long Threshold, string Suffix, int Shift)[] scale =
    {
        (0x1000000000000000, "EiB", 50), // Exabyte
        (0x4000000000000,    "PiB", 40), // Petabyte
        (0x10000000000,      "TiB", 30), // Terabyte
        (0x40000000,         "GiB", 20), // Gigabyte
        (0x100000,           "MiB", 10), // Megabyte
        (0x400,              "KiB", 0),  // Kilobyte
    };
    foreach (var (threshold, suffix, shift) in scale)
    {
        if (magnitude >= threshold)
        {
            // Integer-shift down, then one floating divide by 1024 to get a
            // fractional value in the target unit (same math as the original ladder).
            double readable = (i >> shift) / 1024.0;
            return readable.ToString("0.### ") + suffix;
        }
    }
    return i.ToString("0 B"); // Byte
}
/// <summary>
/// Size/count summary for the attachments (user files) folder.
/// </summary>
internal static FolderSizeInfo GetAttachmentFolderSizeInfo() =>
    GetDirectorySize(new DirectoryInfo(ServerBootConfig.SOCKEYE_ATTACHMENT_FILES_PATH));
/// <summary>
/// Size/count summary for the utility / backup files folder.
/// </summary>
internal static FolderSizeInfo GetBackupFolderSizeInfo() =>
    GetDirectorySize(new DirectoryInfo(ServerBootConfig.SOCKEYE_BACKUP_FILES_PATH));
/// <summary>
/// Calculate disk space usage under <paramref name="root"/>. The *WithChildren totals
/// always cover the full recursive tree; <paramref name="levels"/> only controls how
/// many levels of per-subdirectory detail are attached via <see cref="FolderSizeInfo.Children"/>.
/// With levels = 0 (the default) the direct children are attached but their own
/// Children lists are left null; deeper levels attach deeper detail.
///
/// FROM https://stackoverflow.com/a/28094795/8939
///
/// NOTE(review): GetFiles/GetDirectories can throw (e.g. access denied); callers
/// presumably only point this at folders the service owns — confirm.
/// </summary>
/// <param name="root">directory to measure</param>
/// <param name="levels">how many levels of Children detail to materialize</param>
/// <returns>a populated FolderSizeInfo for <paramref name="root"/></returns>
public static FolderSizeInfo GetDirectorySize(DirectoryInfo root, int levels = 0)
{
    var currentDirectory = new FolderSizeInfo();
    // Add file sizes (direct children only; the recursion below accumulates the rest).
    FileInfo[] fis = root.GetFiles();
    currentDirectory.Size = 0;
    foreach (FileInfo fi in fis)
    {
        currentDirectory.Size += fi.Length;
    }
    // Add subdirectory sizes.
    DirectoryInfo[] dis = root.GetDirectories();
    currentDirectory.Path = root;
    currentDirectory.SizeWithChildren = currentDirectory.Size;
    currentDirectory.DirectoryCount = dis.Length;
    currentDirectory.DirectoryCountWithChildren = dis.Length;
    currentDirectory.FileCount = fis.Length;
    currentDirectory.FileCountWithChildren = fis.Length;
    // Children are only materialized while still within the requested detail depth;
    // once levels goes negative the recursion keeps totalling but stops attaching detail.
    if (levels >= 0)
        currentDirectory.Children = new List<FolderSizeInfo>();
    foreach (DirectoryInfo di in dis)
    {
        // Always recurse so the *WithChildren totals are accurate for the whole subtree.
        var dd = GetDirectorySize(di, levels - 1);
        if (levels >= 0)
            currentDirectory.Children.Add(dd);
        currentDirectory.SizeWithChildren += dd.SizeWithChildren;
        currentDirectory.DirectoryCountWithChildren += dd.DirectoryCountWithChildren;
        currentDirectory.FileCountWithChildren += dd.FileCountWithChildren;
    }
    return currentDirectory;
}
/// <summary>
/// Disk-usage summary for one directory, as produced by <see cref="GetDirectorySize"/>.
/// "WithChildren" figures cover the entire subtree; the plain figures cover only
/// this directory's immediate contents.
/// </summary>
public class FolderSizeInfo
{
    // The directory this record describes.
    public DirectoryInfo Path { get; set; }
    // Total bytes of all files in this directory and every subdirectory.
    public long SizeWithChildren { get; set; }
    // Bytes of the files directly inside this directory.
    public long Size { get; set; }
    // Number of immediate subdirectories.
    public long DirectoryCount { get; set; }
    // Number of directories in the whole subtree.
    public long DirectoryCountWithChildren { get; set; }
    // Number of files directly inside this directory.
    public long FileCount { get; set; }
    // Number of files in the whole subtree.
    public long FileCountWithChildren { get; set; }
    // Per-subdirectory detail; null beyond the requested detail depth.
    public List<FolderSizeInfo> Children { get; set; }
}
// Deliberately local time, not UTC: these names appear in backup file names etc.,
// a presentation concern for the admin rather than a database concern.
public static string GetSafeDateFileName() => DateTime.Now.ToString("yyyyMMddHHmmssfff");
/// <summary>
/// Sanitize an arbitrary string into a legal file name by mapping every character
/// the platform forbids to '_'. Blank input (or an all-whitespace result) yields "no_name".
/// </summary>
//https://stackoverflow.com/a/3678296/8939
public static string StringToSafeFileName(string fileName)
{
    const string fallback = "no_name";
    if (string.IsNullOrWhiteSpace(fileName))
        return fallback;
    char[] forbidden = Path.GetInvalidFileNameChars();
    var buffer = new char[fileName.Length];
    for (int idx = 0; idx < fileName.Length; idx++)
    {
        char c = fileName[idx];
        // Keep valid characters, substitute '_' for anything the OS rejects.
        buffer[idx] = Array.IndexOf(forbidden, c) >= 0 ? '_' : c;
    }
    var sanitized = new string(buffer);
    return string.IsNullOrWhiteSpace(sanitized) ? fallback : sanitized;
}
/// <summary>
/// Expand environment variables (e.g. %TEMP%) in a configured path, and resolve a
/// *leading* '~' to the current user's home folder (the usual *nix shorthand).
/// </summary>
/// <param name="path">raw path string from configuration; may be null/empty</param>
/// <returns>the decoded path, or string.Empty for null/whitespace input</returns>
public static string StringPathDecodeEnvironmentVariables(string path)
{
    if (string.IsNullOrWhiteSpace(path))
    {
        return string.Empty;
    }
    // Home-folder shorthand: only honored when the path *starts* with '~'.
    // The previous code replaced '~' found anywhere in the string, which mangled
    // legitimate names containing '~' (e.g. Windows 8.3 short paths like C:\PROGRA~1).
    if (path.StartsWith("~"))
    {
        // For this form no environment variables are expanded: '~' is the only
        // special token on Linux, mirroring shell behavior.
        return Environment.GetFolderPath(Environment.SpecialFolder.UserProfile) + path.Substring(1);
    }
    return Environment.ExpandEnvironmentVariables(path);
}
#endregion general utilities
#region licensing related utility to qualify upgradability
//https://www.meziantou.net/getting-the-date-of-build-of-a-dotnet-assembly-at-runtime.htm
/// <summary>
/// Linker timestamp of the given assembly, read from its PE header on disk.
/// </summary>
public static DateTime GetLinkerTimestampUtc(Assembly assembly) =>
    GetLinkerTimestampUtc(assembly.Location);
/// <summary>
/// Read the PE linker timestamp from an executable image on disk.
/// </summary>
/// <param name="filePath">path to a PE (exe/dll) file</param>
/// <returns>the link time as a UTC DateTime</returns>
public static DateTime GetLinkerTimestampUtc(string filePath)
{
    const int peHeaderOffset = 60;        // offset of e_lfanew in the DOS header
    const int linkerTimestampOffset = 8;  // offset of TimeDateStamp within the PE header
    var bytes = new byte[2048];
    using (var file = new FileStream(filePath, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
    {
        // Fill the buffer completely (or to EOF): a single Read() may legally return
        // fewer bytes than requested, which the previous code silently ignored.
        int total = 0;
        int read;
        while (total < bytes.Length && (read = file.Read(bytes, total, bytes.Length - total)) > 0)
        {
            total += read;
        }
    }
    var headerPos = BitConverter.ToInt32(bytes, peHeaderOffset);
    var secondsSince1970 = BitConverter.ToInt32(bytes, headerPos + linkerTimestampOffset);
    var epoch = new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc);
    return epoch.AddSeconds(secondsSince1970);
}
#endregion
}//eoc
}//eons

77
server/util/Hasher.cs Normal file
View File

@@ -0,0 +1,77 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Security.Cryptography;
using Microsoft.AspNetCore.Cryptography.KeyDerivation;
namespace Sockeye.Util
{
public static class Hasher
{
    /// <summary>
    /// Derive a password hash using PBKDF2 (HMAC-SHA512, 10000 iterations, 64-byte key).
    /// </summary>
    /// <param name="Salt">base64-encoded salt</param>
    /// <param name="Password">plain-text password</param>
    /// <returns>base64-encoded derived key</returns>
    public static string hash(string Salt, string Password)
    {
        //adapted from here:
        //https://docs.microsoft.com/en-us/aspnet/core/security/data-protection/consumer-apis/password-hashing
        // NOTE: the PRF, iteration count and key size must never change —
        // existing stored hashes would stop matching.
        string hashed = Convert.ToBase64String(KeyDerivation.Pbkdf2(
            password: Password,
            salt: Convert.FromBase64String(Salt),
            prf: KeyDerivationPrf.HMACSHA512,
            iterationCount: 10000,
            numBytesRequested: 512 / 8));
        return hashed;
    }
    //Generate salt
    /*
    Used for many things:
    DBID, JWT secret key when none provided, User Salt for login / password,
    temporary 2fa codes, download tokens,
    temporary user pw / login when newly created and haven't been set yet
    */
    /// <summary>
    /// 32 cryptographically-random non-zero bytes, base64 encoded.
    /// </summary>
    public static string GenerateSalt()
    {
        var salt = new byte[32];
        // Dispose the RNG — the previous code leaked it.
        using (var random = RandomNumberGenerator.Create())
        {
            random.GetNonZeroBytes(salt);
        }
        return Convert.ToBase64String(salt);
    }
    /// <summary>
    /// Random lowercase-alphanumeric string (ambiguous 'l' deliberately excluded).
    /// </summary>
    public static string GetRandomAlphanumericString(int length)
    {
        const string alphanumericCharacters = "0123456789abcdefghijkmnopqrstuvwxyz";
        return GetRandomString(length, alphanumericCharacters);
    }
    /// <summary>
    /// Random string of <paramref name="length"/> characters drawn from
    /// <paramref name="characterSet"/> using a cryptographic RNG.
    /// </summary>
    /// <exception cref="ArgumentException">length is negative/too large, or the set is empty</exception>
    /// <exception cref="ArgumentNullException">characterSet is null</exception>
    public static string GetRandomString(int length, IEnumerable<char> characterSet)
    {
        if (length < 0)
            throw new ArgumentException("length must not be negative", nameof(length));
        if (length > int.MaxValue / 8)
            throw new ArgumentException("length is too big", nameof(length));
        if (characterSet == null)
            throw new ArgumentNullException(nameof(characterSet));
        var characterArray = characterSet.Distinct().ToArray();
        if (characterArray.Length == 0)
            throw new ArgumentException("characterSet must not be empty", nameof(characterSet));
        var bytes = new byte[length * 8];
        // Dispose the RNG — the previous code leaked it.
        using (var rng = RandomNumberGenerator.Create())
        {
            rng.GetBytes(bytes);
        }
        var result = new char[length];
        for (int i = 0; i < length; i++)
        {
            // NOTE(review): modulo over the set size carries a vanishingly small
            // bias for a 64-bit value; acceptable for these tokens.
            ulong value = BitConverter.ToUInt64(bytes, i * 8);
            result[i] = characterArray[value % (uint)characterArray.Length];
        }
        return new string(result);
    }
}//eoc
}//eons

69
server/util/ImportUtil.cs Normal file
View File

@@ -0,0 +1,69 @@
using System;
using System.Reflection;
using System.Collections.Generic;
namespace Sockeye.Util
{
internal static class ImportUtil
{
    /// <summary>
    /// Copies the data of one object to another. The target object 'pulls' members
    /// of the source: any public instance field or property of the target whose name
    /// appears in <paramref name="propertiesToUpdate"/> and also exists on the source
    /// is copied over.
    ///
    /// Shallow copy only — nested reference types are copied by assignment,
    /// not member-by-member.
    /// </summary>
    /// <param name="source">The source object to copy from</param>
    /// <param name="target">The object to copy to</param>
    /// <param name="propertiesToUpdate">A list of member names that should be copied</param>
    public static void Update(object source, object target, List<string> propertiesToUpdate)
    {
        var sourceType = source.GetType();
        foreach (MemberInfo targetMember in target.GetType().GetMembers(BindingFlags.Public | BindingFlags.Instance))
        {
            var memberName = targetMember.Name;
            // Skip anything not selected for copying.
            if (!propertiesToUpdate.Contains(memberName))
                continue;
            switch (targetMember.MemberType)
            {
                case MemberTypes.Field:
                {
                    var sourceField = sourceType.GetField(memberName);
                    if (sourceField != null)
                        ((FieldInfo)targetMember).SetValue(target, sourceField.GetValue(source));
                    break;
                }
                case MemberTypes.Property:
                {
                    var targetProperty = (PropertyInfo)targetMember;
                    var sourceProperty = sourceType.GetProperty(memberName, BindingFlags.Public | BindingFlags.Instance);
                    if (sourceProperty != null && targetProperty.CanWrite && sourceProperty.CanRead)
                        targetProperty.SetValue(target, sourceProperty.GetValue(source, null), null);
                    break;
                }
            }
        }
    }
    /// <summary>
    /// Tag used to mark imported records, stamped with the local date/time.
    /// </summary>
    public static string GetImportTag()
    {
        return "zz-import-" + DateTime.Now.ToString("yyyyMMddHHmmss");
    }
}//eoc
}//eons

View File

@@ -0,0 +1,31 @@
using System.Net;
using Microsoft.AspNetCore.Http;
//https://stackoverflow.com/a/41242493/8939
public static class IsLocalExtension
{
    private const string NullIpAddress = "::1";
    /// <summary>
    /// True when the request appears to originate from the machine the server runs on.
    /// </summary>
    public static bool IsLocal(this HttpRequest req)
    {
        var connection = req.HttpContext.Connection;
        if (!connection.RemoteIpAddress.IsSet())
        {
            // No usable remote address at all (in-process / test-server request):
            // treat as local.
            return true;
        }
        if (connection.LocalIpAddress.IsSet())
        {
            // Both endpoints known: local only when they are the same address.
            return connection.RemoteIpAddress.Equals(connection.LocalIpAddress);
        }
        // Only the remote end is known: local when it is a loopback address.
        return IPAddress.IsLoopback(connection.RemoteIpAddress);
    }
    // An address counts as "set" when present and not the IPv6 loopback literal.
    private static bool IsSet(this IPAddress address)
    {
        return address != null && address.ToString() != NullIpAddress;
    }
}

106
server/util/JsonUtil.cs Normal file
View File

@@ -0,0 +1,106 @@
using System;
using System.Collections.Generic;
using System.Linq;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
using Newtonsoft.Json.Serialization;
namespace Sockeye.Util
{
internal static class JsonUtil
{
    /// <summary>
    /// Clean a JSON string fragment by re-serializing it without whitespace.
    /// Safe to call with anything: empty values and non-JSON text are returned unchanged.
    /// </summary>
    /// <param name="jsonIn">candidate JSON text</param>
    /// <returns>the compacted JSON, or the input untouched when not an object/array</returns>
    public static string CompactJson(string jsonIn)
    {
        if (string.IsNullOrWhiteSpace(jsonIn))
        {
            return jsonIn;
        }
        JToken parsed;
        if (jsonIn.StartsWith("["))
        {
            parsed = JArray.Parse(jsonIn);
        }
        else if (jsonIn.StartsWith("{"))
        {
            parsed = JObject.Parse(jsonIn);
        }
        else
        {
            // Neither an object nor an array: pass through untouched.
            return jsonIn;
        }
        return JsonConvert.SerializeObject(parsed, Formatting.None);
    }
    /// <summary>
    /// Utility used by biz classes to extract all the custom field data as text strings
    /// suitable for search indexing. Looks only at the values, not the declared field types.
    /// </summary>
    /// <param name="jsonIn">custom-fields JSON object (may be null/empty)</param>
    /// <returns>searchable string values, in document order</returns>
    public static List<string> GetCustomFieldsAsStringArrayForSearchIndexing(string jsonIn)
    {
        var values = new List<string>();
        if (string.IsNullOrWhiteSpace(jsonIn))
            return values;
        foreach (KeyValuePair<string, JToken> pair in JObject.Parse(jsonIn))
        {
            // Bools are useless for text search, and dates invite time zone /
            // locale-format hell while being better served by filters or sorts —
            // so both kinds are skipped; everything else is plausibly search-worthy.
            var token = pair.Value;
            if (token.Type == JTokenType.Boolean || token.Type == JTokenType.Date)
                continue;
            values.Add(token.Value<string>());
        }
        return values;
    }
    /// <summary>
    /// True when the token is null/undefined/JSON null, an empty string,
    /// or an object/array with no values.
    /// </summary>
    public static bool JTokenIsNullOrEmpty(JToken token)
    {
        if (token == null)
            return true;
        switch (token.Type)
        {
            case JTokenType.Null:
            case JTokenType.Undefined:
                return true;
            case JTokenType.Array:
            case JTokenType.Object:
                return !token.HasValues;
            case JTokenType.String:
                return token.ToString() == String.Empty;
            default:
                return false;
        }
    }
    //Contract resolver used for exporting to file translations and report templates
    //while ignoring the specified properties
    public class ShouldSerializeContractResolver : DefaultContractResolver
    {
        private readonly IEnumerable<string> _excludePropertyNames;
        public ShouldSerializeContractResolver(IEnumerable<string> excludePropertyNames)
        {
            _excludePropertyNames = excludePropertyNames;
        }
        protected override IList<JsonProperty> CreateProperties(Type type, MemberSerialization memberSerialization)
        {
            // Serialize everything except the excluded property names.
            return base.CreateProperties(type, memberSerialization)
                .Where(p => !_excludePropertyNames.Contains(p.PropertyName))
                .ToList();
        }
    }
}//eoc
}//eons

91
server/util/Mailer.cs Normal file
View File

@@ -0,0 +1,91 @@
using MailKit.Net.Smtp;
using MimeKit;
using System;
using System.Threading.Tasks;
using MimeKit.Text;
using System.IO;
namespace Sockeye.Util
{
public interface IMailer
{
    /// <summary>
    /// Send an email using the supplied SMTP settings; optionally attach a PDF file
    /// (with an optional forced attachment file name) and/or provide an HTML body.
    /// NOTE(review): this parameter is named 'attachPDF' here but 'attachPDFPath' in
    /// the Mailer implementation — callers using named arguments against the interface
    /// must use 'attachPDF'; consider aligning the two names.
    /// </summary>
    Task SendEmailAsync(string email, string subject, string body, Sockeye.Models.GlobalOpsNotificationSettings smtpSettings, string attachPDF = null, string forceFileName = null, string htmlBody = null);
}
public class Mailer : IMailer
{
    public Mailer()
    {
    }
    /// <summary>
    /// Send an email using the supplied SMTP settings. Optionally attaches a PDF file
    /// (renamed to <paramref name="forceFileName"/> when given) and/or an HTML body.
    /// </summary>
    /// <exception cref="InvalidOperationException">
    /// wraps any underlying mail failure; the original exception is preserved as InnerException
    /// (previously it was discarded, losing the stack trace)
    /// </exception>
    public async Task SendEmailAsync(string email, string subject, string body, Sockeye.Models.GlobalOpsNotificationSettings smtpSettings, string attachPDFPath = null, string forceFileName = null, string htmlBody = null)
    {
        try
        {
            var message = new MimeMessage();
            message.From.Add(new MailboxAddress(smtpSettings.NotifyFromAddress, smtpSettings.NotifyFromAddress));
            message.To.Add(MailboxAddress.Parse(email));
            message.Subject = subject;
            try
            {
                if (!string.IsNullOrWhiteSpace(attachPDFPath))
                {
                    var attachment = new MimePart("application/pdf", "pdf")
                    {
                        Content = new MimeContent(File.OpenRead(attachPDFPath), ContentEncoding.Default),
                        ContentDisposition = new ContentDisposition(ContentDisposition.Attachment),
                        ContentTransferEncoding = ContentEncoding.Base64,
                        FileName = Path.GetFileName(attachPDFPath)
                    };
                    if (!string.IsNullOrWhiteSpace(forceFileName))
                        attachment.FileName = forceFileName;
                    var multipart = new Multipart("mixed");
                    if (!string.IsNullOrWhiteSpace(body))
                        multipart.Add(new TextPart(TextFormat.Plain) { Text = body });
                    multipart.Add(attachment);
                    message.Body = multipart;
                }
                else
                {
                    var builder = new BodyBuilder();
                    if (!string.IsNullOrWhiteSpace(body))
                        builder.TextBody = body;
                    if (!string.IsNullOrWhiteSpace(htmlBody))
                        builder.HtmlBody = htmlBody;
                    message.Body = builder.ToMessageBody();
                }
                using (var client = new SmtpClient())
                {
                    // NOTE(review): accepting any server certificate disables TLS validation;
                    // presumably deliberate for self-signed internal SMTP servers — confirm.
                    client.ServerCertificateValidationCallback = (s, c, h, e) => true;
                    bool useSsl = smtpSettings.ConnectionSecurity != Biz.NotifyMailSecurity.None;
                    await client.ConnectAsync(smtpSettings.SmtpServerAddress, smtpSettings.SmtpServerPort, useSsl);
                    await client.AuthenticateAsync(smtpSettings.SmtpAccount, smtpSettings.SmtpPassword);
                    await client.SendAsync(message);
                    await client.DisconnectAsync(true);
                }
            }
            finally
            {
                // Always release the attachment file stream, even when sending fails
                // (previously it leaked on any exception before the send completed).
                DisposeStreamsInMimeMessage(message);
            }
        }
        catch (Exception e)
        {
            throw new InvalidOperationException(e.Message, e);
        }
    }
    /// <summary>Dispose any content streams held by the parts of a built message.</summary>
    public static void DisposeStreamsInMimeMessage(MimeMessage msg)
    {
        foreach (var part in msg.BodyParts)
            (part as MimePart)?.Content?.Stream?.Dispose();
    }
}
}

19
server/util/MoneyUtil.cs Normal file
View File

@@ -0,0 +1,19 @@
using System;
namespace Sockeye.Util
{
internal static class MoneyUtil
{
    /// <summary>
    /// Rounding for monetary values: 2 decimal places, halves rounded away from zero
    /// (so 1.005 becomes 1.01 and -1.005 becomes -1.01).
    /// </summary>
    /// <param name="d">raw amount</param>
    /// <returns>the amount rounded to cents</returns>
    public static decimal Round(decimal d)
    {
        return Math.Round(d, 2, MidpointRounding.AwayFromZero);
    }
}//eoc
}//eons

View File

@@ -0,0 +1,49 @@
using System;
using System.Collections.Generic;
namespace Sockeye.Util
{
//Object cache - used by biz objects during get report data to temporarily cache values
//from the database for a single request; saves db calls and formatting
internal class ObjectCache
{
    // Backing store; the composite lookup key is "{key}{id}".
    private readonly Dictionary<string, object> _cache = new Dictionary<string, object>();
    // Build the composite key from a name and an optional record id.
    private static string MakeKey(string key, long? id) => $"{key}{id}";
    // Remove every cached entry.
    internal void Clear() => _cache.Clear();
    // Store (or overwrite) a value under key+id.
    internal void Add(object value, string key, long? id = 0) => _cache[MakeKey(key, id)] = value;
    // Fetch a cached value, or null when absent.
    internal object Get(string key, long? id = 0)
    {
        return _cache.TryGetValue(MakeKey(key, id), out var value) ? value : null;
    }
    // True when a value is cached under key+id.
    internal bool Has(string key, long? id = 0) => _cache.ContainsKey(MakeKey(key, id));
}//eoc
}//eons

View File

@@ -0,0 +1,164 @@
using System;
using System.Collections.Concurrent;
using System.Diagnostics;
using System.Threading.Tasks;
using Sockeye.Biz;
using Microsoft.Extensions.Logging;
namespace Sockeye.Util
{
//Track processes and kill any that go past their expiry date
internal static class ReportRenderManager
{
    // All render jobs currently known to the server. A job's process id stays -1
    // until SetProcess is called once the external renderer has started.
    // NOTE(review): ConcurrentBag.TryTake removes an *arbitrary* item, not the one
    // passed via the out argument — with several concurrent renders the wrong
    // instance record could be dropped from tracking; a ConcurrentDictionary keyed
    // by JobId would be safer. Confirm whether concurrent renders actually occur.
    internal static ConcurrentBag<ReportRenderInstanceInfo> _baginstances;
    static ReportRenderManager()
    {
        _baginstances = new ConcurrentBag<ReportRenderInstanceInfo>();
    }
    // Bookkeeping for one render: the job id, its UTC expiry time, and the
    // external renderer's process id (-1 while the renderer is starting up).
    internal class ReportRenderInstanceInfo
    {
        internal int ReporterProcessId { get; set; }
        internal DateTime Expires { get; set; }
        internal Guid JobId { get; set; }
        internal ReportRenderInstanceInfo(Guid jobId, DateTime expires)
        {
            JobId = jobId;
            Expires = expires;
            ReporterProcessId = -1;
        }
    }
    /// <summary>
    /// Sweep all tracked renders and force-close any whose expiry time has passed.
    /// </summary>
    internal static async Task KillExpiredRenders(ILogger log)
    {
        log.LogDebug("Clear potential expired render jobs check");
        //check for expired and remove
        // Snapshot first: the bag may be mutated while we iterate.
        var Instances = _baginstances.ToArray();
        var dtNow = DateTime.UtcNow;
        foreach (ReportRenderInstanceInfo i in Instances)
        {
            if (i.Expires < dtNow)
            {
                log.LogDebug($"attempting close of expired process {i.ReporterProcessId} for job {i.JobId}");
                await CloseRenderProcess(i, log, true);
            }
        }
    }
    /// <summary>
    /// Stop tracking (and if necessary kill) the renderer process for one instance.
    /// When <paramref name="force"/> is true the job record is also finalized:
    /// marked Failed when it expired, or Completed (cancelled) otherwise.
    /// Returns true when the slot can be considered freed.
    /// </summary>
    internal static async Task<bool> CloseRenderProcess(ReportRenderInstanceInfo instance, ILogger log, bool force)
    {
        if (force)
            log.LogDebug($"Force CloseRenderProcess on report render instance id {instance.ReporterProcessId} expiry {instance.Expires.ToString()} utc");
        else
            log.LogDebug($"Normal CloseRenderProcess on report render instance id {instance.ReporterProcessId}");
        try
        {
            //either way, clear the job so the client gets informed
            if (force)
            {
                if (instance.Expires < DateTime.UtcNow)
                {
                    // Timed out: record the timeout details and fail the job.
                    var json = Newtonsoft.Json.JsonConvert.SerializeObject(new { rendererror = new { timeout = true, timeoutsetting = ServerBootConfig.SOCKEYE_REPORT_RENDERING_TIMEOUT } }, Newtonsoft.Json.Formatting.None);
                    await JobsBiz.LogJobAsync(instance.JobId, json);
                    await JobsBiz.UpdateJobStatusAsync(instance.JobId, JobStatus.Failed);
                }
                else
                {
                    // Not expired but force-closed: treat as a user cancellation.
                    var json = Newtonsoft.Json.JsonConvert.SerializeObject(new { rendererror = new { cancelled = true} }, Newtonsoft.Json.Formatting.None);
                    await JobsBiz.LogJobAsync(instance.JobId, json);
                    await JobsBiz.UpdateJobStatusAsync(instance.JobId, JobStatus.Completed);
                }
            }
            if (instance.ReporterProcessId != -1)//if a job doesn't have a process id yet it will be -1
            {
                var p = Process.GetProcessById(instance.ReporterProcessId);
                if (p != null)
                {
                    //we have an existing process
                    //try to kill it (entire process tree)
                    p.Kill(true);
                    if (p.HasExited == false)
                    {
                        log.LogWarning($"Expired report render instance id {instance.ReporterProcessId} could not be force closed");
                        return false;//can't kill it so can't free up a slot
                    }
                }
            }
            //remove it from the list, it's either gone or killed at this point
            //this would not be unexpected since it will normally just close on it's own
            //at the finally block in render report
            _baginstances.TryTake(out instance);
            return true;//process that was there is now not there so while not perfect system we will consider it free
        }
        catch (ArgumentException)
        {
            //do nothing, this is normal, the process could not be found and this means it's already been removed:
            //ArgumentException
            //The process specified by the processId parameter is not running. The identifier might be expired.
            _baginstances.TryTake(out instance);
            return true;
        }
    }
    /// <summary>
    /// Register a new render job with an expiry derived from the configured timeout.
    /// </summary>
    internal static void AddJob(Guid jobId, ILogger log)
    {
        log.LogDebug($"AddJob - {jobId} to the collection");
        _baginstances.Add(new ReportRenderInstanceInfo(jobId, DateTime.UtcNow.AddMinutes(ServerBootConfig.SOCKEYE_REPORT_RENDERING_TIMEOUT)));
        log.LogDebug($"AddJob - there are currently {_baginstances.Count} instances in the collection");
    }
    /// <summary>
    /// Associate a started renderer process id with an already-registered job.
    /// </summary>
    internal static void SetProcess(Guid jobId, int processId, ILogger log)
    {
        log.LogDebug($"SetProcess - setting {jobId} to render process id {processId}");
        foreach (var i in _baginstances)
        {
            if (i.JobId == jobId)
            {
                i.ReporterProcessId = processId;
                break;
            }
        }
    }
    /// <summary>
    /// Close (and with force, finalize) the render for a job, then stop tracking it.
    /// </summary>
    internal static async Task RemoveJob(Guid jobId, ILogger log, bool force)
    {
        foreach (var i in _baginstances)
        {
            if (i.JobId == jobId)
            {
                await CloseRenderProcess(i, log, force);
                break;
            }
        }
    }
    /// <summary>
    /// True while the job is still tracked (i.e. has not been cancelled/expired);
    /// render loops poll this to know whether to continue.
    /// </summary>
    internal static bool KeepGoing(Guid jobId)
    {
        //if job id is empty it means it was called from outside of a job (report designer get data for example)
        if(jobId==Guid.Empty) return true;
        foreach (var i in _baginstances)
        {
            if (i.JobId == jobId)
            {
                return true;
            }
        }
        return false;
    }
}//eoc
}//eons

View File

@@ -0,0 +1,22 @@
using System;
namespace Sockeye.Util
{
/// <summary>
/// Thrown when a report render exceeds the configured rendering timeout.
/// Standard three-constructor exception shape.
/// </summary>
public class ReportRenderTimeOutException : Exception
{
    public ReportRenderTimeOutException()
    {
    }
    public ReportRenderTimeOutException(string message)
        : base(message)
    {
    }
    public ReportRenderTimeOutException(string message, Exception inner)
        : base(message, inner)
    {
    }
}
}//eons

View File

@@ -0,0 +1,52 @@
using System;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
namespace Sockeye.Util
{
/// <summary>
/// Simple fixed-delay retry wrapper for synchronous operations.
/// </summary>
public static class RetryHelper
{
    /// <summary>
    /// Run <paramref name="operation"/>, retrying on any exception up to
    /// <paramref name="times"/> attempts with a fixed <paramref name="delay"/>
    /// between attempts. The final failure is rethrown to the caller;
    /// earlier failures are logged.
    /// </summary>
    /// <param name="times">total number of attempts before giving up</param>
    /// <param name="delay">pause between attempts</param>
    /// <param name="log">logger used to record intermediate failures</param>
    /// <param name="logPrepend">text prefixed to each log entry to identify the caller</param>
    /// <param name="operation">the work to attempt</param>
    public static void RetryOnException(int times, TimeSpan delay, ILogger log, string logPrepend, Action operation)
    {
        var attempts = 0;
        do
        {
            try
            {
                attempts++;
                operation();
                break; // Success! Exit the loop.
            }
            catch (Exception ex)
            {
                if (attempts == times)
                    throw; // out of attempts — let the caller see the failure
                log.LogError(ex, $"{logPrepend} Exception caught on attempt {attempts} of {times} - will retry after delay {delay}");
                // This method is synchronous; Thread.Sleep avoids the sync-over-async
                // Task.Delay(...).Wait() pattern used previously (which wraps any
                // interruption in an AggregateException and burns a task continuation).
                System.Threading.Thread.Sleep(delay);
            }
        } while (true);
    }
}
}

130
server/util/RunProgram.cs Normal file
View File

@@ -0,0 +1,130 @@
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Runtime.InteropServices;
using Microsoft.Extensions.Logging;
namespace Sockeye.Util
{
public static class RunProgram
{
    /// <summary>
    /// Run an external command and return its captured output as a string,
    /// dispatching to a platform-specific launcher (cmd.exe on Windows,
    /// /bin/sh on Linux, direct execution on macOS).
    /// Errors are logged (when a logger is supplied) and rethrown;
    /// an unrecognized platform raises PlatformNotSupportedException.
    /// </summary>
    /// <param name="cmd">program or shell command to run</param>
    /// <param name="arguments">argument string passed through to the command</param>
    /// <param name="log">optional logger for failures</param>
    /// <param name="waitForExitTimeOut">milliseconds to wait for exit; a timeout is appended to the returned text rather than thrown</param>
    public static string Run(string cmd, string arguments, ILogger log = null, int waitForExitTimeOut = int.MaxValue)
    {
        try
        {
            if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
            {
                return RunWindows(cmd, arguments, waitForExitTimeOut);
            }
            else if (RuntimeInformation.IsOSPlatform(OSPlatform.OSX))
            {
                return RunOSX(cmd, arguments, waitForExitTimeOut);
            }
            else if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
            {
                return RunLinuxShell(cmd, arguments, waitForExitTimeOut);
            }
        }
        catch (Exception ex)
        {
            if (log != null)
            {
                log.LogError(ex, $"RunProgram error running command:{cmd} {arguments}");
            }
            throw;
        }
        throw new PlatformNotSupportedException();
    }
    // Windows: run via "cmd.exe /C ..." so shell features (piping, etc.) work.
    // Returns stdout followed by stderr, with a timeout notice appended when
    // the process did not exit in time.
    private static string RunWindows(string cmd, string arguments, int waitForExitTimeOut = int.MaxValue)
    {
        //RunProgram.Run("cmd.exe",FullRunCommand, log);
        // var FullRunCommand=$"/C {BackupUtilityCommand} {Arguments}";
        //for Windows need to pass to cmd.exe because often have command line piping etc
        var args = $"/C {cmd} {arguments}";
        using (var process = new Process())
        {
            process.StartInfo = new ProcessStartInfo
            {
                FileName = "cmd.exe",
                Arguments = args,
                RedirectStandardOutput = true,
                RedirectStandardError = true,
                UseShellExecute = false,
                CreateNoWindow = true,
            };
            process.Start();
            // NOTE(review): reading stdout to end before stderr, with both streams
            // redirected, can deadlock if the child fills the stderr pipe buffer
            // while we are still blocked on stdout — async reads (or reading one
            // stream on another thread) would be safer. Confirm the commands run
            // here produce little enough stderr for this to be safe in practice.
            string result = $"{process.StandardOutput.ReadToEnd()}{process.StandardError.ReadToEnd()} ";
            if (!process.WaitForExit(waitForExitTimeOut))
            {
                result += $"\nERROR: TIMED OUT {waitForExitTimeOut}ms BEFORE COMPLETION";
            }
            return result;
        }
    }
    // Linux: run via "/bin/sh -c ..." so shell syntax in the command works.
    // Only embedded double quotes are escaped before wrapping the whole command.
    private static string RunLinuxShell(string cmd, string arguments, int waitForExitTimeOut = int.MaxValue)
    {
        // NOTE(review): escaping only '"' leaves other shell metacharacters
        // (backticks, $, \) active inside the -c string; appears intentional so
        // callers can use shell features — callers must not pass untrusted input.
        var escapedArgs = $"{cmd} {arguments}".Replace("\"", "\\\"");
        using (var process = new Process())
        {
            process.StartInfo = new ProcessStartInfo
            {
                FileName = "/bin/sh",
                Arguments = $"-c \"{escapedArgs}\"",
                RedirectStandardOutput = true,
                UseShellExecute = false,
                CreateNoWindow = true,
            };
            process.Start();
            // Only stdout is redirected here, so the single ReadToEnd is deadlock-free.
            string result = process.StandardOutput.ReadToEnd();
            if (!process.WaitForExit(waitForExitTimeOut))
            {
                result += $"\nERROR: TIMED OUT {waitForExitTimeOut}ms BEFORE COMPLETION";
            }
            return result;
        }
    }
    // macOS: execute the program directly, no shell wrapper.
    private static string RunOSX(string cmd, string arguments, int waitForExitTimeOut = int.MaxValue)
    {
        using (var process = new Process())
        {
            process.StartInfo = new ProcessStartInfo
            {
                FileName = cmd,
                Arguments = arguments,
                RedirectStandardOutput = true,
                UseShellExecute = false,
                CreateNoWindow = true,
            };
            process.Start();
            // Only stdout is redirected here, so the single ReadToEnd is deadlock-free.
            string result = process.StandardOutput.ReadToEnd();
            if (!process.WaitForExit(waitForExitTimeOut))
            {
                result += $"\nERROR: TIMED OUT {waitForExitTimeOut}ms BEFORE COMPLETION";
            }
            return result;
        }
    }
}
}

View File

@@ -0,0 +1,244 @@
using System;
using System.Collections.Generic;
using System.IO;
using Microsoft.Extensions.Configuration;
namespace Sockeye.Util
{
/// <summary>
/// Contains config values from bootup
/// </summary>
internal static class ServerBootConfig
{
//############################################################################################################
//STATIC HARD CODED COMPILE TIME DEFAULTS NOT SET THROUGH CONFIG
internal const int FAILED_AUTH_DELAY = 3000;//ms
internal const int JOB_OBJECT_HANDLE_BATCH_JOB_LOOP_DELAY = 200;//ms this delay is a temporary measure to ensure super big time consuming batch jobs don't use all server CPU resources
internal const int JOB_PROGRESS_UPDATE_AND_CANCEL_CHECK_SECONDS = 5;//seconds between progress updates and checks for cancellation of long running jobs
//UPLOAD LIMITS 1048576 = 1MiB for testing 10737420000 10737418240 10,737,418,240
internal const long MAX_ATTACHMENT_UPLOAD_BYTES = 10737420000;//slight bit of overage as 10737418241=10GiB
internal const long MAX_LOGO_UPLOAD_BYTES = 512000;//500KiB limit
internal const long MAX_IMPORT_FILE_UPLOAD_BYTES = 104857600;//100MiB limit
internal const long MAX_REPORT_TEMPLATE_UPLOAD_BYTES = 15728640;//15MiB limit; currently the largest v7 export for a report template is 828kb, I'm guessing 15mb is more than enough
internal const long MAX_TRANSLATION_UPLOAD_BYTES = 15728640;//15MiB limit; currently export file is 200kb * 50 maximum at a time = 15mb
//############################################################################################################
//Diagnostic static values used during development, may not be related to config at all, this is just a convenient class to put them in
#if (DEBUG)
internal static List<string> TranslationKeysRequested { get; set; }
#endif
//CONTENTROOTPATH
//** Not intended for end users but required in release mode
internal static string SOCKEYE_CONTENT_ROOT_PATH { get; set; } //Note: set in startup.cs, not in program.cs as it requires startup IHostingEnvironment
//LANGUAGE / Translation
internal static string SOCKEYE_DEFAULT_TRANSLATION { get; set; }
//** Not intended for end users
internal static long SOCKEYE_DEFAULT_TRANSLATION_ID { get; set; } //internal setting set at boot by TranslationBiz::ValidateTranslations
//API
internal static string SOCKEYE_JWT_SECRET { get; set; }
internal static string SOCKEYE_USE_URLS { get; set; }
internal static int SOCKEYE_REPORT_RENDERING_TIMEOUT { get; set; }
//DATABASE
internal static string SOCKEYE_DB_CONNECTION { get; set; }
//** Not intended for end users
internal static bool SOCKEYE_PERMANENTLY_ERASE_DATABASE { get; set; }
//FILE FOLDERS
internal static string SOCKEYE_ATTACHMENT_FILES_PATH { get; set; }
internal static string SOCKEYE_BACKUP_FILES_PATH { get; set; }
internal static string SOCKEYE_TEMP_FILES_PATH { get; set; }
//BACKUP PG_DUMP PATH (IF NOT IN PATH ALREADY)
internal static string SOCKEYE_BACKUP_PG_DUMP_PATH { get; set; }
//REPORT RENDERING BROWSER PATH (if not set then will attempt to auto-download on first render)
internal static string SOCKEYE_REPORT_RENDER_BROWSER_PATH { get; set; }
//REPORT RENDERING BROWSER PARAMS
internal static string SOCKEYE_REPORT_RENDER_BROWSER_PARAMS { get; set; }
//LOGGING
internal static string SOCKEYE_LOG_PATH { get; set; }
internal static string SOCKEYE_LOG_LEVEL { get; set; }
internal static bool SOCKEYE_LOG_ENABLE_LOGGER_DIAGNOSTIC_LOG { get; set; }
//SECURITY
internal static string SOCKEYE_SET_SUPERUSER_PW { get; set; }
//HELPFUL INFORMATION FOR DIAGNOSTICS
internal static Dictionary<string, string> BOOT_DIAGNOSTIC_INFO { get; set; } = new Dictionary<string, string>();
internal static Dictionary<string, string> DBSERVER_DIAGNOSTIC_INFO { get; set; } = new Dictionary<string, string>();
/// <summary>
/// Populate the config statics from the configuration found at boot.
/// Applies sane defaults where settings are missing and validates that all
/// required folder paths can be resolved.
/// called by program.cs
/// </summary>
/// <param name="config">Root configuration (environment variables / settings files) assembled at startup.</param>
internal static void SetConfiguration(IConfigurationRoot config)
{
#if (DEBUG)
    TranslationKeysRequested = new List<string>();
#endif
    #region SERVER BASICS
    //LANGUAGE
    //TranslationBiz will validate this later at boot pfc and ensure a sane default is set (English)
    SOCKEYE_DEFAULT_TRANSLATION = config.GetValue<string>("SOCKEYE_DEFAULT_TRANSLATION");
    SOCKEYE_DEFAULT_TRANSLATION = string.IsNullOrWhiteSpace(SOCKEYE_DEFAULT_TRANSLATION) ? "en" : SOCKEYE_DEFAULT_TRANSLATION;
    //normalize full language names and codes down to the two letter code; unknown values fall back to English
    switch (SOCKEYE_DEFAULT_TRANSLATION.ToLowerInvariant())
    {
        case "en":
        case "english":
            SOCKEYE_DEFAULT_TRANSLATION = "en";
            break;
        case "de":
        case "deutsch":
        case "german":
            SOCKEYE_DEFAULT_TRANSLATION = "de";
            break;
        case "es":
        case "español":
        case "spanish":
            SOCKEYE_DEFAULT_TRANSLATION = "es";
            break;
        case "fr":
        case "français":
        case "french":
            SOCKEYE_DEFAULT_TRANSLATION = "fr";
            break;
        default:
            SOCKEYE_DEFAULT_TRANSLATION = "en";
            break;
    }
    //LOGLEVEL
    SOCKEYE_LOG_LEVEL = config.GetValue<string>("SOCKEYE_LOG_LEVEL");
    SOCKEYE_LOG_LEVEL = string.IsNullOrWhiteSpace(SOCKEYE_LOG_LEVEL) ? "Info" : SOCKEYE_LOG_LEVEL;
    //LOGGING DIAGNOSTIC LOG
    SOCKEYE_LOG_ENABLE_LOGGER_DIAGNOSTIC_LOG = config.GetValue<bool?>("SOCKEYE_LOG_ENABLE_LOGGER_DIAGNOSTIC_LOG") ?? false;
    //PORT / API
    SOCKEYE_USE_URLS = config.GetValue<string>("SOCKEYE_USE_URLS");
    SOCKEYE_USE_URLS = string.IsNullOrWhiteSpace(SOCKEYE_USE_URLS) ? "http://*:7575" : SOCKEYE_USE_URLS;
    SOCKEYE_JWT_SECRET = config.GetValue<string>("SOCKEYE_JWT_SECRET");
    //backdoor back door password superuser reset
    SOCKEYE_SET_SUPERUSER_PW = config.GetValue<string>("SOCKEYE_SET_SUPERUSER_PW");
    //REPORT RENDERING
    //RENDER ENGINE PATH
    SOCKEYE_REPORT_RENDER_BROWSER_PATH = ActualFullPath(config.GetValue<string>("SOCKEYE_REPORT_RENDER_BROWSER_PATH"));
    //RENDER ENGINE PARAMS
    SOCKEYE_REPORT_RENDER_BROWSER_PARAMS = config.GetValue<string>("SOCKEYE_REPORT_RENDER_BROWSER_PARAMS");
    //PROCESS CONTROL
    SOCKEYE_REPORT_RENDERING_TIMEOUT = config.GetValue<int?>("SOCKEYE_REPORT_RENDERING_TIMEOUT") ?? 5;//default
    if (SOCKEYE_REPORT_RENDERING_TIMEOUT < 1) SOCKEYE_REPORT_RENDERING_TIMEOUT = 1; //one minute minimum timeout
    //DB
    SOCKEYE_DB_CONNECTION = config.GetValue<string>("SOCKEYE_DB_CONNECTION");
    SOCKEYE_PERMANENTLY_ERASE_DATABASE = config.GetValue<bool?>("SOCKEYE_PERMANENTLY_ERASE_DATABASE") ?? false;
    //FOLDERS
    string DataFolderPath = ActualFullPath(config.GetValue<string>("SOCKEYE_DATA_PATH"));
    string LogPath = ActualFullPath(config.GetValue<string>("SOCKEYE_LOG_PATH"));
    string AttachmentFilesPath = ActualFullPath(config.GetValue<string>("SOCKEYE_ATTACHMENT_FILES_PATH"));
    string BackupFilesPath = ActualFullPath(config.GetValue<string>("SOCKEYE_BACKUP_FILES_PATH"));
    string TempFilesPath = ActualFullPath(config.GetValue<string>("SOCKEYE_TEMP_FILES_PATH"));
    SOCKEYE_BACKUP_PG_DUMP_PATH = ActualFullPath(config.GetValue<string>("SOCKEYE_BACKUP_PG_DUMP_PATH"));
    if (string.IsNullOrWhiteSpace(DataFolderPath))
    {
        //In this case *must* have paths for *everything* specified
        //(fix: the setting name is now the exception's ParamName instead of the whole message being passed as ParamName)
        if (string.IsNullOrWhiteSpace(LogPath))
            throw new System.ArgumentNullException("SOCKEYE_LOG_PATH", "configuration setting missing and required");
        if (string.IsNullOrWhiteSpace(AttachmentFilesPath))
            throw new System.ArgumentNullException("SOCKEYE_ATTACHMENT_FILES_PATH", "configuration setting missing and required");
        if (string.IsNullOrWhiteSpace(BackupFilesPath))
            throw new System.ArgumentNullException("SOCKEYE_BACKUP_FILES_PATH", "configuration setting missing and required");
        if (string.IsNullOrWhiteSpace(TempFilesPath))
            throw new System.ArgumentNullException("SOCKEYE_TEMP_FILES_PATH", "configuration setting missing and required");
    }
    //set paths (an explicitly configured path wins; otherwise derive a subfolder of the data folder)
    SOCKEYE_LOG_PATH = (string.IsNullOrWhiteSpace(LogPath)) ? Path.Combine(DataFolderPath, "logs") : LogPath;
    SOCKEYE_ATTACHMENT_FILES_PATH = (string.IsNullOrWhiteSpace(AttachmentFilesPath)) ? Path.Combine(DataFolderPath, "attachments") : AttachmentFilesPath;
    SOCKEYE_BACKUP_FILES_PATH = (string.IsNullOrWhiteSpace(BackupFilesPath)) ? Path.Combine(DataFolderPath, "backups") : BackupFilesPath;
    SOCKEYE_TEMP_FILES_PATH = (string.IsNullOrWhiteSpace(TempFilesPath)) ? Path.Combine(DataFolderPath, "temp") : TempFilesPath;
    #endregion server BASICS
}
/// <summary>
/// Resolve a configured path to an absolute path, expanding any embedded
/// environment variables first; empty/whitespace input yields an empty string.
/// </summary>
internal static string ActualFullPath(string p)
{
    return string.IsNullOrWhiteSpace(p)
        ? string.Empty
        : Path.GetFullPath(FileUtil.StringPathDecodeEnvironmentVariables(p));
}
//Fetch first url from the semicolon separated list of urls (used by generator),
//with the "*" wildcard host rewritten to "localhost" so the result is dialable
internal static string FirstOfSockeyeUseUrls
{
    get
    {
        var urls = SOCKEYE_USE_URLS;
        if (string.IsNullOrWhiteSpace(urls))
        {
            return null;
        }
        //Split never returns an empty array, so taking element 0 also covers
        //the single-url (no semicolon) case
        var first = urls.Split(';')[0];
        return first.Replace("*", "localhost");
    }
}
}//eoc
}//eons

View File

@@ -0,0 +1,37 @@
using System.Linq;
using Sockeye.Models;
namespace Sockeye.Util
{
/// <summary>
/// Contains static mirror copy in memory of global settings values that are set from DB during boot
/// and accessible to Biz admin user (equivalent of v7's global object)
/// used by many areas of the biz logic and processing too often to fetch on every request
/// set at boot and on any update to the db global biz settings record
/// </summary>
internal static class ServerGlobalBizSettings
{
    //in-memory mirror of the single GlobalBizSettings db row (Id == 1)
    internal static GlobalBizSettings Cache { get; set; }

    /// <summary>
    /// Populate the cache from the supplied record, or fetch/create the
    /// singleton db row when none is provided (i.e. when called from Startup.cs).
    /// </summary>
    internal static void Initialize(GlobalBizSettings global, AyContext ct)
    {
        if (global != null)
        {
            //caller already holds the current record - mirror it directly
            Cache = global;
            return;
        }
        //fetch or create the singleton settings row
        var settings = ct.GlobalBizSettings.FirstOrDefault(z => z.Id == 1);
        if (settings == null)
        {
            settings = new GlobalBizSettings();
            ct.GlobalBizSettings.Add(settings);
            ct.SaveChanges();
        }
        Cache = settings;
    }
}//eoc
}//eons

View File

@@ -0,0 +1,72 @@
using System;
using System.Linq;
using Sockeye.Models;
namespace Sockeye.Util
{
/// <summary>
/// Contains static mirror copy in memory of Operations related settings stored in db
/// that are accessed frequently by server
/// </summary>
internal static class ServerGlobalOpsSettingsCache
{
    //BOOT flag, set during boot and
    //is used to control generator from starting
    internal static bool BOOTING { get; set; }
    //False if db server is detected to be down
    //is used to control generator from processing
    internal static bool DBAVAILABLE { get; set; }
    //cached db-backed singleton settings rows (Id == 1)
    internal static GlobalOpsBackupSettings Backup { get; set; }
    internal static GlobalOpsNotificationSettings Notify { get; set; }
    //UTC time the next automated backup should run
    internal static DateTime NextBackup { get; set; }

    /// <summary>
    /// Populate and / or create the settings rows (Id == 1) and compute the next backup time.
    /// </summary>
    /// <param name="ct">Database context; required despite the default value, which is kept for
    /// caller compatibility (fix: a null context previously caused a NullReferenceException
    /// instead of a clear error).</param>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="ct"/> is null.</exception>
    internal static void Initialize(AyContext ct = null)
    {
        if (ct == null)
            throw new ArgumentNullException(nameof(ct), "a database context is required to initialize operations settings");
        //fetch or create as not provided (meaning this was called from Startup.cs)
        Backup = ct.GlobalOpsBackupSettings.FirstOrDefault(z => z.Id == 1);
        if (Backup == null)
        {
            Backup = new GlobalOpsBackupSettings();
            ct.GlobalOpsBackupSettings.Add(Backup);
            ct.SaveChanges();
        }
        //seed from the most recent backup file on disk, then advance to the next scheduled slot
        NextBackup = FileUtil.MostRecentAutomatedBackupFileDate();
        SetNextBackup();
        Notify = ct.GlobalOpsNotificationSettings.FirstOrDefault(z => z.Id == 1);
        if (Notify == null)
        {
            Notify = new GlobalOpsNotificationSettings();
            ct.GlobalOpsNotificationSettings.Add(Notify);
            ct.SaveChanges();
        }
    }

    /// <summary>
    /// Compute NextBackup from the configured daily backup time (UTC).
    /// If the previous backup ran more than 24 hours ago (or never), schedule today's slot
    /// (pushed to tomorrow if that time has already passed); otherwise schedule tomorrow's slot.
    /// </summary>
    internal static void SetNextBackup()
    {
        DateTime utcNow = DateTime.UtcNow;
        //today's scheduled slot at the configured backup time
        DateTime todaySlot = new DateTime(utcNow.Year, utcNow.Month, utcNow.Day, Backup.BackupTime.Hour, Backup.BackupTime.Minute, 0, DateTimeKind.Utc);
        if (NextBackup < utcNow.AddHours(-24))
        {
            //last backup is more than 24 hours old: run today, but make sure the slot is in the future
            NextBackup = (todaySlot < utcNow) ? todaySlot.AddDays(1) : todaySlot;
        }
        else
        {
            //backed up within the last 24 hours: next run is tomorrow's slot
            NextBackup = todaySlot.AddDays(1);
        }
    }
}//eoc
}//eons

View File

@@ -0,0 +1,62 @@
using System;
using Microsoft.Extensions.DependencyInjection;
using Sockeye.Models;
namespace Sockeye.Util
{
/// <summary>
/// Shared service provider for static classes
/// </summary>
internal static class ServiceProviderProvider
{
    /// <summary>Root provider, assigned during application startup.</summary>
    internal static IServiceProvider Provider { get; set; }

    /// <summary>
    /// A freshly created service scope on every access.
    /// NOTE(review): the scopes created here are not disposed in this class;
    /// confirm callers dispose them, otherwise scoped services live until shutdown.
    /// </summary>
    internal static IServiceScope Scope => Provider.CreateScope();

    /// <summary>Database context resolved from a new scope.</summary>
    internal static AyContext DBContext => Scope.ServiceProvider.GetRequiredService<AyContext>();

    /// <summary>Api server state service resolved from a new scope.</summary>
    internal static Sockeye.Api.ControllerHelpers.ApiServerState ServerState =>
        Scope.ServiceProvider.GetRequiredService<Sockeye.Api.ControllerHelpers.ApiServerState>();

    /// <summary>Mailer service resolved from a new scope.</summary>
    internal static Sockeye.Util.IMailer Mailer =>
        Scope.ServiceProvider.GetRequiredService<Sockeye.Util.IMailer>();

    // https://docs.microsoft.com/en-us/aspnet/core/fundamentals/http-requests?view=aspnetcore-3.1
    /// <summary>Http client factory resolved from a new scope.</summary>
    internal static System.Net.Http.IHttpClientFactory HttpClientFactory =>
        Scope.ServiceProvider.GetRequiredService<System.Net.Http.IHttpClientFactory>();
}
}

View File

@@ -0,0 +1,12 @@
namespace Sockeye.Util
{
/// <summary>
/// Version strings centrally located for convenience
/// </summary>
internal static class SockeyeVersion
{
    /// <summary>Version number of this server build.</summary>
    public const string VersionString = "8.0.28";

    /// <summary>Product name combined with the version (for banners, logs, etc.).</summary>
    public const string FullNameAndVersion = "Sockeye server " + VersionString;

    /// <summary>Current API version segment.</summary>
    public const string CurrentApiVersion = "v8";
}//eoc
}//eons

143
server/util/StringUtil.cs Normal file
View File

@@ -0,0 +1,143 @@
using System;
namespace Sockeye.Util
{
internal static class StringUtil
{
    /// <summary>
    /// Extract the substring found between the first occurrence of <paramref name="openTag"/>
    /// and the next occurrence of <paramref name="closeTag"/> after it.
    /// </summary>
    /// <param name="s">Text to search.</param>
    /// <param name="openTag">Opening token (not included in the result).</param>
    /// <param name="closeTag">Closing token (not included in the result).</param>
    /// <returns>The text between the tags.</returns>
    /// <exception cref="System.IndexOutOfRangeException">Thrown when either tag is not found.</exception>
    public static string Extract(string s, string openTag, string closeTag)
    {
        int startIndex = s.IndexOf(openTag);
        if (startIndex == -1)
            throw new System.IndexOutOfRangeException("ExtractString->Error: open tag not found");
        startIndex += openTag.Length;
        int endIndex = s.IndexOf(closeTag, startIndex);
        if (endIndex == -1)
            throw new System.IndexOutOfRangeException("ExtractString->Error: closing tag not found");
        return s.Substring(startIndex, endIndex - startIndex);
    }

    /// <summary>
    /// Trim a string to at most <paramref name="maxLength"/> characters.
    /// Null or empty input is returned unchanged (fix: previously threw NullReferenceException on null).
    /// </summary>
    public static string MaxLength(string s, int maxLength)
    {
        if (string.IsNullOrEmpty(s) || s.Length <= maxLength)
            return s;
        return s.Substring(0, maxLength);
    }

    /// <summary>
    /// Mask the exact ip address by substituting the last position of the address with xxx.
    /// Works with v6 or v4 (including v4-inside-v6 such as "::ffff:127.0.0.1") addresses as strings.
    /// Fix: the IPv6 branch previously allowed exactly 7 groups through its guard and then
    /// indexed group 8 (crash), and it appended ":xxxx" AFTER all 8 groups so the address
    /// was never actually masked; the last group is now replaced.
    /// </summary>
    public static string MaskIPAddress(string sIP)
    {
        //any dotted form (pure v4 or v4 embedded in v6): replace everything after the last period
        if (sIP.Contains("."))
        {
            return sIP.Substring(0, sIP.LastIndexOf(".")) + ".xxx";
        }
        //8 groups IPV6 Address format
        if (sIP.Contains(":"))
        {
            sIP = sIP.Replace("::", ":0:");//rehydrate "compressed" addresses
            var segs = sIP.Split(':');
            if (segs.Length < 8)
                return "UNRECOGNIZED V6 IP ADDRESS FORMAT";
            //keep the first seven groups, mask the final one
            return string.Join(":", segs, 0, 7) + ":xxxx";
        }
        return "UNRECOGNIZED IP ADDRESS FORMAT";
    }

    /// <summary>
    /// Used to ensure a unique name generated by appending -nnn is within length requirements
    /// by chopping part of the base name to make room for the suffix.
    /// Fix: the previous truncation kept only (overflow) characters of the base name, which
    /// could still exceed <paramref name="maxLength"/> for long names or throw
    /// ArgumentOutOfRangeException when the overflow exceeded the base name length;
    /// the base is now trimmed so that base + suffix fits exactly within the limit.
    /// </summary>
    /// <exception cref="System.OverflowException">Deadman switch when appendValue exceeds int.MaxValue.</exception>
    public static string UniqueNameBuilder(string oldName, long appendValue, int maxLength)
    {
        //deadman switch
        if (appendValue > int.MaxValue)
        {
            throw new System.OverflowException($"UniqueNameBuilder: Unique name could not be generated for item \"{oldName}\" after {int.MaxValue.ToString()} attempts");
        }
        var appendString = "-" + appendValue.ToString();
        string ret = oldName + appendString;
        if (ret.Length > maxLength)
        {
            //trim the base name so the suffix still fits within maxLength
            int keep = maxLength - appendString.Length;
            if (keep < 0) keep = 0; //suffix alone exceeds the limit; keep none of the base name
            ret = oldName.Substring(0, keep) + appendString;
        }
        return ret;
    }

    /// <summary>
    /// Trim a dotted type name down to only its most relevant (rightmost) portion.
    /// </summary>
    public static string TrimTypeName(string str)
    {
        if (str.Contains('.'))
        {
            return str.Substring(str.LastIndexOf('.') + 1);
        }
        return str;
    }

    /// <summary>
    /// Replace the last occurrence of <paramref name="find"/> in <paramref name="source"/>.
    /// Returns the input unchanged when it is null or the token is absent.
    /// </summary>
    public static string ReplaceLastOccurrence(string source, string find, string replace)
    {
        if (source == null) { return source; }
        int place = source.LastIndexOf(find);
        if (place == -1)
            return source;
        return source.Remove(place, find.Length).Insert(place, replace);
    }

    /// <summary>
    /// Parse a hexadecimal string to an int; returns 0 when parsing fails.
    /// </summary>
    public static int HexToInt(string h)
    {
        int r = 0;
        int.TryParse(h, System.Globalization.NumberStyles.HexNumber, null, out r);
        return r;
    }
}//eoc
}//eons

45
server/util/TaskUtil.cs Normal file
View File

@@ -0,0 +1,45 @@
using System;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
namespace Sockeye.Util
{
internal static class TaskUtil
{
    ///<summary>
    ///Run an async operation and cancel it if it exceeds <paramref name="timeout"/>,
    ///measured from after the operation has been started.
    ///Usage:
    ///await WithTimeoutAfterStart(ctoken => SomeOperationAsync(ctoken), TimeSpan.FromMilliseconds(n));
    ///in callee call this regularly ctoken.ThrowIfCancellationRequested();
    ///and pass the ctoken into any time consuming system methods called in turn
    ///</summary>
    public static async Task WithTimeoutAfterStart(Func<CancellationToken, Task> operation, TimeSpan timeout)
    {
        //https://stackoverflow.com/a/23478628/8939
        //fix: the CancellationTokenSource was previously never disposed (its timer leaked)
        using (var source = new CancellationTokenSource())
        {
            var task = operation(source.Token);
            source.CancelAfter(timeout);
            await task;
        }
    }

    ///<summary>
    ///fire and forget a task but bubble up any exceptions with optional ignore some
    ///</summary>
    public static async void Forget(this Task task, params Type[] acceptableExceptions)
    {
        //https://stackoverflow.com/a/22864616/8939
        try
        {
            await task.ConfigureAwait(false);
        }
        catch (Exception ex)
        {
            //rethrow unless the caller explicitly accepted this exception type
            if (!acceptableExceptions.Contains(ex.GetType()))
                throw;
        }
    }
}//eoc

68
server/util/VizCache.cs Normal file
View File

@@ -0,0 +1,68 @@
using System;
using System.Collections.Generic;
namespace Sockeye.Util
{
//Viz cache - used by biz objects during get report data to temporarily cache values from database for single request
//saves db calls and formatting
internal class VizCache
{
    //backing store; keys are caller key + optional id concatenated
    private readonly Dictionary<string, string> _entries = new Dictionary<string, string>();

    //compose the dictionary key from the caller supplied key plus optional id
    //(a null id contributes nothing, matching interpolation of a null long?)
    private static string ComposeKey(string key, long? id)
    {
        return $"{key}{id}";
    }

    //drop all cached values
    internal void Clear()
    {
        _entries.Clear();
    }

    //store a value (null is normalized to empty string), overwriting any existing entry
    internal void Add(string value, string key, long? id = 0)
    {
        _entries[ComposeKey(key, id)] = value ?? string.Empty;
    }

    //fetch a cached value, or null on a cache miss
    internal string Get(string key, long? id = 0)
    {
        string value;
        return _entries.TryGetValue(ComposeKey(key, id), out value) ? value : null;
    }

    //true when a value has been cached under this key/id
    internal bool Has(string key, long? id = 0)
    {
        return _entries.ContainsKey(ComposeKey(key, id));
    }

    //fetch as bool; missing or blank values read as false (throws on unparseable text)
    internal bool GetAsBool(string key, long? id = 0)
    {
        var raw = Get(key, id);
        return !string.IsNullOrWhiteSpace(raw) && bool.Parse(raw);
    }

    //fetch as decimal; missing or blank values read as null (throws on unparseable text)
    internal decimal? GetAsDecimal(string key, long? id = 0)
    {
        var raw = Get(key, id);
        return string.IsNullOrWhiteSpace(raw) ? (decimal?)null : decimal.Parse(raw);
    }

    //fetch as long; missing or blank values read as null (throws on unparseable text)
    internal long? GetAsLong(string key, long? id = 0)
    {
        var raw = Get(key, id);
        return string.IsNullOrWhiteSpace(raw) ? (long?)null : long.Parse(raw);
    }
}//eoc
}//eons