This commit is contained in:
2022-03-24 00:18:26 +00:00
parent ce8bdcd67e
commit 94e9632caf
4 changed files with 25 additions and 9 deletions

2
.vscode/launch.json vendored
View File

@@ -48,7 +48,7 @@
"AYANOVA_DATA_PATH": "c:\\temp\\ravendata",
"AYANOVA_USE_URLS": "http://*:7575;",
//"AYANOVA_PERMANENTLY_ERASE_DATABASE":"true",
"AYANOVA_SERVER_TEST_MODE": "false",
"AYANOVA_SERVER_TEST_MODE": "true",
"AYANOVA_SERVER_TEST_MODE_TZ_OFFSET": "-8",
//"AYANOVA_REPORT_RENDERING_TIMEOUT":"1",
"AYANOVA_SERVER_TEST_MODE_SEEDLEVEL": "small",

View File

@@ -1,6 +1,22 @@
# now
Import / export features
Export - JSON only, no CSV; remove the CSV export. Export anything exactly as it is now — probably no change required
Import / update
limited specific subset of objects from admin import page (rename that to "import / update"?)
JSON AND CSV both supported
CSV column headers MUST be the same names as JSON property names
CSV is turned INTO JSON at the client then submitted to the server so the server only handles importing JSON, never sees CSV
Import and update are same code and handled by CONVENTION (salesforce uses this convention)
Matching is always done by unique name (or whatever the unique identifier is)
Any new objects that don't match by name in the file are added always without exception
Any matching objects that are in the file are UPDATED to match the file values
Specific fields can be NOT updated by simply not including them in the import file
This is the convention
Import feature
currently imports in the same format as it exports,
@@ -8,6 +24,7 @@ Import feature
A proper feature for this would likely need to work with a more limited subset of records, and it needs to be targeted to a specific task only, like in v7
in v7 you don't import head office with client, just client alone as it limits the fields and collections to be imported
I'm thinking that rather than throwing a generic solution at this, it needs to be targeted to work directly with the types in question
Almost need to ditch JSON export entirely?
actually, JSON export is ok; it's the JSON import that is sketchy. Perhaps if it stripped fields? Or created a head office from the viz field if found and ignored IDs?
do we want data transfer this way, AyaNova to AyaNova, supporting everything?? That sounds more like some kind of separate sync tool or something

View File

@@ -18,7 +18,7 @@
<ItemGroup>
<PackageReference Include="Bogus" Version="34.0.1" />
<PackageReference Include="BouncyCastle.NetCore" Version="1.8.10" />
<PackageReference Include="CsvHelper" Version="27.2.1" />
<PackageReference Include="ChoETL.JSON.NETStandard" Version="1.2.1.42" />
<PackageReference Include="Enums.NET" Version="4.0.0" />
<PackageReference Include="jose-jwt" Version="3.2.0" />
<PackageReference Include="MailKit" Version="3.1.1" />

View File

@@ -13,9 +13,8 @@ using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
using System.IO;
using System.IO.Compression;
using CsvHelper;
using ChoETL;
using System;
using System.Globalization;
namespace AyaNova.Api.Controllers
{
@@ -101,11 +100,11 @@ namespace AyaNova.Api.Controllers
switch (format)
{
case "csv":
using (StreamWriter file = System.IO.File.CreateText(outputSourceFullPath))
using (var csv = new CsvHelper.CsvWriter(file, CultureInfo.InvariantCulture))
using (var w = new ChoCSVWriter(outputSourceFullPath).WithFirstLineHeader().ThrowAndStopOnMissingField(false).WithMaxScanRows(100))
{
var dat = await ((IExportAbleObject)biz).GetExportData(selectedRequest, Guid.Empty);
csv.WriteRecords(dat);
//max scan rows means how many rows it will scan to determine field types so this affects tags because it will scan the first 100 to see the maximum tag count then only ever output that many
var dat = await ((IExportAbleObject)biz).GetExportData(selectedRequest, Guid.Empty);//todo: jobify
w.Write(ToDynamicList(dat));
}
break;
case "json":
@@ -131,7 +130,7 @@ namespace AyaNova.Api.Controllers
catch (ReportRenderTimeOutException)
{
log.LogInformation($"RenderExport timeout data list key: {selectedRequest.DataListKey}, record count:{selectedRequest.SelectedRowIds.LongLength}, user:{UserNameFromContext.Name(HttpContext.Items)} ");
return BadRequest(new ApiErrorResponse(ApiErrorCode.INVALID_OPERATION, null, "timeout - select fewer records"));
return BadRequest(new ApiErrorResponse(ApiErrorCode.INVALID_OPERATION, null, "timeout - select fewer records"));
}
}