// Commit 32fb68fa68 - Cleanup codice area MapoDb
using MapoSDK;
using MongoDB.Driver;
using SteamWare;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Text;

namespace MapoDb
{
    /**************************************************************
     * FluxLog management class
     *
     * Automatically keeps recent data on SqlServer / Redis
     * and older data on MongoDB.
     *
     * MongoDB references
     *
     * Windows setup (firewall enabling, remote access, auth...):
     * https://docs.mongodb.com/manual/tutorial/configure-windows-netsh-firewall/
     * https://www.configserverfirewall.com/mongodb/mongodb-allow-remote-connections/
     * https://www.shellhacks.com/mongodb-allow-remote-access/
     * https://stackoverflow.com/questions/3891657/setup-mongodb-on-windows-server-2008
     *
     * (conf file in C:\Program Files\MongoDB\Server\4.2\bin)
     *
     * Read-back tests:
     * https://stackoverflow.com/questions/2943222/find-objects-between-two-dates-mongodb
     * https://studio3t.com/knowledge-base/articles/mongodb-find-method/
     * https://github.com/mongodb/mongo-csharp-driver
     * http://www.binaryheap.com/tech/time-series-data-in-mongodb/
     * https://www.codeproject.com/Tips/760099/Resampling-and-merging-time-series-data-using-LINQ
     * https://docs.microsoft.com/it-it/aspnet/core/tutorials/first-mongo-app?view=aspnetcore-3.0&tabs=visual-studio
     * https://www.codementor.io/pmbanugo/working-with-mongodb-in-net-1-basics-g4frivcvz
     * https://www.codementor.io/pmbanugo/working-with-mongodb-in-net-2-retrieving-mrlbeanm5
     * https://www.codementor.io/pmbanugo/working-with-mongodb-in-net-part-3-skip-sort-limit-and-projections-oqfwncyka
     *************************************************************/

    /// <summary>
    /// Manages flux (time-series) data:
    /// - data is initially accumulated by IOB in REDIS and the SQL DB
    /// - later the data is archived as BSON documents in MongoDB
    /// </summary>
    public class FluxArchive
    {
        #region Public Fields

        /// <summary>Shared singleton-style instance.</summary>
        public static FluxArchive man = new FluxArchive();

        #endregion Public Fields

        #region Public Constructors

        /// <summary>
        /// Builds the archive manager: resolves the MongoDB database through
        /// the shared memory layer and creates the data-layer object.
        /// </summary>
        public FluxArchive()
        {
            // TODO: redo startup reading the connection from web.config
            database = memLayer.ML.getMongoDatabase("MAPO");

            // init data-layer object (single allocation: the field has no
            // initializer, the original allocated it twice)
            DataLayerObj = new DataLayer();
        }

        #endregion Public Constructors

        #region Public Methods

        /// <summary>
        /// Converts a data table read from the SQL DB into a list of objects
        /// ready to be stored in MongoDB. Rows are expected to be ordered so
        /// that samples of the same (machine, flux) pair are contiguous: a
        /// new rawData document is started whenever either key changes.
        /// </summary>
        /// <param name="tabDati">Table of the recorded RAW data rows</param>
        /// <param name="tStamp">Reference date of the data</param>
        /// <param name="periodo">
        /// Sampling window the supplied data refers to
        /// </param>
        /// <returns>One rawData document per (machine, flux) group</returns>
        public List<rawData> convertTable(DS_applicazione.FluxLogDataTable tabDati, DateTime tStamp, timeWindow periodo)
        {
            // init objects
            List<rawData> listaRecords = new List<rawData>();
            rawData currRecord = null;
            List<rawSample> valori = new List<rawSample>();
            string currFlux = "";
            string currMacc = "";

            // encode the reference date as a yyyyMMdd integer (e.g. 20240131);
            // computed arithmetically instead of the ToString/TryParse round-trip
            int tStampYMD = tStamp.Year * 10000 + tStamp.Month * 100 + tStamp.Day;

            foreach (var item in tabDati)
            {
                // a change of flux or machine starts a new document...
                if (item.CodFlux != currFlux || item.IdxMacchina != currMacc)
                {
                    // flush the PREVIOUS document, if any...
                    if (currRecord != null)
                    {
                        currRecord.samples = valori;
                        listaRecords.Add(currRecord);
                    }

                    // start the new document...
                    currFlux = item.CodFlux;
                    currMacc = item.IdxMacchina;
                    valori = new List<rawSample>();
                    currRecord = new rawData()
                    {
                        macName = currMacc,
                        varName = currFlux,
                        varType = plcDataType.Real, // TODO: derive the real data type
                        dateYMD = tStampYMD,
                        period = periodo
                    };
                }

                // append the sample to the current document
                // (currRecord can only be null if the very first row has an
                // empty CodFlux AND IdxMacchina; such rows are skipped)
                if (currRecord != null)
                {
                    valori.Add(new rawSample()
                    {
                        timeStamp = item.dtEvento,
                        value = item.Valore
                    });
                }
            }

            // flush the LAST document...
            if (currRecord != null)
            {
                currRecord.samples = valori;
                listaRecords.Add(currRecord);
            }

            return listaRecords;
        }

        /// <summary>
        /// Transfers flux data from the SQL DB to MongoDB: first deletes any
        /// previously archived documents in the requested period, then for
        /// every machine and every day reads the rows, converts them,
        /// bulk-inserts the RAW documents and stores the daily statistics.
        /// </summary>
        /// <param name="data2transfer">start date</param>
        /// <param name="numDays">number of days to transfer</param>
        /// <returns>Outcome plus a human-readable processing report</returns>
        public exeResult trasferDataFromDb(DateTime data2transfer, int numDays)
        {
            exeResult answ = new exeResult();
            StringBuilder sb = new StringBuilder();
            Stopwatch sw = new Stopwatch();
            DateTime tStamp;

            // collections and filter builders for RAW and daily-stats documents
            var collRawData = database.GetCollection<rawData>("FluxLogRawData");
            var collHistData = database.GetCollection<histData>("FluxLogHistData");
            var builderRaw = Builders<rawData>.Filter;
            var builderHist = Builders<histData>.Filter;

            // yyyyMMdd bounds of the WHOLE period, used to wipe the old RAW
            // and stats documents before re-inserting
            DateTime endDate = data2transfer.AddDays(numDays);
            int startYMD = data2transfer.Year * 10000 + data2transfer.Month * 100 + data2transfer.Day;
            int endYMD = endDate.Year * 10000 + endDate.Month * 100 + endDate.Day;

            // filters and delete
            // NOTE(review): the range is inclusive of the day AFTER the last
            // transferred day (Lte on start+numDays) — confirm this is intended
            var filtRawOld = builderRaw.Gte(u => u.dateYMD, startYMD) & builderRaw.Lte(u => u.dateYMD, endYMD);
            collRawData.DeleteMany(filtRawOld);
            var filtHistOld = builderHist.Gte(u => u.dateYMD, startYMD) & builderHist.Lte(u => u.dateYMD, endYMD);
            collHistData.DeleteMany(filtHistOld);

            // loop over EVERY machine
            sw.Start();
            var elencoMacchine = DataLayerObj.taMacchine.GetData();
            foreach (var macchina in elencoMacchine)
            {
                sb.AppendLine($"Inizio processing macchina {macchina.IdxMacchina}");

                // loop over the requested days...
                for (int i = 0; i < numDays; i++)
                {
                    tStamp = data2transfer.AddDays(i);

                    // read the day's rows from the DB...
                    DS_applicazione.FluxLogDataTable tabDati = DataLayerObj.taFL.getFiltOrd(macchina.IdxMacchina, tStamp, tStamp.AddDays(1), true, "");

                    // only if there is data...
                    if (tabDati.Count > 0)
                    {
                        // convert the rows into MongoDB documents
                        List<rawData> risultato = convertTable(tabDati, tStamp, timeWindow.day);

                        // bulk save
                        if (risultato.Count > 0)
                        {
                            try
                            {
                                collRawData.InsertMany(risultato);

                                // compute and save the NEW daily statistics...
                                var resStats = getStats(risultato);
                                collHistData.InsertOne(resStats);
                            }
                            catch (Exception exc)
                            {
                                logger.lg.scriviLog($"trasferDataFromDb eccezione{Environment.NewLine}{exc}", tipoLog.EXCEPTION);
                            }
                        }

                        // NOTE: the reported time is cumulative since transfer start
                        sb.AppendLine($"DB {tStamp}: {tabDati.Count}rec | MongoDb {risultato.Count} RAW DOC in {sw.ElapsedMilliseconds / 1000} sec");
                    }
                }
            }

            // return the report message...
            answ.esito = esitoExec.ok;
            answ.message = sb.ToString();
            return answ;
        }

        #endregion Public Methods

        #region Protected Methods

        /// <summary>
        /// Computes per-variable statistics (count, avg, min, max) for one
        /// day of converted raw documents. Samples whose string value does
        /// not parse as a float are skipped for min/max/total but still count
        /// in numRec. Machine name and date are taken from the first document
        /// that has samples; all documents are assumed to belong to the same
        /// machine and day.
        /// </summary>
        /// <param name="datiGiornalieri">raw documents of a single machine/day</param>
        /// <returns>A histData document with the statistics of every variable</returns>
        protected histData getStats(List<rawData> datiGiornalieri)
        {
            // init objects
            List<varStats> statistiche = new List<varStats>();
            string macName = "";
            int dateYMD = 0;
            float currVal = 0;

            // one statistics entry per variable
            foreach (var item in datiGiornalieri)
            {
                if (item.samples != null)
                {
                    // machine/date come from the first document with samples
                    if (string.IsNullOrEmpty(macName))
                    {
                        macName = item.macName;
                        dateYMD = item.dateYMD;
                    }

                    // accumulators for this variable
                    float sampleTotal = 0;
                    float sampleMinVal = 0;
                    float sampleMaxVal = 0;
                    bool firstSample = true;
                    int sampleNumRec = item.samples.Count;

                    foreach (var sample in item.samples)
                    {
                        // NOTE(review): parsing uses the current culture — on
                        // locales with ',' as decimal separator this may misread
                        // values; confirm how IOB writes them before switching
                        // to CultureInfo.InvariantCulture
                        if (float.TryParse(sample.value, out currVal))
                        {
                            sampleTotal += currVal;

                            // FIX: min/max are seeded from the first parsed value;
                            // seeding with 0 reported min=0 for all-positive series
                            // and max=0 for all-negative ones
                            if (firstSample)
                            {
                                sampleMinVal = currVal;
                                sampleMaxVal = currVal;
                                firstSample = false;
                            }
                            else
                            {
                                if (currVal > sampleMaxVal) sampleMaxVal = currVal;
                                if (currVal < sampleMinVal) sampleMinVal = currVal;
                            }
                        }
                    }

                    // average over ALL samples (unparsable ones contribute 0)
                    float sampleAvg = sampleNumRec > 0 ? sampleTotal / sampleNumRec : 0;

                    // store the statistics entry
                    statistiche.Add(new varStats()
                    {
                        varName = item.varName,
                        numRec = sampleNumRec,
                        avg = sampleAvg,
                        min = sampleMinVal,
                        max = sampleMaxVal
                    });
                }
            }

            // wrap everything in the daily history document...
            return new histData()
            {
                macName = macName,
                varType = plcDataType.Real,
                dateYMD = dateYMD,
                period = timeWindow.day,
                stats = statistiche
            };
        }

        #endregion Protected Methods

        #region Private Fields

        /// <summary>
        /// Current MongoDB database
        /// </summary>
        private IMongoDatabase database;

        /// <summary>
        /// Data-layer object (created once in the constructor)
        /// </summary>
        private DataLayer DataLayerObj;

        #endregion Private Fields
    }
}