diff --git a/TimeSeries/PublicApis/SdkExamples/PointZilla/AquariusClientExtensions.cs b/TimeSeries/PublicApis/SdkExamples/PointZilla/AquariusClientExtensions.cs
index 986ba668..b92d4170 100644
--- a/TimeSeries/PublicApis/SdkExamples/PointZilla/AquariusClientExtensions.cs
+++ b/TimeSeries/PublicApis/SdkExamples/PointZilla/AquariusClientExtensions.cs
@@ -13,6 +13,13 @@ public static Guid GetTimeSeriesUniqueId(this IAquariusClient client, string ide
             if (Guid.TryParse(identifier, out var uniqueId))
                 return uniqueId;
 
+            var timeSeriesDescription = client.GetTimeSeriesDescription(identifier);
+
+            return timeSeriesDescription.UniqueId;
+        }
+
+        public static TimeSeriesDescription GetTimeSeriesDescription(this IAquariusClient client, string identifier)
+        {
             var location = TimeSeriesIdentifierParser.ParseLocationIdentifier(identifier);
 
             var response = client.Publish.Get(new TimeSeriesDescriptionServiceRequest { LocationIdentifier = location });
@@ -22,7 +29,7 @@ public static Guid GetTimeSeriesUniqueId(this IAquariusClient client, string ide
             if (timeSeriesDescription == null)
                 throw new ExpectedException($"Can't find '{identifier}' at location '{location}'");
 
-            return timeSeriesDescription.UniqueId;
+            return timeSeriesDescription;
         }
 
         public static TimeSeries GetTimeSeriesInfo(this IAquariusClient client, string identifier)
diff --git a/TimeSeries/PublicApis/SdkExamples/PointZilla/Context.cs b/TimeSeries/PublicApis/SdkExamples/PointZilla/Context.cs
index 6d0972d7..a1d20bca 100644
--- a/TimeSeries/PublicApis/SdkExamples/PointZilla/Context.cs
+++ b/TimeSeries/PublicApis/SdkExamples/PointZilla/Context.cs
@@ -9,6 +9,8 @@ namespace PointZilla
 {
     public class Context
     {
+        public string ExecutingFileVersion { get; set; }
+
         public string Server { get; set; }
         public string Username { get; set; } = "admin";
         public string Password { get; set; } = "admin";
@@ -35,10 +37,13 @@ public class Context
         public string ComputationPeriodIdentifier { get; set; }
         public string SubLocationIdentifier { get; set; }
         public List ExtendedAttributeValues { get; set; } = new List();
+        public TimeSeriesType? TimeSeriesType { get; set; }
 
         public TimeSeriesIdentifier SourceTimeSeries { get; set; }
         public Instant? SourceQueryFrom { get; set; }
         public Instant? SourceQueryTo { get; set; }
+        public string SaveCsvPath { get; set; }
+        public bool StopAfterSavingCsv { get; set; }
 
         public List ManualPoints { get; set; } = new List();
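The extension-method split in `AquariusClientExtensions.cs` lets callers reuse the full `TimeSeriesDescription` lookup instead of resolving only a unique ID. A minimal usage sketch (not part of the diff; it assumes a connected `IAquariusClient` named `client`, and the identifier is a placeholder):

```csharp
// Sketch only: exercising the refactored extension methods from the diff above.
// "Stage.Working@Location1" is an illustrative identifier, not taken from the change set.
var description = client.GetTimeSeriesDescription("Stage.Working@Location1");
var uniqueId = client.GetTimeSeriesUniqueId("Stage.Working@Location1"); // same value as description.UniqueId
```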
diff --git a/TimeSeries/PublicApis/SdkExamples/PointZilla/CsvReader.cs b/TimeSeries/PublicApis/SdkExamples/PointZilla/CsvReader.cs
index 13815264..86daf9d9 100644
--- a/TimeSeries/PublicApis/SdkExamples/PointZilla/CsvReader.cs
+++ b/TimeSeries/PublicApis/SdkExamples/PointZilla/CsvReader.cs
@@ -161,7 +161,7 @@ private ReflectedTimeSeriesPoint ParsePoint(string[] fields)
                 if (int.TryParse(text, out var grade))
                     gradeCode = grade;
             });
-            ParseField(fields, Context.CsvQualifiersField, text => qualifiers = text.Split(QualifierDelimeters, StringSplitOptions.RemoveEmptyEntries).ToList());
+            ParseField(fields, Context.CsvQualifiersField, text => qualifiers = text.Split(QualifierDelimiters, StringSplitOptions.RemoveEmptyEntries).ToList());
 
             if (time == null)
                 return null;
@@ -175,7 +175,7 @@ private ReflectedTimeSeriesPoint ParsePoint(string[] fields)
             };
         }
 
-        private static readonly char[] QualifierDelimeters = {','};
+        private static readonly char[] QualifierDelimiters = {','};
 
         private static void ParseField(string[] fields, int fieldIndex, Action parseAction)
         {
diff --git a/TimeSeries/PublicApis/SdkExamples/PointZilla/CsvWriter.cs b/TimeSeries/PublicApis/SdkExamples/PointZilla/CsvWriter.cs
new file mode 100644
index 00000000..705aaba6
--- /dev/null
+++ b/TimeSeries/PublicApis/SdkExamples/PointZilla/CsvWriter.cs
@@ -0,0 +1,161 @@
+using System.Collections.Generic;
+using System.IO;
+using System.Linq;
+using System.Reflection;
+using Aquarius.TimeSeries.Client.ServiceModels.Acquisition;
+using NodaTime;
+using NodaTime.Text;
+using ServiceStack.Logging;
+
+namespace PointZilla
+{
+    public class CsvWriter
+    {
+        private static readonly ILog Log = LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType);
+
+        private Context Context { get; }
+
+        public CsvWriter(Context context)
+        {
+            Context = context;
+        }
+
+        public void WritePoints(List points)
+        {
+            var timeSeriesIdentifier = CreateTimeSeriesIdentifier();
+
+            var csvPath = Directory.Exists(Context.SaveCsvPath)
+                ? Path.Combine(Context.SaveCsvPath, SanitizeFilename($"{timeSeriesIdentifier.Identifier}.{CreatePeriod(Context.SourceQueryFrom, Context.SourceQueryTo)}.csv"))
+                : Context.SaveCsvPath;
+
+            Log.Info($"Saving {points.Count} extracted points to '{csvPath}' ...");
+
+            var dir = Path.GetDirectoryName(csvPath);
+
+            if (!string.IsNullOrEmpty(dir))
+            {
+                Directory.CreateDirectory(dir);
+            }
+
+            using (var writer = new StreamWriter(csvPath))
+            {
+                var offsetPattern = OffsetPattern.CreateWithInvariantCulture("m");
+                var utcOffsetText = $"UTC{offsetPattern.Format(Context.UtcOffset ?? Offset.Zero)}";
+                var period = CreatePeriod(Context.SourceQueryFrom ?? Instant.MinValue, Context.SourceQueryTo ?? Instant.MaxValue);
+
+                writer.WriteLine($"# {Path.GetFileName(csvPath)} generated by {Context.ExecutingFileVersion}");
+                writer.WriteLine($"#");
+                writer.WriteLine($"# Time series identifier: {timeSeriesIdentifier.Identifier}");
+                writer.WriteLine($"# Location: {timeSeriesIdentifier.LocationIdentifier}");
+                writer.WriteLine($"# UTC offset: ({utcOffsetText})");
+                writer.WriteLine($"# Value units: {Context.Unit}");
+                writer.WriteLine($"# Value parameter: {timeSeriesIdentifier.Parameter}");
+                writer.WriteLine($"# Interpolation type: {Context.InterpolationType}");
+                writer.WriteLine($"# Time series type: {Context.TimeSeriesType}");
+                writer.WriteLine($"#");
+                writer.WriteLine($"# Export options: Corrected signal from {period.StartText} to {period.EndText}");
+                writer.WriteLine($"#");
+                writer.WriteLine($"# CSV data starts at line 15.");
+                writer.WriteLine($"#");
+                writer.WriteLine($"ISO 8601 UTC, Value, Grade, Qualifiers");
+
+                foreach (var point in points)
+                {
+                    var time = point.Time ?? Instant.MinValue;
+
+                    writer.WriteLine($"{InstantPattern.ExtendedIsoPattern.Format(time)}, {point.Value:G12}, {point.GradeCode}, {FormatQualifiers(point.Qualifiers)}");
+                }
+            }
+        }
+
+        public static void SetPointZillaCsvFormat(Context context)
+        {
+            // Match PointZilla Export format above
+
+            // # CSV data starts at line 15.
+            // #
+            // ISO 8601 UTC, Value, Grade, Qualifiers
+            // 2015-12-04T00:01:00Z, 3.523200823975, 500,
+            // 2015-12-04T00:02:00Z, 3.525279357147, 500,
+
+            context.CsvTimeField = 1;
+            context.CsvValueField = 2;
+            context.CsvGradeField = 3;
+            context.CsvQualifiersField = 4;
+            context.CsvComment = "#";
+            context.CsvSkipRows = 0;
+            context.CsvTimeFormat = null;
+            context.CsvIgnoreInvalidRows = true;
+            context.CsvRealign = false;
+        }
+
+        private TimeSeriesIdentifier CreateTimeSeriesIdentifier()
+        {
+            if (Context.SourceTimeSeries != null)
+                return Context.SourceTimeSeries;
+
+            if (!string.IsNullOrEmpty(Context.TimeSeries))
+                return TimeSeriesIdentifierParser.ParseExtendedIdentifier(Context.TimeSeries);
+
+            string parameter;
+            var label = "Points";
+            var locationIdentifier = "PointZilla";
+
+            if (Context.Command == CommandType.DeleteAllPoints)
+                parameter = "Deleted";
+
+            else if (Context.ManualPoints.Any())
+                parameter = "ManuallyEntered";
+            else if (Context.CsvFiles.Any())
+                parameter = "OtherCsvFile";
+            else
+                parameter = Context.WaveformType.ToString();
+
+            return new TimeSeriesIdentifier
+            {
+                Parameter = parameter,
+                Label = label,
+                LocationIdentifier = locationIdentifier,
+                Identifier = $"{parameter}.{label}@{locationIdentifier}"
+            };
+        }
+
+        private static string CreatePeriod(Instant? startTime, Instant? endTime)
+        {
+            var start = startTime ?? Instant.MinValue;
+            var end = endTime ?? Instant.MaxValue;
+
+            if (start == Instant.MinValue && end == Instant.MaxValue)
+                return "EntireRecord";
+
+            var period = CreatePeriod(start, end);
+
+            return $"{period.StartText}.{period.EndText}";
+        }
+
+        private static (string StartText, string EndText) CreatePeriod(Instant start, Instant end)
+        {
+            return (
+                start == Instant.MinValue ? "StartOfRecord" : InstantPattern.ExtendedIsoPattern.Format(start),
+                end == Instant.MaxValue ? "EndOfRecord" : InstantPattern.ExtendedIsoPattern.Format(end)
+            );
+
+        }
+
+        private static string FormatQualifiers(List<string> qualifiers)
+        {
+            if (!qualifiers.Any())
+                return string.Empty;
+
+            if (qualifiers.Count == 1)
+                return qualifiers.First();
+
+            return $"\"{string.Join(",", qualifiers)}\"";
+        }
+
+        private static string SanitizeFilename(string s)
+        {
+            return Path.GetInvalidFileNameChars().Aggregate(s, (current, ch) => current.Replace(ch, '_'));
+        }
+    }
+}
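`CsvWriter` both saves points and defines a CSV layout that PointZilla can parse again via `SetPointZillaCsvFormat`. A hedged round-trip sketch (the `context` and `points` variables are assumed to already exist; `points` is assumed to be the `List<ReflectedTimeSeriesPoint>` produced by the readers):

```csharp
// Sketch only: save extracted points, then configure the reader for a later re-import.
// Assumes context.SaveCsvPath is set to a directory (filename is generated) or a full path.
new CsvWriter(context).WritePoints(points);

// A later run can parse the saved file by selecting the matching column layout:
CsvWriter.SetPointZillaCsvFormat(context); // time=1, value=2, grade=3, qualifiers=4, '#' comments
```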
"EndOfRecord" : InstantPattern.ExtendedIsoPattern.Format(end) + ); + + } + + private static string FormatQualifiers(List qualifiers) + { + if (!qualifiers.Any()) + return string.Empty; + + if (qualifiers.Count == 1) + return qualifiers.First(); + + return $"\"{string.Join(",", qualifiers)}\""; + } + + private static string SanitizeFilename(string s) + { + return Path.GetInvalidFileNameChars().Aggregate(s, (current, ch) => current.Replace(ch, '_')); + } + } +} diff --git a/TimeSeries/PublicApis/SdkExamples/PointZilla/ExternalPointsReader.cs b/TimeSeries/PublicApis/SdkExamples/PointZilla/ExternalPointsReader.cs index e66c59f8..095d6b32 100644 --- a/TimeSeries/PublicApis/SdkExamples/PointZilla/ExternalPointsReader.cs +++ b/TimeSeries/PublicApis/SdkExamples/PointZilla/ExternalPointsReader.cs @@ -5,13 +5,15 @@ using Aquarius.TimeSeries.Client; using Aquarius.TimeSeries.Client.Helpers; using Aquarius.TimeSeries.Client.ServiceModels.Acquisition; +using Aquarius.TimeSeries.Client.ServiceModels.Legacy.Publish3x; using Aquarius.TimeSeries.Client.ServiceModels.Provisioning; -using Aquarius.TimeSeries.Client.ServiceModels.Publish; using Get3xCorrectedData = Aquarius.TimeSeries.Client.ServiceModels.Legacy.Publish3x.TimeSeriesDataCorrectedServiceRequest; using Get3xTimeSeriesDescription = Aquarius.TimeSeries.Client.ServiceModels.Legacy.Publish3x.TimeSeriesDescriptionServiceRequest; using NodaTime; using ServiceStack.Logging; using InterpolationType = Aquarius.TimeSeries.Client.ServiceModels.Provisioning.InterpolationType; +using TimeRange = Aquarius.TimeSeries.Client.ServiceModels.Publish.TimeRange; +using TimeSeriesDataCorrectedServiceRequest = Aquarius.TimeSeries.Client.ServiceModels.Publish.TimeSeriesDataCorrectedServiceRequest; namespace PointZilla { @@ -48,6 +50,8 @@ private List LoadPointsFromNg(IAquariusClient client) { var timeSeriesInfo = client.GetTimeSeriesInfo(Context.SourceTimeSeries.Identifier); + Log.Info($"Loading points from '{timeSeriesInfo.Identifier}' ..."); + var timeSeriesData = client.Publish.Get(new TimeSeriesDataCorrectedServiceRequest { TimeSeriesUniqueId = timeSeriesInfo.UniqueId, @@ -74,7 +78,6 @@ private List LoadPointsFromNg(IAquariusClient client) SetTimeSeriesCreationProperties( timeSeriesInfo, - timeSeriesInfo.UtcOffset, timeSeriesData.Methods.LastOrDefault()?.MethodCode, gapTolerance, interpolationType); @@ -102,7 +105,6 @@ private static IEnumerable GetManyMetadata(IEnumerable LoadPointsFrom3X(IAquariusClient client) { var timeSeriesDescription = client.Publish.Get(new Get3xTimeSeriesDescription @@ -148,12 +150,16 @@ private List LoadPointsFrom3X(IAquariusClient client) if (timeSeriesDescription == null) throw new ExpectedException($"Can't find '{Context.SourceTimeSeries.Identifier}' time-series in location '{Context.SourceTimeSeries.LocationIdentifier}'."); - var points = client.Publish.Get(new Get3xCorrectedData - { - TimeSeriesIdentifier = Context.SourceTimeSeries.Identifier, - QueryFrom = Context.SourceQueryFrom?.ToDateTimeOffset(), - QueryTo = Context.SourceQueryTo?.ToDateTimeOffset() - }) + Log.Info($"Loading points from '{timeSeriesDescription.Identifier}' ..."); + + var correctedData = client.Publish.Get(new Get3xCorrectedData + { + TimeSeriesIdentifier = Context.SourceTimeSeries.Identifier, + QueryFrom = Context.SourceQueryFrom?.ToDateTimeOffset(), + QueryTo = Context.SourceQueryTo?.ToDateTimeOffset() + }); + + var points = correctedData .Points .Select(p => new ReflectedTimeSeriesPoint { @@ -163,14 +169,27 @@ private List LoadPointsFrom3X(IAquariusClient 
diff --git a/TimeSeries/PublicApis/SdkExamples/PointZilla/PointZilla.csproj b/TimeSeries/PublicApis/SdkExamples/PointZilla/PointZilla.csproj
index 950f5077..d86a52ef 100644
--- a/TimeSeries/PublicApis/SdkExamples/PointZilla/PointZilla.csproj
+++ b/TimeSeries/PublicApis/SdkExamples/PointZilla/PointZilla.csproj
@@ -87,6 +87,7 @@
+    <Compile Include="CsvWriter.cs" />
diff --git a/TimeSeries/PublicApis/SdkExamples/PointZilla/PointsAppender.cs b/TimeSeries/PublicApis/SdkExamples/PointZilla/PointsAppender.cs
index 9158a8dd..a6fdf93b 100644
--- a/TimeSeries/PublicApis/SdkExamples/PointZilla/PointsAppender.cs
+++ b/TimeSeries/PublicApis/SdkExamples/PointZilla/PointsAppender.cs
@@ -26,10 +26,21 @@ public PointsAppender(Context context)
 
         public void AppendPoints()
         {
+            Log.Info(Context.ExecutingFileVersion);
+
             Points = GetPoints()
                 .OrderBy(p => p.Time)
                 .ToList();
 
+            if (!string.IsNullOrEmpty(Context.SaveCsvPath))
+            {
+                new CsvWriter(Context)
+                    .WritePoints(Points);
+
+                if (Context.StopAfterSavingCsv)
+                    return;
+            }
+
             Log.Info($"Connecting to {Context.Server} ...");
 
             using (var client = AquariusClient.CreateConnectedClient(Context.Server, Context.Username, Context.Password))
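With the new `SaveCsvPath` / `StopAfterSavingCsv` handling, `AppendPoints()` can extract and save a CSV without ever appending to the target system. A hedged sketch of a `Context` that drives that path (server, credentials, identifier, and path are placeholders; it assumes `TimeSeriesIdentifierParser.ParseExtendedIdentifier` accepts the `[server:username:password]` prefix form described in the Readme):

```csharp
// Sketch only: a CSV-only export that stops before any append occurs.
var context = new Context
{
    ExecutingFileVersion = "PointZilla v1.0.0.0 (placeholder)",
    SourceTimeSeries = TimeSeriesIdentifierParser.ParseExtendedIdentifier(
        "[old3xServer:admin:admin]Stage.Primary@Location1"),  // placeholder source series
    SaveCsvPath = "exports",        // a directory, so the filename is generated
    StopAfterSavingCsv = true       // return right after CsvWriter.WritePoints()
};

new PointsAppender(context).AppendPoints();
```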
diff --git a/TimeSeries/PublicApis/SdkExamples/PointZilla/Program.cs b/TimeSeries/PublicApis/SdkExamples/PointZilla/Program.cs
index 6714340b..cfadb0bd 100644
--- a/TimeSeries/PublicApis/SdkExamples/PointZilla/Program.cs
+++ b/TimeSeries/PublicApis/SdkExamples/PointZilla/Program.cs
@@ -1,5 +1,6 @@
 using System;
 using System.Collections.Generic;
+using System.Diagnostics;
 using System.Globalization;
 using System.IO;
 using System.Linq;
@@ -80,9 +81,21 @@ private static string GetProgramName()
             return Path.GetFileNameWithoutExtension(Assembly.GetEntryAssembly().Location);
         }
 
+        private static string GetExecutingFileVersion()
+        {
+            var assembly = Assembly.GetExecutingAssembly();
+            var fileVersionInfo = FileVersionInfo.GetVersionInfo(assembly.Location);
+
+            // ReSharper disable once PossibleNullReferenceException
+            return $"{MethodBase.GetCurrentMethod().DeclaringType.Namespace} v{fileVersionInfo.FileVersion}";
+        }
+
         private static Context ParseArgs(string[] args)
         {
-            var context = new Context();
+            var context = new Context
+            {
+                ExecutingFileVersion = GetExecutingFileVersion()
+            };
 
             SetNgCsvFormat(context);
 
@@ -120,6 +133,7 @@ private static Context ParseArgs(string[] args)
                 new Option {Key = nameof(context.ComputationIdentifier), Setter = value => context.ComputationIdentifier = value, Getter = () => context.ComputationIdentifier, Description = "Time-series computation identifier"},
                 new Option {Key = nameof(context.ComputationPeriodIdentifier), Setter = value => context.ComputationPeriodIdentifier = value, Getter = () => context.ComputationPeriodIdentifier, Description = "Time-series computation period identifier"},
                 new Option {Key = nameof(context.SubLocationIdentifier), Setter = value => context.SubLocationIdentifier = value, Getter = () => context.SubLocationIdentifier, Description = "Time-series sub-location identifier"},
+                new Option {Key = nameof(context.TimeSeriesType), Setter = value => context.TimeSeriesType = ParseEnum<TimeSeriesType>(value), Getter = () => context.TimeSeriesType.ToString(), Description = $"Time-series type. {EnumOptions<TimeSeriesType>()}"},
                 new Option {Key = nameof(context.ExtendedAttributeValues), Setter = value => ParseExtendedAttributeValue(context, value), Getter = () => string.Empty, Description = "Extended attribute values in UPPERCASE_COLUMN_NAME@UPPERCASE_TABLE_NAME=value syntax. Can be set multiple times."},
                 new Option(), new Option {Description = "Copy points from another time-series:"},
@@ -150,7 +164,7 @@ private static Context ParseArgs(string[] args)
                 new Option {Key = nameof(context.CsvIgnoreInvalidRows), Setter = value => context.CsvIgnoreInvalidRows = bool.Parse(value), Getter = () => context.CsvIgnoreInvalidRows.ToString(), Description = "Ignore CSV rows that can't be parsed"},
                 new Option {Key = nameof(context.CsvRealign), Setter = value => context.CsvRealign = bool.Parse(value), Getter = () => context.CsvRealign.ToString(), Description = $"Realign imported CSV points to the /{nameof(context.StartTime)} value"},
                 new Option {Key = nameof(context.CsvRemoveDuplicatePoints), Setter = value => context.CsvRemoveDuplicatePoints = bool.Parse(value), Getter = () => context.CsvRemoveDuplicatePoints.ToString(), Description = "Remove duplicate points in the CSV before appending."},
-                new Option {Key = "CsvFormat", Description = "Shortcut for known CSV formats. One of 'NG' or '3X'. [default: NG]", Setter =
+                new Option {Key = "CsvFormat", Description = "Shortcut for known CSV formats. One of 'NG', '3X', or 'PointZilla'. [default: NG]", Setter =
                     value =>
                     {
                         if (value.Equals("Ng", StringComparison.InvariantCultureIgnoreCase))
@@ -161,11 +175,19 @@ private static Context ParseArgs(string[] args)
                         {
                             Set3XCsvFormat(context);
                         }
+                        else if (value.Equals("PointZilla", StringComparison.InvariantCultureIgnoreCase))
+                        {
+                            CsvWriter.SetPointZillaCsvFormat(context);
+                        }
                         else
                         {
                             throw new ExpectedException($"'{value}' is an unknown CSV format.");
                         }
-                    }},
+                    }},
+
+                new Option(), new Option {Description = "CSV saving options:"},
+                new Option {Key = nameof(context.SaveCsvPath), Setter = value => context.SaveCsvPath = value, Getter = () => context.SaveCsvPath, Description = "When set, saves the extracted/generated points to a CSV file. If only a directory is specified, an appropriate filename will be generated."},
+                new Option {Key = nameof(context.StopAfterSavingCsv), Setter = value => context.StopAfterSavingCsv = bool.Parse(value), Getter = () => context.StopAfterSavingCsv.ToString(), Description = "When true, stop after saving a CSV file, before appending any points."},
             };
 
             var usageMessage
@@ -242,11 +264,20 @@ var usageMessage
                     option.Setter(value);
                 }
 
-            if (string.IsNullOrWhiteSpace(context.Server))
-                throw new ExpectedException($"A /{nameof(context.Server)} option is required.\n\n{usageMessage}");
+            if (string.IsNullOrEmpty(context.Server) && string.IsNullOrEmpty(context.TimeSeries) && !string.IsNullOrEmpty(context.SaveCsvPath))
+                context.StopAfterSavingCsv = true;
+
+            if (context.SourceTimeSeries != null && string.IsNullOrEmpty(context.SourceTimeSeries.Server) && string.IsNullOrEmpty(context.Server))
+                throw new ExpectedException($"A /{nameof(context.Server)} option is required to load the source time-series.\n\n{usageMessage}");
 
-            if (string.IsNullOrWhiteSpace(context.TimeSeries))
-                throw new ExpectedException($"A /{nameof(context.TimeSeries)} option is required.\n\n{usageMessage}");
+            if (!context.StopAfterSavingCsv)
+            {
+                if (string.IsNullOrWhiteSpace(context.Server))
+                    throw new ExpectedException($"A /{nameof(context.Server)} option is required.\n\n{usageMessage}");
+
+                if (string.IsNullOrWhiteSpace(context.TimeSeries))
+                    throw new ExpectedException($"A /{nameof(context.TimeSeries)} option is required.\n\n{usageMessage}");
+            }
 
             return context;
         }
[default: NG]", Setter = value => { if (value.Equals("Ng", StringComparison.InvariantCultureIgnoreCase)) @@ -161,11 +175,19 @@ private static Context ParseArgs(string[] args) { Set3XCsvFormat(context); } + else if (value.Equals("PointZilla", StringComparison.InvariantCultureIgnoreCase)) + { + CsvWriter.SetPointZillaCsvFormat(context); + } else { throw new ExpectedException($"'{value}' is an unknown CSV format."); } - }}, + }}, + + new Option(), new Option {Description = "CSV saving options:"}, + new Option {Key = nameof(context.SaveCsvPath), Setter = value => context.SaveCsvPath = value, Getter = () => context.SaveCsvPath, Description = "When set, saves the extracted/generated points to a CSV file. If only a directory is specified, an appropriate filename will be generated."}, + new Option {Key = nameof(context.StopAfterSavingCsv), Setter = value => context.StopAfterSavingCsv = bool.Parse(value), Getter = () => context.StopAfterSavingCsv.ToString(), Description = "When true, stop after saving a CSV file, before appending any points."}, }; var usageMessage @@ -242,11 +264,20 @@ var usageMessage option.Setter(value); } - if (string.IsNullOrWhiteSpace(context.Server)) - throw new ExpectedException($"A /{nameof(context.Server)} option is required.\n\n{usageMessage}"); + if (string.IsNullOrEmpty(context.Server) && string.IsNullOrEmpty(context.TimeSeries) && !string.IsNullOrEmpty(context.SaveCsvPath)) + context.StopAfterSavingCsv = true; + + if (context.SourceTimeSeries != null && string.IsNullOrEmpty(context.SourceTimeSeries.Server) && string.IsNullOrEmpty(context.Server)) + throw new ExpectedException($"A /{nameof(context.Server)} option is required to load the source time-series.\n\n{usageMessage}"); - if (string.IsNullOrWhiteSpace(context.TimeSeries)) - throw new ExpectedException($"A /{nameof(context.TimeSeries)} option is required.\n\n{usageMessage}"); + if (!context.StopAfterSavingCsv) + { + if (string.IsNullOrWhiteSpace(context.Server)) + throw new ExpectedException($"A /{nameof(context.Server)} option is required.\n\n{usageMessage}"); + + if (string.IsNullOrWhiteSpace(context.TimeSeries)) + throw new ExpectedException($"A /{nameof(context.TimeSeries)} option is required.\n\n{usageMessage}"); + } return context; } diff --git a/TimeSeries/PublicApis/SdkExamples/PointZilla/Readme.md b/TimeSeries/PublicApis/SdkExamples/PointZilla/Readme.md index c143c659..2b4f8728 100644 --- a/TimeSeries/PublicApis/SdkExamples/PointZilla/Readme.md +++ b/TimeSeries/PublicApis/SdkExamples/PointZilla/Readme.md @@ -182,7 +182,7 @@ The `/SourceTimeSeries=[otherserver]parameter.label@location` syntax can be used If different credentials are required for the other server, use the `[otherserver:username:password]parameter.label@location` syntax. ```sh -$ ./PointZilla.exe -server=myserver Stage.Label@MyLocation -sourcetimeseries=[otherserver]Stage.Working@OtherLocation +$ ./PointZilla.exe -server=myserver Stage.Label@MyLocation -sourcetimeseries="[otherserver]Stage.Working@OtherLocation" 13:31:57.829 INFO - Connected to otherserver (3.10.905.0) 13:31:58.501 INFO - Loaded 250 points from Stage.Working@OtherLocation @@ -193,6 +193,18 @@ $ ./PointZilla.exe -server=myserver Stage.Label@MyLocation -sourcetimeseries=[ot The source time-series system can be any AQTS system as far back as AQUARIUS Time-Series 3.8. +## Comparing the points in two different time-series + +Use the `/SaveCsvPath=` option to save the extracted points to a CSV file, and then use standard text differencing tools to see if anything is different. 
+
+Here is a bash script which compares the saved CSV output of two time-series. This only compares the corrected points, but that is usually a good indicator of "sameness".
+
+```sh
+$ ./PointZilla.exe -saveCsvPath=system1 -sourceTimeSeries="[old3xServer]Stage.Primary@Location1"
+$ ./PointZilla.exe -saveCsvPath=system2 -sourceTimeSeries="[newNgServer]Stage.Primary@Location1"
+$ diff system1/Stage.Primary@Location1.EntireRecord.csv system2/Stage.Primary@Location1.EntireRecord.csv && echo "Time-series are identical." || echo "Nope, they are different"
+```
+
 ## Deleting all points in a time-series
 
 The `DeleteAllPoints` command can be used to delete the entire record of point values from a time-series.
@@ -241,14 +253,14 @@ Supported -option=value settings (/option=value works too):
   -Wait                      Wait for the append request to complete [default: True]
   -AppendTimeout             Timeout period for append completion, in .NET TimeSpan format.
   -BatchSize                 Maximum number of points to send in a single append request [default: 500000]
-
+  ========================= Time-series options:
   -TimeSeries                Target time-series identifier or unique ID
   -TimeRange                 Time-range for overwrite in ISO8061/ISO8601 (defaults to start/end points)
   -Command                   Append operation to perform. One of Auto, Append, OverwriteAppend, Reflected, DeleteAllPoints. [default: Auto]
   -GradeCode                 Optional grade code for all appended points
   -Qualifiers                Optional qualifier list for all appended points
-
+  ========================= Time-series creation options:
   -CreateMode                Mode for creating missing time-series. One of Never, Basic, Reflected. [default: Never]
   -GapTolerance              Gap tolerance for newly-created time-series. [default: "MaxDuration"]
@@ -262,14 +274,14 @@ Supported -option=value settings (/option=value works too):
   -ComputationIdentifier     Time-series computation identifier
   -ComputationPeriodIdentifier Time-series computation period identifier
   -SubLocationIdentifier     Time-series sub-location identifier
+  -TimeSeriesType            Time-series type. One of Unknown, ProcessorBasic, ProcessorDerived, External, Reflected.
   -ExtendedAttributeValues   Extended attribute values in UPPERCASE_COLUMN_NAME@UPPERCASE_TABLE_NAME=value syntax. Can be set multiple times.
-
-
+  ========================= Copy points from another time-series:
   -SourceTimeSeries          Source time-series to copy. Prefix with [server2] or [server2:username2:password2] to copy from another server
   -SourceQueryFrom           Start time of extracted points in ISO8601 format.
   -SourceQueryTo             End time of extracted points
-
+  ========================= Point-generator options:
   -StartTime                 Start time of generated points, in ISO8601 format. [default: the current time]
   -PointInterval             Interval between generated points, in .NET TimeSpan format. [default: 00:01:00]
@@ -280,7 +292,7 @@ Supported -option=value settings (/option=value works too):
   -WaveformPhase             Phase within one waveform period [default: 0]
   -WaveformScalar            Scale the waveform by this amount [default: 1]
   -WaveformPeriod            Waveform period before repeating [default: 1440]
-
+  ========================= CSV parsing options:
   -CSV                       Parse the CSV file
   -CsvTimeField              CSV column index for timestamps [default: 1]
@@ -293,7 +305,11 @@ Supported -option=value settings (/option=value works too):
   -CsvIgnoreInvalidRows      Ignore CSV rows that can't be parsed [default: True]
   -CsvRealign                Realign imported CSV points to the /StartTime value [default: False]
   -CsvRemoveDuplicatePoints  Remove duplicate points in the CSV before appending. [default: True]
-  -CsvFormat                 Shortcut for known CSV formats. One of 'NG' or '3X'. [default: NG]
+  -CsvFormat                 Shortcut for known CSV formats. One of 'NG', '3X', or 'PointZilla'. [default: NG]
+
+  ========================= CSV saving options:
+  -SaveCsvPath               When set, saves the extracted/generated points to a CSV file. If only a directory is specified, an appropriate filename will be generated.
+  -StopAfterSavingCsv        When true, stop after saving a CSV file, before appending any points. [default: False]
 
 Use the @optionsFile syntax to read more options from a file.