-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathAnyMetaInitializeUpdateBenchmark.cs
More file actions
125 lines (116 loc) · 4.16 KB
/
AnyMetaInitializeUpdateBenchmark.cs
File metadata and controls
125 lines (116 loc) · 4.16 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
using BenchmarkDotNet.Attributes;
using Microsoft.VisualBasic;
using NMF.AnyText;
using NMF.AnyText.AnyMeta;
using NMF.AnyText.Grammars;
using NMF.AnyText.Rules;
namespace AnyText.PerformanceTests
{
    /// <summary>
    /// Compares the cost of incremental parser updates against full re-initialization
    /// for the AnyMeta grammar, across several model files and batch sizes of token edits.
    /// </summary>
    [SimpleJob(iterationCount: 50)]
    [MarkdownExporter]
    [HtmlExporter]
    [RPlotExporter]
    [CsvExporter]
    public class AnyMetaInitializeUpdateBenchmark
    {
        /// <summary>Model file base names (without extension) used as benchmark inputs.</summary>
        public static IEnumerable<string> Models
        {
            get
            {
                yield return "schema";
                yield return "KDM";
                yield return "NMeta";
                yield return "61850";
                yield return "COSEM";
            }
        }

        private string[] _lines;
        private readonly Grammar _anyMetaGrammar = new AnyMetaGrammar();
        private TokenInfo[] _tokenInfo;
        private Parser _parser;
        // Fixed seed so every run selects the same token windows.
        private readonly Random _random = new Random(42);
        private TokenInfo[] _tokensInOrder;

        /// <summary>The model file currently being benchmarked.</summary>
        [ParamsSource(nameof(Models))]
        public string? Model { get; set; }

        /// <summary>How many tokens are removed and re-inserted per iteration.</summary>
        [Params(1, 10, 100)]
        public int TokenChanges { get; set; }

        /// <summary>Random start index into the token table, re-drawn each iteration.</summary>
        public int TokenOffset { get; set; }

        /// <summary>
        /// Loads the model text and its token table, then initializes grammar and parser once.
        /// </summary>
        [GlobalSetup]
        public void LoadText()
        {
            _lines = File.ReadAllLines($"{Model}.anymeta");
            _anyMetaGrammar.Initialize();
            var tokenSource = File.ReadAllLines($"{Model}.tokens");
            _tokenInfo = new TokenInfo[500];
            var index = 0;
            while (index < 500)
            {
                _tokenInfo[index] = TokenInfo.FromString(tokenSource[index]);
                index++;
            }
            _parser = _anyMetaGrammar.CreateParser();
            _parser.Initialize(_lines, skipValidation: true);
        }

        /// <summary>
        /// Picks a fresh random window of tokens, sorted by position, and compensates
        /// column positions for tokens that share a line (removing an earlier token
        /// shifts later tokens on the same line to the left).
        /// </summary>
        [IterationSetup]
        public void NextIteration()
        {
            TokenOffset = _random.Next(400);
            _tokensInOrder = _tokenInfo
                .Skip(TokenOffset)
                .Take(TokenChanges)
                .OrderBy(t => (t.line, t.col))
                .ToArray();
            var previousLine = -1;
            var widthSoFar = 0;
            for (var index = 0; index < TokenChanges; index++)
            {
                var current = _tokensInOrder[index];
                if (current.line != previousLine)
                {
                    // First token on this line: start accumulating removed width.
                    previousLine = current.line;
                    widthSoFar = current.token.Length;
                }
                else
                {
                    // Later token on the same line: shift left by what was removed before it.
                    _tokensInOrder[index] = new TokenInfo(current.token, current.line, current.col - widthSoFar);
                    widthSoFar += current.token.Length;
                }
            }
        }

        /// <summary>
        /// Remove all tokens, then reinitialize the parser (parse from scratch), restore tokens, reinitialize again
        /// </summary>
        [Benchmark]
        public void ReInitialize()
        {
            for (var index = 0; index < TokenChanges; index++)
            {
                _tokensInOrder[index].Remove(_lines);
            }
            _parser.Initialize(_lines, skipValidation: true);
            // Re-insert in reverse order so earlier positions are still valid.
            for (var index = TokenChanges - 1; index >= 0; index--)
            {
                _tokensInOrder[index].Insert(_lines);
            }
            _parser.Initialize(_lines, skipValidation: true);
        }

        /// <summary>
        /// Remove all tokens, propagate all changes at once, restore tokens, propagate changes again
        /// </summary>
        [Benchmark]
        public void Update()
        {
            var removals = new List<TextEdit>();
            for (var index = 0; index < TokenChanges; index++)
            {
                removals.Add(_tokensInOrder[index].AsRemoveTextEdit());
            }
            _parser.Update(removals, skipValidation: true);
            var insertions = new List<TextEdit>();
            for (var index = TokenChanges - 1; index >= 0; index--)
            {
                insertions.Add(_tokensInOrder[index].AsTextEdit());
            }
            _parser.Update(insertions, skipValidation: true);
        }

        /// <summary>
        /// Remove all tokens, propagate removal after every change, restore each token and update again
        /// </summary>
        [Benchmark]
        public void UpdateAlways()
        {
            for (var index = 0; index < TokenChanges; index++)
            {
                _parser.Update(_tokensInOrder[index].AsRemoveTextEdit(), skipValidation: true);
            }
            for (var index = TokenChanges - 1; index >= 0; index--)
            {
                _parser.Update(_tokensInOrder[index].AsTextEdit(), skipValidation: true);
            }
        }
    }
}