Skip to content

Commit b25168b

Browse files
oliviagolden0 and kramvan1
authored and committed
test update
1 parent eb7a239 commit b25168b

1 file changed

Lines changed: 39 additions & 26 deletions

File tree

v3enginetest/main.go

Lines changed: 39 additions & 26 deletions
Original file line numberDiff line numberDiff line change
@@ -20,14 +20,17 @@ var (
2020
)
2121

2222
const (
23-
dataPath = "/rulesEngine/data/:id"
24-
blockPath = "/rulesEngine/block/:id"
25-
donePath = "/rulesEngine/done/:id"
26-
doneCrawlerPath = "/rulesEngineCrawler/done/:id"
27-
doneRuleID = "done"
28-
doneRuleIDCrawler = "doneCrawler"
29-
doneID = "4567"
30-
doneIDCrawler = "8910"
23+
dataPath = "/rulesEngine/data/:id"
24+
blockPath = "/rulesEngine/block/:id"
25+
donePath = "/rulesEngine/done/:id"
26+
doneCrawlerPathLow = "/rulesEngineCrawlerLow/done/:id"
27+
doneCrawlerPathHigh = "/rulesEngineCrawlerHigh/done/:id"
28+
doneRuleID = "done"
29+
doneRuleIDCrawlerLow = "doneCrawlerLow"
30+
doneRuleIDCrawlerHigh = "doneCrawlerHigh"
31+
doneID = "4567"
32+
doneIDCrawlerLow = "8910"
33+
doneIDCrawlerHigh = "1112"
3134
)
3235

3336
type polled struct {
@@ -100,7 +103,7 @@ func main() {
100103
engine := rules.NewV3Engine(cfg, logger,
101104
rules.EngineContextProvider(cpFunc),
102105
rules.EngineMetricsCollector(mFunc),
103-
rules.EngineSyncInterval(5),
106+
rules.EngineSyncInterval(10),
104107
rules.EngineCrawlMutex("inttest", 5),
105108
rules.EngineLockAcquisitionTimeout(5))
106109
mw := &rules.MockWatcherWrapper{
@@ -174,19 +177,19 @@ func main() {
174177
doneFalse := "false"
175178
doneRule, err := rules.NewEqualsLiteralRule(donePath, &doneFalse)
176179
check(err)
177-
doneCrawlerRule, err := rules.NewEqualsLiteralRule(doneCrawlerPath, &doneFalse)
178-
check(err)
179180
engine.AddRule(doneRule, "/rulesEngineDone/:id", func(task *rules.V3RuleTask) {
180181
path := task.Attr.Format(donePath)
181182
doneTrue := "true"
182183
_, err := kv.Put(task.Context, path, doneTrue)
183184
check(err)
184185
}, rules.RuleID(doneRuleID))
185186

187+
doneCrawlerRuleLow, err := rules.NewEqualsLiteralRule(doneCrawlerPathLow, &doneFalse)
188+
check(err)
186189
// create a no priority crawler only rule
187190
highPriorityCalled := false
188-
engine.AddRule(doneCrawlerRule, "/rulesEngineCrawlerDone/:id", func(task *rules.V3RuleTask) {
189-
path := task.Attr.Format(doneCrawlerPath)
191+
engine.AddRule(doneCrawlerRuleLow, "/rulesEngineCrawlerDoneLow/:id", func(task *rules.V3RuleTask) {
192+
path := task.Attr.Format(doneCrawlerPathLow)
190193
if task.Metadata["source"] != "crawler" {
191194
panic("Crawler only rule not processed by the crawler")
192195
} else if !highPriorityCalled {
@@ -195,19 +198,21 @@ func main() {
195198
doneTrue := "true"
196199
_, err := kv.Put(task.Context, path, doneTrue)
197200
check(err)
198-
}, rules.RuleID(doneRuleIDCrawler), rules.CrawlerOnly())
201+
}, rules.RuleID(doneRuleIDCrawlerLow), rules.CrawlerOnly())
199202

203+
doneCrawlerRuleHigh, err := rules.NewEqualsLiteralRule(doneCrawlerPathHigh, &doneFalse)
204+
check(err)
200205
// create a high priority crawler only rule
201-
engine.AddRule(doneCrawlerRule, "/rulesEngineCrawlerDone/:id", func(task *rules.V3RuleTask) {
202-
path := task.Attr.Format(doneCrawlerPath)
206+
engine.AddRule(doneCrawlerRuleHigh, "/rulesEngineCrawlerDoneHigh/:id", func(task *rules.V3RuleTask) {
207+
highPriorityCalled = true
208+
path := task.Attr.Format(doneCrawlerPathHigh)
203209
if task.Metadata["source"] != "crawler" {
204210
panic("Crawler only rule not processed by the crawler")
205211
}
206-
highPriorityCalled = true
207212
doneTrue := "true"
208213
_, err := kv.Put(task.Context, path, doneTrue)
209214
check(err)
210-
}, rules.RuleID(doneRuleIDCrawler), rules.CrawlerOnly(), rules.Priority(100))
215+
}, rules.RuleID(doneRuleIDCrawlerHigh), rules.CrawlerOnly(), rules.Priority(100))
211216

212217
engine.Run()
213218
time.Sleep(time.Second)
@@ -231,20 +236,28 @@ func main() {
231236
_, err = kv.Put(context.Background(), strings.Replace(donePath, ":id", doneID, 1), doneFalse)
232237
check(err)
233238

234-
// Trigger the done crawler rule
235-
_, err = kv.Put(context.Background(), strings.Replace(doneCrawlerPath, ":id", doneIDCrawler, 1), doneFalse)
239+
// Trigger the done crawler low-priority rule
240+
_, err = kv.Put(context.Background(), strings.Replace(doneCrawlerPathLow, ":id", doneIDCrawlerLow, 1), doneFalse)
241+
check(err)
242+
243+
// Trigger the done crawler high-priority rule
244+
_, err = kv.Put(context.Background(), strings.Replace(doneCrawlerPathHigh, ":id", doneIDCrawlerHigh, 1), doneFalse)
236245
check(err)
237246

238247
// Verify that it ran
239-
tenSecCtx1, cancel := context.WithTimeout(context.Background(), 10*time.Second)
240-
defer cancel()
248+
tenSecCtx1, cancel1 := context.WithTimeout(context.Background(), 10*time.Second)
249+
defer cancel1()
241250
err = cbHandler.WaitForCallback(tenSecCtx1, doneRuleID, map[string]string{"id": doneID})
242251
check(err)
243252

244-
// Verify the crawler rule ran
245-
tenSecCtx2, cancel := context.WithTimeout(context.Background(), 10*time.Second)
246-
defer cancel()
247-
err = cbHandler.WaitForCallback(tenSecCtx2, doneRuleIDCrawler, map[string]string{"id": doneIDCrawler})
253+
// Verify the crawler rules ran
254+
tenSecCtx2, cancel2 := context.WithTimeout(context.Background(), 10*time.Second)
255+
defer cancel2()
256+
err = cbHandler.WaitForCallback(tenSecCtx2, doneRuleIDCrawlerHigh, map[string]string{"id": doneIDCrawlerHigh})
257+
check(err)
258+
tenSecCtx3, cancel3 := context.WithTimeout(context.Background(), 10*time.Second)
259+
defer cancel3()
260+
err = cbHandler.WaitForCallback(tenSecCtx3, doneRuleIDCrawlerLow, map[string]string{"id": doneIDCrawlerLow})
248261
check(err)
249262

250263
_ = engine.Shutdown(ctx) // #nosec G104 -- For testing only

0 commit comments

Comments
 (0)