Add backfill scenario to scale tests (#1339)

* Implement backfill querying

* Update location for stable and incubator charts

* Add MMF backfill example

* Simplify MMF backfill example

* Add backfill scenario to scale tests

* Update backfill scenario

* Improve backfill scenario

Co-authored-by: Alexander Apalikov <alexander.apalikov@globant.com>
yeukovichd
2021-04-14 00:17:52 +03:00
committed by GitHub
parent cc08f39205
commit 3fa588c1f8
9 changed files with 864 additions and 204 deletions

View File

@ -636,8 +636,8 @@ delete-kind-cluster: build/toolchain/bin/kind$(EXE_EXTENSION) build/toolchain/bi
create-cluster-role-binding:
$(KUBECTL) create clusterrolebinding myname-cluster-admin-binding --clusterrole=cluster-admin --user=$(GCLOUD_ACCOUNT_EMAIL)
create-gke-cluster: GKE_VERSION = 1.15.12-gke.20 # gcloud beta container get-server-config --zone us-west1-a
create-gke-cluster: GKE_CLUSTER_SHAPE_FLAGS = --machine-type n1-standard-4 --enable-autoscaling --min-nodes 1 --num-nodes 2 --max-nodes 10 --disk-size 50
create-gke-cluster: GKE_VERSION = 1.15.12-gke.6002 # gcloud beta container get-server-config --zone us-west1-a
create-gke-cluster: GKE_CLUSTER_SHAPE_FLAGS = --machine-type n1-standard-8 --enable-autoscaling --min-nodes 1 --num-nodes 6 --max-nodes 10 --disk-size 50
create-gke-cluster: GKE_FUTURE_COMPAT_FLAGS = --no-enable-basic-auth --no-issue-client-certificate --enable-ip-alias --metadata disable-legacy-endpoints=true --enable-autoupgrade
create-gke-cluster: build/toolchain/bin/kubectl$(EXE_EXTENSION) gcloud
$(GCLOUD) beta $(GCP_PROJECT_FLAG) container clusters create $(GKE_CLUSTER_NAME) $(GCP_LOCATION_FLAG) $(GKE_CLUSTER_SHAPE_FLAGS) $(GKE_FUTURE_COMPAT_FLAGS) $(GKE_CLUSTER_FLAGS) \

View File

@ -40,16 +40,16 @@ var (
activeScenario = scenarios.ActiveScenario
mIterations = telemetry.Counter("scale_backend_iterations", "fetch match iterations")
mFetchMatchCalls = telemetry.Counter("scale_backend_fetch_match_calls", "fetch match calls")
mFetchMatchSuccesses = telemetry.Counter("scale_backend_fetch_match_successes", "fetch match successes")
mFetchMatchErrors = telemetry.Counter("scale_backend_fetch_match_errors", "fetch match errors")
mMatchesReturned = telemetry.Counter("scale_backend_matches_returned", "matches returned")
mSumTicketsReturned = telemetry.Counter("scale_backend_sum_tickets_returned", "tickets in matches returned")
mMatchesAssigned = telemetry.Counter("scale_backend_matches_assigned", "matches assigned")
mMatchAssignsFailed = telemetry.Counter("scale_backend_match_assigns_failed", "match assigns failed")
mTicketsDeleted = telemetry.Counter("scale_backend_tickets_deleted", "tickets deleted")
mTicketDeletesFailed = telemetry.Counter("scale_backend_ticket_deletes_failed", "ticket deletes failed")
mIterations = telemetry.Counter("scale_backend_iterations", "fetch match iterations")
mFetchMatchCalls = telemetry.Counter("scale_backend_fetch_match_calls", "fetch match calls")
mFetchMatchSuccesses = telemetry.Counter("scale_backend_fetch_match_successes", "fetch match successes")
mFetchMatchErrors = telemetry.Counter("scale_backend_fetch_match_errors", "fetch match errors")
mMatchesReturned = telemetry.Counter("scale_backend_matches_returned", "matches returned")
mSumTicketsReturned = telemetry.Counter("scale_backend_sum_tickets_returned", "tickets in matches returned")
mMatchesAssigned = telemetry.Counter("scale_backend_matches_assigned", "matches assigned")
mMatchAssignsFailed = telemetry.Counter("scale_backend_match_assigns_failed", "match assigns failed")
mBackfillsDeleted = telemetry.Counter("scale_backend_backfills_deleted", "backfills deleted")
mBackfillDeletesFailed = telemetry.Counter("scale_backend_backfill_deletes_failed", "backfill deletes failed")
)
// Run triggers execution of functions that continuously fetch, assign and
@ -79,12 +79,28 @@ func run(cfg config.View) {
w := logger.Writer()
defer w.Close()
matchesForAssignment := make(chan *pb.Match, 30000)
ticketsForDeletion := make(chan string, 30000)
matchesToAssign := make(chan *pb.Match, 30000)
for i := 0; i < 50; i++ {
go runAssignments(be, matchesForAssignment, ticketsForDeletion)
go runDeletions(fe, ticketsForDeletion)
if activeScenario.BackendAssignsTickets {
for i := 0; i < 100; i++ {
go runAssignments(be, matchesToAssign)
}
}
backfillsToDelete := make(chan *pb.Backfill, 30000)
if activeScenario.BackendDeletesBackfills {
for i := 0; i < 100; i++ {
go runDeleteBackfills(fe, backfillsToDelete)
}
}
matchesToAcknowledge := make(chan *pb.Match, 30000)
if activeScenario.BackendAcknowledgesBackfills {
for i := 0; i < 100; i++ {
go runAcknowledgeBackfills(fe, matchesToAcknowledge, backfillsToDelete)
}
}
// Don't go faster than this, as it likely means that FetchMatches is throwing
@ -98,7 +114,7 @@ func run(cfg config.View) {
wg.Add(1)
go func(wg *sync.WaitGroup, p *pb.MatchProfile) {
defer wg.Done()
runFetchMatches(be, p, matchesForAssignment)
runFetchMatches(be, p, matchesToAssign, matchesToAcknowledge)
}(&wg, p)
}
@ -108,13 +124,13 @@ func run(cfg config.View) {
}
}
func runFetchMatches(be pb.BackendServiceClient, p *pb.MatchProfile, matchesForAssignment chan<- *pb.Match) {
func runFetchMatches(be pb.BackendServiceClient, p *pb.MatchProfile, matchesToAssign chan<- *pb.Match, matchesToAcknowledge chan<- *pb.Match) {
ctx, span := trace.StartSpan(context.Background(), "scale.backend/FetchMatches")
defer span.End()
req := &pb.FetchMatchesRequest{
Config: &pb.FunctionConfig{
Host: "om-function",
Host: "open-match-function",
Port: 50502,
Type: pb.FunctionConfig_GRPC,
},
@ -146,62 +162,90 @@ func runFetchMatches(be pb.BackendServiceClient, p *pb.MatchProfile, matchesForA
telemetry.RecordNUnitMeasurement(ctx, mSumTicketsReturned, int64(len(resp.GetMatch().Tickets)))
telemetry.RecordUnitMeasurement(ctx, mMatchesReturned)
matchesForAssignment <- resp.GetMatch()
if activeScenario.BackendAssignsTickets {
matchesToAssign <- resp.GetMatch()
}
if activeScenario.BackendAcknowledgesBackfills {
matchesToAcknowledge <- resp.GetMatch()
}
}
}
func runAssignments(be pb.BackendServiceClient, matchesForAssignment <-chan *pb.Match, ticketsForDeletion chan<- string) {
func runDeleteBackfills(fe pb.FrontendServiceClient, backfillsToDelete <-chan *pb.Backfill) {
for b := range backfillsToDelete {
if !activeScenario.BackfillDeleteCond(b) {
continue
}
ctx := context.Background()
_, err := fe.DeleteBackfill(ctx, &pb.DeleteBackfillRequest{BackfillId: b.Id})
if err != nil {
logger.WithError(err).Errorf("failed to delete backfill: %s", b.Id)
telemetry.RecordUnitMeasurement(ctx, mBackfillDeletesFailed)
} else {
telemetry.RecordUnitMeasurement(ctx, mBackfillsDeleted)
}
}
}
func runAcknowledgeBackfills(fe pb.FrontendServiceClient, matchesToAcknowledge <-chan *pb.Match, backfillsToDelete chan<- *pb.Backfill) {
for m := range matchesToAcknowledge {
backfillId := m.Backfill.GetId()
if backfillId == "" {
continue
}
err := acknowledgeBackfill(fe, backfillId)
if err != nil {
logger.WithError(err).Errorf("failed to acknowledge backfill: %s", backfillId)
continue
}
if activeScenario.BackendDeletesBackfills {
backfillsToDelete <- m.Backfill
}
}
}
func acknowledgeBackfill(fe pb.FrontendServiceClient, backfillId string) error {
ctx, span := trace.StartSpan(context.Background(), "scale.frontend/AcknowledgeBackfill")
defer span.End()
_, err := fe.AcknowledgeBackfill(ctx, &pb.AcknowledgeBackfillRequest{
BackfillId: backfillId,
Assignment: &pb.Assignment{
Connection: fmt.Sprintf("%d.%d.%d.%d:2222", rand.Intn(256), rand.Intn(256), rand.Intn(256), rand.Intn(256)),
},
})
return err
}
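For context, the acknowledge calls above stand in for what a game server would normally do: keep a backfill alive by acknowledging it while it waits for more players. A minimal sketch of that server-side loop, assuming the same pb client and the context/time imports used in this file; the helper name and the 5-second interval are illustrative, not part of this change:
// keepBackfillAlive is a hypothetical game-server-side helper: it acknowledges
// the backfill on a fixed interval so Open Match keeps filling it, and stops
// when the context is cancelled (for example, once the match is full).
func keepBackfillAlive(ctx context.Context, fe pb.FrontendServiceClient, backfillID, conn string) error {
	t := time.NewTicker(5 * time.Second) // assumed interval; tune per deployment
	defer t.Stop()
	for {
		select {
		case <-ctx.Done():
			return ctx.Err()
		case <-t.C:
			_, err := fe.AcknowledgeBackfill(ctx, &pb.AcknowledgeBackfillRequest{
				BackfillId: backfillID,
				Assignment: &pb.Assignment{Connection: conn},
			})
			if err != nil {
				return err
			}
		}
	}
}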
func runAssignments(be pb.BackendServiceClient, matchesToAssign <-chan *pb.Match) {
ctx := context.Background()
for m := range matchesForAssignment {
for m := range matchesToAssign {
ids := []string{}
for _, t := range m.Tickets {
ids = append(ids, t.GetId())
}
if activeScenario.BackendAssignsTickets {
_, err := be.AssignTickets(context.Background(), &pb.AssignTicketsRequest{
Assignments: []*pb.AssignmentGroup{
{
TicketIds: ids,
Assignment: &pb.Assignment{
Connection: fmt.Sprintf("%d.%d.%d.%d:2222", rand.Intn(256), rand.Intn(256), rand.Intn(256), rand.Intn(256)),
},
_, err := be.AssignTickets(context.Background(), &pb.AssignTicketsRequest{
Assignments: []*pb.AssignmentGroup{
{
TicketIds: ids,
Assignment: &pb.Assignment{
Connection: fmt.Sprintf("%d.%d.%d.%d:2222", rand.Intn(256), rand.Intn(256), rand.Intn(256), rand.Intn(256)),
},
},
})
if err != nil {
telemetry.RecordUnitMeasurement(ctx, mMatchAssignsFailed)
logger.WithError(err).Error("failed to assign tickets")
continue
}
telemetry.RecordUnitMeasurement(ctx, mMatchesAssigned)
},
})
if err != nil {
telemetry.RecordUnitMeasurement(ctx, mMatchAssignsFailed)
logger.WithError(err).Error("failed to assign tickets")
continue
}
for _, id := range ids {
ticketsForDeletion <- id
}
}
}
func runDeletions(fe pb.FrontendServiceClient, ticketsForDeletion <-chan string) {
ctx := context.Background()
for id := range ticketsForDeletion {
if activeScenario.BackendDeletesTickets {
req := &pb.DeleteTicketRequest{
TicketId: id,
}
_, err := fe.DeleteTicket(context.Background(), req)
if err == nil {
telemetry.RecordUnitMeasurement(ctx, mTicketsDeleted)
} else {
telemetry.RecordUnitMeasurement(ctx, mTicketDeletesFailed)
logger.WithError(err).Error("failed to delete tickets")
}
}
telemetry.RecordUnitMeasurement(ctx, mMatchesAssigned)
}
}

View File

@ -38,12 +38,22 @@ var (
})
activeScenario = scenarios.ActiveScenario
mTicketsCreated = telemetry.Counter("scale_frontend_tickets_created", "tickets created")
mTicketCreationsFailed = telemetry.Counter("scale_frontend_ticket_creations_failed", "ticket creations failed")
mRunnersWaiting = concurrentGauge(telemetry.Gauge("scale_frontend_runners_waiting", "runners waiting"))
mRunnersCreating = concurrentGauge(telemetry.Gauge("scale_frontend_runners_creating", "runners creating"))
mTicketsCreated = telemetry.Counter("scale_frontend_tickets_created", "tickets created")
mTicketCreationsFailed = telemetry.Counter("scale_frontend_ticket_creations_failed", "ticket creations failed")
mRunnersWaiting = concurrentGauge(telemetry.Gauge("scale_frontend_runners_waiting", "runners waiting"))
mRunnersCreating = concurrentGauge(telemetry.Gauge("scale_frontend_runners_creating", "runners creating"))
mTicketsDeleted = telemetry.Counter("scale_frontend_tickets_deleted", "tickets deleted")
mTicketDeletesFailed = telemetry.Counter("scale_frontend_ticket_deletes_failed", "ticket deletes failed")
mBackfillsCreated = telemetry.Counter("scale_frontend_backfills_created", "backfills created")
mBackfillCreationsFailed = telemetry.Counter("scale_frontend_backfill_creations_failed", "backfill creations failed")
mTicketsTimeToAssignment = telemetry.HistogramWithBounds("scale_frontend_tickets_time_to_assignment", "tickets time to assignment", stats.UnitMilliseconds, []float64{0.01, 0.05, 0.1, 0.3, 0.6, 0.8, 1, 2, 3, 4, 5, 6, 8, 10, 13, 16, 20, 25, 30, 40, 50, 65, 80, 100, 130, 160, 200, 250, 300, 400, 500, 650, 800, 1000, 2000, 5000, 10000, 20000, 50000, 100000, 200000, 500000, 1000000})
)
type ticketToWatch struct {
id string
createdAt time.Time
}
// Run triggers execution of the scale frontend component that creates
// tickets at scale in Open Match.
func BindService(p *appmain.Params, b *appmain.Bindings) error {
@ -61,9 +71,12 @@ func run(cfg config.View) {
}
fe := pb.NewFrontendServiceClient(conn)
if activeScenario.FrontendCreatesBackfillsOnStart {
createBackfills(fe, activeScenario.FrontendTotalBackfillsToCreate)
}
ticketQPS := int(activeScenario.FrontendTicketCreatedQPS)
ticketTotal := activeScenario.FrontendTotalTicketsToCreate
totalCreated := 0
for range time.Tick(time.Second) {
@ -89,13 +102,27 @@ func runner(fe pb.FrontendServiceClient) {
time.Sleep(time.Duration(rand.Int63n(int64(time.Second))))
g.start(mRunnersCreating)
createdAt := time.Now()
id, err := createTicket(ctx, fe)
if err != nil {
logger.WithError(err).Error("failed to create a ticket")
return
}
_ = id
err = watchAssignments(ctx, fe, ticketToWatch{id: id, createdAt: createdAt})
if err != nil {
logger.WithError(err).Errorf("failed to get ticket assignment: %s", id)
} else {
ms := time.Since(createdAt).Nanoseconds() / 1e6
stats.Record(ctx, mTicketsTimeToAssignment.M(ms))
}
if activeScenario.FrontendDeletesTickets {
err = deleteTicket(ctx, fe, id)
if err != nil {
logger.WithError(err).Errorf("failed to delete ticket: %s", id)
}
}
}
func createTicket(ctx context.Context, fe pb.FrontendServiceClient) (string, error) {
@ -116,6 +143,68 @@ func createTicket(ctx context.Context, fe pb.FrontendServiceClient) (string, err
return resp.Id, nil
}
func watchAssignments(ctx context.Context, fe pb.FrontendServiceClient, ticket ticketToWatch) error {
ctx, cancel := context.WithCancel(ctx)
defer cancel()
stream, err := fe.WatchAssignments(ctx, &pb.WatchAssignmentsRequest{TicketId: ticket.id})
if err != nil {
return err
}
var a *pb.Assignment
for a.GetConnection() == "" {
resp, err := stream.Recv()
if err != nil {
return err
}
a = resp.Assignment
}
return nil
}
func createBackfills(fe pb.FrontendServiceClient, numBackfillsToCreate int) error {
for i := 0; i < numBackfillsToCreate; i++ {
err := createBackfill(fe)
if err != nil {
return err
}
}
return nil
}
func createBackfill(fe pb.FrontendServiceClient) error {
ctx, span := trace.StartSpan(context.Background(), "scale.frontend/CreateBackfill")
defer span.End()
req := pb.CreateBackfillRequest{
Backfill: activeScenario.Backfill(),
}
_, err := fe.CreateBackfill(ctx, &req)
if err != nil {
telemetry.RecordUnitMeasurement(ctx, mBackfillCreationsFailed)
logger.WithError(err).Error("failed to create backfill")
return err
}
telemetry.RecordUnitMeasurement(ctx, mBackfillsCreated)
return nil
}
func deleteTicket(ctx context.Context, fe pb.FrontendServiceClient, ticketId string) error {
_, err := fe.DeleteTicket(ctx, &pb.DeleteTicketRequest{TicketId: ticketId})
if err != nil {
telemetry.RecordUnitMeasurement(ctx, mTicketDeletesFailed)
} else {
telemetry.RecordUnitMeasurement(ctx, mTicketsDeleted)
}
return err
}
// Allows concurrent modification of a gauge value by modifying the concurrent
// value with a delta.
func concurrentGauge(s *stats.Int64Measure) func(delta int64) {

View File

@ -0,0 +1,271 @@
// Copyright 2020 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package backfill
import (
"fmt"
"io"
"time"
"github.com/golang/protobuf/ptypes"
"github.com/golang/protobuf/ptypes/any"
"github.com/golang/protobuf/ptypes/wrappers"
"open-match.dev/open-match/pkg/pb"
)
const (
poolName = "all"
openSlotsKey = "open-slots"
)
func Scenario() *BackfillScenario {
ticketsPerMatch := 4
return &BackfillScenario{
TicketsPerMatch: ticketsPerMatch,
MaxTicketsPerNotFullMatch: 3,
BackfillDeleteCond: func(b *pb.Backfill) bool {
openSlots := getOpenSlots(b, ticketsPerMatch)
return openSlots <= 0
},
}
}
type BackfillScenario struct {
TicketsPerMatch int
MaxTicketsPerNotFullMatch int
BackfillDeleteCond func(*pb.Backfill) bool
}
func (s *BackfillScenario) Profiles() []*pb.MatchProfile {
return []*pb.MatchProfile{
{
Name: "entirePool",
Pools: []*pb.Pool{
{
Name: poolName,
},
},
},
}
}
func (s *BackfillScenario) Ticket() *pb.Ticket {
return &pb.Ticket{}
}
func (s *BackfillScenario) Backfill() *pb.Backfill {
return &pb.Backfill{}
}
func (s *BackfillScenario) MatchFunction(p *pb.MatchProfile, poolBackfills map[string][]*pb.Backfill, poolTickets map[string][]*pb.Ticket) ([]*pb.Match, error) {
return statefullMMF(p, poolBackfills, poolTickets, s.TicketsPerMatch, s.MaxTicketsPerNotFullMatch)
}
// statefullMMF is an MMF implementation used in scenarios where we want the MMF to create a match that is not yet full and fill it later.
// 1. The first FetchMatches is called
// 2. MMF grabs maxTicketsPerNotFullMatch tickets and makes a match and new backfill for it
// 3. MMF sets backfill's open slots to ticketsPerMatch - maxTicketsPerNotFullMatch
// 4. MMF returns the match as a result
// 5. The second FetchMatches is called
// 6. MMF gets previously created backfill
// 7. MMF gets backfill's open slots value
// 8. MMF grabs openSlots tickets and makes a match with previously created backfill
// 9. MMF sets backfill's open slots to 0
// 10. MMF returns the match as a result
func statefullMMF(p *pb.MatchProfile, poolBackfills map[string][]*pb.Backfill, poolTickets map[string][]*pb.Ticket, ticketsPerMatch int, maxTicketsPerNotFullMatch int) ([]*pb.Match, error) {
var matches []*pb.Match
for pool, backfills := range poolBackfills {
tickets, ok := poolTickets[pool]
if !ok || len(tickets) == 0 {
// no tickets in pool
continue
}
// process backfills first
for _, b := range backfills {
l := len(tickets)
if l == 0 {
// no tickets left
break
}
openSlots := getOpenSlots(b, ticketsPerMatch)
if openSlots <= 0 {
// no free open slots
continue
}
if l > openSlots {
l = openSlots
}
setOpenSlots(b, openSlots-l)
matches = append(matches, &pb.Match{
MatchId: fmt.Sprintf("profile-%v-time-%v-%v", p.GetName(), time.Now().Format("2006-01-02T15:04:05.00"), len(matches)),
Tickets: tickets[0:l],
MatchProfile: p.GetName(),
MatchFunction: "backfill",
Backfill: b,
})
tickets = tickets[l:]
}
// create matches that are not yet full, each with a new backfill
for {
l := len(tickets)
if l == 0 {
// no tickets left
break
}
if l > maxTicketsPerNotFullMatch {
l = maxTicketsPerNotFullMatch
}
b := pb.Backfill{}
setOpenSlots(&b, ticketsPerMatch-l)
matches = append(matches, &pb.Match{
MatchId: fmt.Sprintf("profile-%v-time-%v-%v", p.GetName(), time.Now().Format("2006-01-02T15:04:05.00"), len(matches)),
Tickets: tickets[0:l],
MatchProfile: p.GetName(),
MatchFunction: "backfill",
Backfill: &b,
AllocateGameserver: true,
})
tickets = tickets[l:]
}
}
return matches, nil
}
func getOpenSlots(b *pb.Backfill, defaultVal int) int {
if b.Extensions == nil {
return defaultVal
}
any, ok := b.Extensions[openSlotsKey]
if !ok {
return defaultVal
}
var val wrappers.Int32Value
err := ptypes.UnmarshalAny(any, &val)
if err != nil {
panic(err)
}
return int(val.Value)
}
func setOpenSlots(b *pb.Backfill, val int) {
if b.Extensions == nil {
b.Extensions = make(map[string]*any.Any)
}
any, err := ptypes.MarshalAny(&wrappers.Int32Value{Value: int32(val)})
if err != nil {
panic(err)
}
b.Extensions[openSlotsKey] = any
}
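To make the bookkeeping above concrete, here is a small, self-contained sketch of how the open-slots extension round-trips across the two FetchMatches passes described in the statefullMMF comment. It assumes the same open-match pb package and golang/protobuf helpers imported at the top of this file; the numbers come from Scenario() (ticketsPerMatch = 4, MaxTicketsPerNotFullMatch = 3), and writeSlots/readSlots are illustrative helpers mirroring setOpenSlots/getOpenSlots above.
package main

import (
	"fmt"

	"github.com/golang/protobuf/ptypes"
	"github.com/golang/protobuf/ptypes/any"
	"github.com/golang/protobuf/ptypes/wrappers"

	"open-match.dev/open-match/pkg/pb"
)

func main() {
	b := &pb.Backfill{}

	// Pass 1: the MMF grabs 3 tickets (maxTicketsPerNotFullMatch), creates a new
	// backfill, and records 4-3 = 1 open slot on it.
	writeSlots(b, 4-3)

	// Pass 2: the backfill comes back from the backfill pool query with 1 open
	// slot, so the MMF adds 1 more ticket and closes it out.
	fmt.Println("open slots before pass 2:", readSlots(b, 4)) // 1
	writeSlots(b, 0)
	fmt.Println("open slots after pass 2:", readSlots(b, 4)) // 0
}

// writeSlots mirrors setOpenSlots above: the count is stored as a wrapped
// Int32Value in the backfill extensions under the "open-slots" key.
func writeSlots(b *pb.Backfill, v int) {
	a, err := ptypes.MarshalAny(&wrappers.Int32Value{Value: int32(v)})
	if err != nil {
		panic(err)
	}
	if b.Extensions == nil {
		b.Extensions = map[string]*any.Any{}
	}
	b.Extensions["open-slots"] = a
}

// readSlots mirrors getOpenSlots above: a missing extension falls back to the
// default (ticketsPerMatch).
func readSlots(b *pb.Backfill, def int) int {
	a, ok := b.Extensions["open-slots"]
	if !ok {
		return def
	}
	var v wrappers.Int32Value
	if err := ptypes.UnmarshalAny(a, &v); err != nil {
		panic(err)
	}
	return int(v.Value)
}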
// statelessMMF is an MMF implementation used in scenarios where we want the MMF to fill backfills created by a game server. It doesn't create
// or update any backfill.
// 1. FetchMatches is called
// 2. MMF gets a backfill
// 3. MMF grabs ticketsPerMatch tickets and makes a match with the backfill
// 4. MMF returns the match as a result
func statelessMMF(p *pb.MatchProfile, poolBackfills map[string][]*pb.Backfill, poolTickets map[string][]*pb.Ticket, ticketsPerMatch int) ([]*pb.Match, error) {
var matches []*pb.Match
for pool, backfills := range poolBackfills {
tickets, ok := poolTickets[pool]
if !ok || len(tickets) == 0 {
// no tickets in pool
continue
}
for _, b := range backfills {
l := len(tickets)
if l == 0 {
// no tickets left
break
}
if l > ticketsPerMatch && ticketsPerMatch > 0 {
l = ticketsPerMatch
}
matches = append(matches, &pb.Match{
MatchId: fmt.Sprintf("profile-%v-time-%v-%v", p.GetName(), time.Now().Format("2006-01-02T15:04:05.00"), len(matches)),
Tickets: tickets[0:l],
MatchProfile: p.GetName(),
MatchFunction: "backfill",
Backfill: b,
})
tickets = tickets[l:]
}
}
return matches, nil
}
func (s *BackfillScenario) Evaluate(stream pb.Evaluator_EvaluateServer) error {
tickets := map[string]struct{}{}
backfills := map[string]struct{}{}
matchIds := []string{}
outer:
for {
req, err := stream.Recv()
if err == io.EOF {
break
}
if err != nil {
return fmt.Errorf("failed to read evaluator input stream: %w", err)
}
m := req.GetMatch()
if _, ok := backfills[m.Backfill.Id]; ok {
continue outer
}
for _, t := range m.Tickets {
if _, ok := tickets[t.Id]; ok {
continue outer
}
}
for _, t := range m.Tickets {
tickets[t.Id] = struct{}{}
}
matchIds = append(matchIds, m.GetMatchId())
}
for _, id := range matchIds {
err := stream.Send(&pb.EvaluateResponse{MatchId: id})
if err != nil {
return fmt.Errorf("failed to sending evaluator output stream: %w", err)
}
}
return nil
}

View File

@ -78,7 +78,11 @@ func (b *BattleRoyalScenario) Ticket() *pb.Ticket {
}
}
func (b *BattleRoyalScenario) MatchFunction(p *pb.MatchProfile, poolTickets map[string][]*pb.Ticket) ([]*pb.Match, error) {
func (b *BattleRoyalScenario) Backfill() *pb.Backfill {
return nil
}
func (b *BattleRoyalScenario) MatchFunction(p *pb.MatchProfile, poolBackfills map[string][]*pb.Backfill, poolTickets map[string][]*pb.Ticket) ([]*pb.Match, error) {
const playersInMatch = 100
tickets := poolTickets[poolName]

View File

@ -33,7 +33,7 @@ func Scenario() *FirstMatchScenario {
type FirstMatchScenario struct {
}
func (_ *FirstMatchScenario) Profiles() []*pb.MatchProfile {
func (*FirstMatchScenario) Profiles() []*pb.MatchProfile {
return []*pb.MatchProfile{
{
Name: "entirePool",
@ -46,11 +46,15 @@ func (_ *FirstMatchScenario) Profiles() []*pb.MatchProfile {
}
}
func (_ *FirstMatchScenario) Ticket() *pb.Ticket {
func (*FirstMatchScenario) Ticket() *pb.Ticket {
return &pb.Ticket{}
}
func (_ *FirstMatchScenario) MatchFunction(p *pb.MatchProfile, poolTickets map[string][]*pb.Ticket) ([]*pb.Match, error) {
func (*FirstMatchScenario) Backfill() *pb.Backfill {
return nil
}
func (*FirstMatchScenario) MatchFunction(p *pb.MatchProfile, poolBackfills map[string][]*pb.Backfill, poolTickets map[string][]*pb.Ticket) ([]*pb.Match, error) {
tickets := poolTickets[poolName]
var matches []*pb.Match
@ -68,7 +72,7 @@ func (_ *FirstMatchScenario) MatchFunction(p *pb.MatchProfile, poolTickets map[s
// fifoEvaluate accepts all matches which don't contain the same ticket as in a
// previously accepted match. Essentially first to claim the ticket wins.
func (_ *FirstMatchScenario) Evaluate(stream pb.Evaluator_EvaluateServer) error {
func (*FirstMatchScenario) Evaluate(stream pb.Evaluator_EvaluateServer) error {
used := map[string]struct{}{}
// TODO: once the evaluator client supports sending and receiving at the

View File

@ -19,9 +19,8 @@ import (
"github.com/sirupsen/logrus"
"google.golang.org/grpc"
"open-match.dev/open-match/examples/scale/scenarios/battleroyal"
"open-match.dev/open-match/examples/scale/scenarios/backfill"
"open-match.dev/open-match/examples/scale/scenarios/firstmatch"
"open-match.dev/open-match/examples/scale/scenarios/teamshooter"
"open-match.dev/open-match/internal/util/testing"
"open-match.dev/open-match/pkg/matchfunction"
"open-match.dev/open-match/pkg/pb"
@ -40,11 +39,14 @@ type GameScenario interface {
// Ticket creates a new ticket, with randomized parameters.
Ticket() *pb.Ticket
// Backfill creates a new backfill, with randomized parameters.
Backfill() *pb.Backfill
// Profiles lists all of the profiles that should run.
Profiles() []*pb.MatchProfile
// MatchFunction is the custom logic implementation of the match function.
MatchFunction(p *pb.MatchProfile, poolTickets map[string][]*pb.Ticket) ([]*pb.Match, error)
MatchFunction(p *pb.MatchProfile, poolBackfills map[string][]*pb.Backfill, poolTickets map[string][]*pb.Ticket) ([]*pb.Match, error)
// Evaluate is the custom logic implementation of the evaluator.
Evaluate(stream pb.Evaluator_EvaluateServer) error
@ -56,18 +58,26 @@ var ActiveScenario = func() *Scenario {
// TODO: Select which scenario to use based on some configuration or choice,
// so it's easier to run different scenarios without changing code.
gs = battleroyal.Scenario()
gs = teamshooter.Scenario()
//gs = battleroyal.Scenario()
//gs = teamshooter.Scenario()
s := backfill.Scenario()
gs = s
return &Scenario{
FrontendTotalTicketsToCreate: -1,
FrontendTicketCreatedQPS: 100,
FrontendTotalTicketsToCreate: -1,
FrontendTicketCreatedQPS: 100,
FrontendCreatesBackfillsOnStart: true,
FrontendTotalBackfillsToCreate: 1000,
FrontendDeletesTickets: true,
BackendAssignsTickets: true,
BackendDeletesTickets: true,
BackendAssignsTickets: false,
BackendAcknowledgesBackfills: true,
BackendDeletesBackfills: true,
Ticket: gs.Ticket,
Profiles: gs.Profiles,
Ticket: gs.Ticket,
Backfill: gs.Backfill,
BackfillDeleteCond: s.BackfillDeleteCond,
Profiles: gs.Profiles,
MMF: queryPoolsWrapper(gs.MatchFunction),
Evaluator: gs.Evaluate,
@ -87,17 +97,23 @@ type Scenario struct {
// TicketExtensionSize int
// PendingTicketNumber int
// MatchExtensionSize int
FrontendTotalTicketsToCreate int // TotalTicketsToCreate = -1 let scale-frontend create tickets forever
FrontendTicketCreatedQPS uint32
FrontendTicketCreatedQPS uint32
FrontendTotalTicketsToCreate int // TotalTicketsToCreate = -1 let scale-frontend create tickets forever
FrontendTotalBackfillsToCreate int
FrontendCreatesBackfillsOnStart bool
FrontendDeletesTickets bool
// GameBackend Configs
// ProfileNumber int
// FilterNumber int
BackendAssignsTickets bool
BackendDeletesTickets bool
BackendAssignsTickets bool
BackendAcknowledgesBackfills bool
BackendDeletesBackfills bool
Ticket func() *pb.Ticket
Profiles func() []*pb.MatchProfile
Ticket func() *pb.Ticket
Backfill func() *pb.Backfill
BackfillDeleteCond func(*pb.Backfill) bool
Profiles func() []*pb.MatchProfile
MMF matchFunction
Evaluator evaluatorFunction
@ -122,7 +138,7 @@ func getQueryServiceGRPCClient() pb.QueryServiceClient {
return pb.NewQueryServiceClient(conn)
}
func queryPoolsWrapper(mmf func(req *pb.MatchProfile, pools map[string][]*pb.Ticket) ([]*pb.Match, error)) matchFunction {
func queryPoolsWrapper(mmf func(req *pb.MatchProfile, poolBackfills map[string][]*pb.Backfill, poolTickets map[string][]*pb.Ticket) ([]*pb.Match, error)) matchFunction {
var q pb.QueryServiceClient
var startQ sync.Once
@ -136,7 +152,12 @@ func queryPoolsWrapper(mmf func(req *pb.MatchProfile, pools map[string][]*pb.Tic
return err
}
proposals, err := mmf(req.GetProfile(), poolTickets)
poolBackfills, err := matchfunction.QueryBackfillPools(stream.Context(), q, req.GetProfile().GetPools())
if err != nil {
return err
}
proposals, err := mmf(req.GetProfile(), poolBackfills, poolTickets)
if err != nil {
return err
}

View File

@ -154,9 +154,13 @@ func (t *TeamShooterScenario) Ticket() *pb.Ticket {
}
}
func (t *TeamShooterScenario) Backfill() *pb.Backfill {
return nil
}
// MatchFunction puts tickets into matches based on their skill, finding the
// required number of tickets for a game within the maximum skill difference.
func (t *TeamShooterScenario) MatchFunction(p *pb.MatchProfile, poolTickets map[string][]*pb.Ticket) ([]*pb.Match, error) {
func (t *TeamShooterScenario) MatchFunction(p *pb.MatchProfile, poolBackfills map[string][]*pb.Backfill, poolTickets map[string][]*pb.Ticket) ([]*pb.Match, error) {
skill := func(t *pb.Ticket) float64 {
return t.SearchFields.DoubleArgs[skillArg]
}

View File

@ -18,13 +18,15 @@
"links": [],
"panels": [
{
"collapsed": false,
"gridPos": {
"h": 1,
"w": 24,
"x": 0,
"y": 0
},
"id": 16,
"id": 28,
"panels": [],
"title": "Iterations",
"type": "row"
},
@ -130,11 +132,317 @@
"x": 0,
"y": 9
},
"id": 16,
"panels": [],
"title": "Backfills",
"type": "row"
},
{
"aliasColors": {},
"bars": false,
"dashLength": 10,
"dashes": false,
"fill": 1,
"gridPos": {
"h": 8,
"w": 12,
"x": 0,
"y": 10
},
"id": 30,
"legend": {
"avg": false,
"current": false,
"max": false,
"min": false,
"show": true,
"total": false,
"values": false
},
"lines": true,
"linewidth": 1,
"links": [],
"nullPointMode": "null",
"options": {},
"percentage": false,
"pointradius": 2,
"points": false,
"renderer": "flot",
"seriesOverrides": [],
"spaceLength": 10,
"stack": false,
"steppedLine": false,
"targets": [
{
"expr": "sum(rate(scale_backend_backfills_deleted[5m]))",
"format": "time_series",
"interval": "",
"intervalFactor": 1,
"legendFormat": "Backfilld Deleted per second",
"refId": "B"
},
{
"expr": "sum(rate(scale_backend_backfill_deletes_failed[5m]))",
"format": "time_series",
"intervalFactor": 1,
"legendFormat": "Backfill Deletions Failed per second",
"refId": "C"
}
],
"thresholds": [],
"timeFrom": null,
"timeRegions": [],
"timeShift": null,
"title": "Backfill Deletion",
"tooltip": {
"shared": true,
"sort": 0,
"value_type": "individual"
},
"type": "graph",
"xaxis": {
"buckets": null,
"mode": "time",
"name": null,
"show": true,
"values": []
},
"yaxes": [
{
"format": "short",
"label": null,
"logBase": 1,
"max": null,
"min": "0",
"show": true
},
{
"format": "short",
"label": null,
"logBase": 1,
"max": null,
"min": null,
"show": true
}
],
"yaxis": {
"align": false,
"alignLevel": null
}
},
{
"collapsed": false,
"gridPos": {
"h": 1,
"w": 24,
"x": 0,
"y": 18
},
"id": 14,
"panels": [],
"title": "Tickets",
"type": "row"
},
{
"aliasColors": {},
"bars": false,
"dashLength": 10,
"dashes": false,
"fill": 0,
"gridPos": {
"h": 8,
"w": 12,
"x": 0,
"y": 19
},
"id": 26,
"legend": {
"alignAsTable": true,
"avg": false,
"current": true,
"max": false,
"min": false,
"rightSide": true,
"show": true,
"total": false,
"values": true
},
"lines": true,
"linewidth": 1,
"links": [],
"nullPointMode": "null",
"options": {},
"percentage": false,
"pointradius": 2,
"points": false,
"renderer": "flot",
"seriesOverrides": [],
"spaceLength": 10,
"stack": false,
"steppedLine": false,
"targets": [
{
"expr": "histogram_quantile(0.99, sum(rate(scale_frontend_tickets_time_to_assignment_bucket[5m])) by (le))",
"format": "time_series",
"intervalFactor": 1,
"legendFormat": "99%-ile",
"refId": "A"
},
{
"expr": "histogram_quantile(0.95, sum(rate(scale_frontend_tickets_time_to_assignment_bucket[5m])) by (le))",
"format": "time_series",
"intervalFactor": 1,
"legendFormat": "95%-ile",
"refId": "B"
},
{
"expr": "histogram_quantile(0.90, sum(rate(scale_frontend_tickets_time_to_assignment_bucket[5m])) by (le))",
"format": "time_series",
"intervalFactor": 1,
"legendFormat": "90%-ile",
"refId": "C"
},
{
"expr": "histogram_quantile(0.50, sum(rate(scale_frontend_tickets_time_to_assignment_bucket[5m])) by (le))",
"format": "time_series",
"intervalFactor": 1,
"legendFormat": "50%-ile",
"refId": "D"
},
{
"expr": "histogram_quantile(0.10, sum(rate(scale_frontend_tickets_time_to_assignment_bucket[5m])) by (le))",
"format": "time_series",
"intervalFactor": 1,
"legendFormat": "10%-ile",
"refId": "E"
}
],
"thresholds": [],
"timeFrom": null,
"timeRegions": [],
"timeShift": null,
"title": "Ticket Time to Assignment",
"tooltip": {
"shared": true,
"sort": 0,
"value_type": "individual"
},
"type": "graph",
"xaxis": {
"buckets": null,
"mode": "time",
"name": null,
"show": true,
"values": []
},
"yaxes": [
{
"format": "ms",
"label": null,
"logBase": 2,
"max": null,
"min": null,
"show": true
},
{
"format": "short",
"label": null,
"logBase": 1,
"max": null,
"min": null,
"show": true
}
],
"yaxis": {
"align": false,
"alignLevel": null
}
},
{
"aliasColors": {},
"bars": false,
"dashLength": 10,
"dashes": false,
"fill": 1,
"gridPos": {
"h": 9,
"w": 12,
"x": 12,
"y": 19
},
"id": 12,
"legend": {
"avg": false,
"current": false,
"max": false,
"min": false,
"show": true,
"total": false,
"values": false
},
"lines": true,
"linewidth": 1,
"links": [],
"nullPointMode": "null",
"options": {},
"percentage": false,
"pointradius": 2,
"points": false,
"renderer": "flot",
"seriesOverrides": [],
"spaceLength": 10,
"stack": false,
"steppedLine": false,
"targets": [
{
"expr": "sum(rate(scale_backend_sum_tickets_returned[5m]))",
"format": "time_series",
"intervalFactor": 1,
"legendFormat": "Backend Tickets in Matches pers second",
"refId": "A"
}
],
"thresholds": [],
"timeFrom": null,
"timeRegions": [],
"timeShift": null,
"title": "Tickets In Matches",
"tooltip": {
"shared": true,
"sort": 0,
"value_type": "individual"
},
"type": "graph",
"xaxis": {
"buckets": null,
"mode": "time",
"name": null,
"show": true,
"values": []
},
"yaxes": [
{
"format": "short",
"label": null,
"logBase": 1,
"max": null,
"min": "0",
"show": true
},
{
"format": "short",
"label": null,
"logBase": 1,
"max": null,
"min": null,
"show": true
}
],
"yaxis": {
"align": false,
"alignLevel": null
}
},
{
"aliasColors": {},
"bars": false,
@ -146,7 +454,7 @@
"h": 9,
"w": 12,
"x": 0,
"y": 10
"y": 27
},
"id": 2,
"legend": {
@ -242,12 +550,12 @@
"dashes": false,
"fill": 1,
"gridPos": {
"h": 9,
"h": 8,
"w": 12,
"x": 12,
"y": 10
"y": 28
},
"id": 12,
"id": 22,
"legend": {
"avg": false,
"current": false,
@ -272,18 +580,26 @@
"steppedLine": false,
"targets": [
{
"expr": "sum(rate(scale_backend_sum_tickets_returned[5m]))",
"expr": "sum(rate(scale_frontend_tickets_deleted[5m]))",
"format": "time_series",
"interval": "",
"intervalFactor": 1,
"legendFormat": "Backend Tickets Deleted per second",
"refId": "B"
},
{
"expr": "sum(rate(scale_frontend_ticket_deletes_failed[5m]))",
"format": "time_series",
"intervalFactor": 1,
"legendFormat": "Backend Tickets in Matches pers second",
"refId": "A"
"legendFormat": "Backend Ticket Deletions Failed per second",
"refId": "C"
}
],
"thresholds": [],
"timeFrom": null,
"timeRegions": [],
"timeShift": null,
"title": "Tickets In Matches",
"title": "Ticket Deletion",
"tooltip": {
"shared": true,
"sort": 0,
@ -331,7 +647,7 @@
"h": 8,
"w": 12,
"x": 0,
"y": 19
"y": 36
},
"id": 24,
"legend": {
@ -414,106 +730,13 @@
"alignLevel": null
}
},
{
"aliasColors": {},
"bars": false,
"dashLength": 10,
"dashes": false,
"fill": 1,
"gridPos": {
"h": 8,
"w": 12,
"x": 12,
"y": 19
},
"id": 22,
"legend": {
"avg": false,
"current": false,
"max": false,
"min": false,
"show": true,
"total": false,
"values": false
},
"lines": true,
"linewidth": 1,
"links": [],
"nullPointMode": "null",
"options": {},
"percentage": false,
"pointradius": 2,
"points": false,
"renderer": "flot",
"seriesOverrides": [],
"spaceLength": 10,
"stack": false,
"steppedLine": false,
"targets": [
{
"expr": "sum(rate(scale_backend_tickets_deleted[5m]))",
"format": "time_series",
"interval": "",
"intervalFactor": 1,
"legendFormat": "Backend Tickets Deleted per second",
"refId": "B"
},
{
"expr": "sum(rate(scale_backend_ticket_deletes_failed[5m]))",
"format": "time_series",
"intervalFactor": 1,
"legendFormat": "Backend Ticket Deletions Failed per second",
"refId": "C"
}
],
"thresholds": [],
"timeFrom": null,
"timeRegions": [],
"timeShift": null,
"title": "Ticket Deletion",
"tooltip": {
"shared": true,
"sort": 0,
"value_type": "individual"
},
"type": "graph",
"xaxis": {
"buckets": null,
"mode": "time",
"name": null,
"show": true,
"values": []
},
"yaxes": [
{
"format": "short",
"label": null,
"logBase": 1,
"max": null,
"min": "0",
"show": true
},
{
"format": "short",
"label": null,
"logBase": 1,
"max": null,
"min": null,
"show": true
}
],
"yaxis": {
"align": false,
"alignLevel": null
}
},
{
"collapsed": false,
"gridPos": {
"h": 1,
"w": 24,
"x": 0,
"y": 27
"y": 44
},
"id": 18,
"panels": [],
@ -530,7 +753,7 @@
"h": 8,
"w": 12,
"x": 0,
"y": 28
"y": 45
},
"id": 6,
"legend": {
@ -616,7 +839,7 @@
"h": 8,
"w": 12,
"x": 12,
"y": 28
"y": 45
},
"id": 19,
"legend": {
@ -705,7 +928,7 @@
"h": 1,
"w": 24,
"x": 0,
"y": 36
"y": 53
},
"id": 21,
"panels": [],
@ -722,7 +945,7 @@
"h": 8,
"w": 12,
"x": 0,
"y": 37
"y": 54
},
"id": 8,
"legend": {
@ -807,7 +1030,7 @@
"h": 8,
"w": 12,
"x": 12,
"y": 37
"y": 54
},
"id": 10,
"legend": {
@ -890,7 +1113,7 @@
}
}
],
"refresh": "",
"refresh": "10s",
"schemaVersion": 18,
"style": "dark",
"tags": [],