Make sets have a lookup map for cards
commit 8dfb772a2b
parent 143ee76cc7
4 changed files with 35 additions and 16 deletions
@@ -118,9 +118,9 @@ func (set *Set) ProviderByRarity(rarity Rarity) draft.CardProvider {
         }
         collection = rr
     } else {
-        for _, card := range set.Cards {
-            if card.Rarity == rarity {
-                collection = append(collection, draft.Card{ID: card.ID})
+        for _, card := range set.CardList {
+            if set.CardData[card.ID].Rarity == rarity {
+                collection = append(collection, card)
             }
         }
     }
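ProviderByRarity now walks the pre-built CardList of draft.Card values and consults the CardData map for rarity, instead of converting each Card on the fly. A minimal standalone sketch of the same pattern, using only the fields introduced in mlp/set.go below; the helper name cardsOfRarity is hypothetical and not part of this commit:

    // cardsOfRarity filters a set's draft cards by rarity using the lookup map.
    func cardsOfRarity(set *Set, rarity Rarity) []draft.Card {
        out := []draft.Card{}
        for _, card := range set.CardList {
            // CardData is keyed by card ID, so the rarity lookup is O(1) per card.
            if set.CardData[card.ID].Rarity == rarity {
                out = append(out, card)
            }
        }
        return out
    }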
@@ -30,7 +30,7 @@ func TestAlternates(t *testing.T) {
 
     // Find all Premiere URs
     prURs := []string{}
-    for _, card := range prSet.Cards {
+    for _, card := range prSet.CardData {
        if card.Rarity == mlp.RarityUltraRare {
            prURs = append(prURs, card.ID)
        }
@@ -38,7 +38,7 @@ func TestAlternates(t *testing.T) {
 
    // Find all CN URs
    cnURs := []string{}
-    for _, card := range cnSet.Cards {
+    for _, card := range cnSet.CardData {
        if card.Rarity == mlp.RarityUltraRare {
            cnURs = append(cnURs, card.ID)
        }
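Because the tests now range over the CardData map rather than the old Cards slice, Go's randomized map iteration order applies and the collected UR ID slices no longer come out in card order. That only matters if later assertions depend on ordering; if they ever did, sorting the IDs would restore determinism. A sketch of that variant (sort is the standard library package; the loop mirrors the test above):

    prURs := []string{}
    for _, card := range prSet.CardData {
        if card.Rarity == mlp.RarityUltraRare {
            prURs = append(prURs, card.ID)
        }
    }
    // Map iteration order is randomized; sort for stable comparisons.
    sort.Strings(prURs)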
mlp/set.go
@@ -13,9 +13,10 @@ import (
 
 // Set is a set/expansion of MLP:CCG
 type Set struct {
     ID   SetID
     Name string
-    Cards []Card
+    CardList []draft.Card
+    CardData map[string]Card
 }
 
 // Card is a single MLP:CCG card in a set
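The struct now carries two views of the same cards: CardList keeps the ordered draft.Card slice that the draft code consumes, and CardData maps card IDs to the full Card records for direct lookup. A rough sketch of the lookup (the set variable and the "pr1" card ID are placeholders, not taken from this commit):

    // Full card data by ID, without scanning a slice; ok is false for unknown IDs.
    if card, ok := set.CardData["pr1"]; ok {
        fmt.Println(card.Rarity)
    }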
@@ -44,6 +45,24 @@ func (c Card) ToDraftCard() draft.Card {
     }
 }
 
+// jsonSet is the set as serialized in the JSON files
+type jsonSet struct {
+    ID    SetID
+    Name  string
+    Cards []Card
+}
+
+func (j *jsonSet) toSet() (s Set) {
+    s.Name = j.Name
+    s.CardData = make(map[string]Card)
+    s.CardList = make([]draft.Card, len(j.Cards))
+    for i, card := range j.Cards {
+        s.CardData[card.ID] = card
+        s.CardList[i] = draft.Card{ID: card.ID}
+    }
+    return
+}
+
 // PowerRequirement denotes one or more power requirements, colored or not
 type PowerRequirement map[string]int
 
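The JSON files still store a flat Cards slice, so loading goes through the intermediate jsonSet and toSet() builds both views in a single pass; note that toSet() leaves ID unset and relies on the caller (LoadSetBytes below) to fill it in. A hedged usage sketch, with illustrative JSON and variable names:

    var js jsonSet
    raw := []byte(`{"Name": "Premiere", "Cards": []}`)
    if err := json.Unmarshal(raw, &js); err != nil {
        log.Fatal(err)
    }
    set := js.toSet()
    // Both views stay in sync: len(set.CardList) == len(set.CardData) == len(js.Cards).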
@@ -87,15 +106,15 @@ func LoadSetBytes(id SetID, setdata []byte) (*Set, error) {
         return set, nil
     }
 
-    var set Set
-    err := json.Unmarshal(setdata, &set)
-    set.ID = id
-    // If set loaded fine, cache it
-    if err == nil {
-        loadedSets[set.ID] = &set
-    }
+    var jsonset jsonSet
+    err := json.Unmarshal(setdata, &jsonset)
+    if err != nil {
+        return nil, err
+    }
+
+    set := jsonset.toSet()
+    set.ID = id
+    loadedSets[set.ID] = &set
     return &set, err
 }
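LoadSetBytes now returns nil instead of a partially-unmarshalled Set when the JSON is invalid, and every successfully converted set is cached in loadedSets under its SetID, so a repeat call takes the early "return set, nil" path at the top of the hunk. A rough usage sketch (id and setdata are placeholders; the pointer comparison relies on the cache storing and returning the same *Set):

    set, err := LoadSetBytes(id, setdata) // parse, convert via toSet, cache
    if err != nil {
        log.Fatal(err)
    }
    cached, _ := LoadSetBytes(id, setdata) // served from loadedSets
    fmt.Println(set == cached)             // true: same cached *Set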
@@ -136,7 +136,7 @@ func TestLoadCache(t *testing.T) {
     }
 
     // Check that loaded set via HTTP is the dummy cache and not the real thing
-    if len(loadedset.Cards) != 0 {
+    if len(loadedset.CardList) != 0 {
        t.Fatalf("[LoadSetHTTP] Set not loaded from cache")
     }
 }