diff --git a/main.py b/main.py
index 5028d47d..ae0ba3ac 100644
--- a/main.py
+++ b/main.py
@@ -16,8 +16,6 @@ presets = {
"isfullspoil": False, # when full spoil comes around, we only want to use WOTC images
"includeMasterpieces": True, # if the set has masterpieces, let's get those too
"oldRSS": False, # maybe MTGS hasn't updated their spoiler.rss but new cards have leaked
- "split_cards": {
- },
# if we want to use one site before another for card data TODO
"siteorder": ['scryfall', 'mtgs', 'mythicspoiler'],
# prioritize images from certain sources
@@ -126,8 +124,7 @@ if __name__ == '__main__':
else:
mtgs = mtgs_scraper.scrape_mtgs(
'http://www.mtgsalvation.com/spoilers.rss') # scrape mtgs rss feed
- [mtgs, split_cards] = mtgs_scraper.parse_mtgs(
- mtgs, [], [], [], presets['split_cards']) # parse spoilers into mtgjson format
+ mtgs = mtgs_scraper.parse_mtgs(mtgs) # parse spoilers into mtgjson format
mtgs = spoilers.correct_cards(
mtgs, manual_sets[setinfo['code']], card_corrections, delete_cards['delete']) # fix using the fixfiles
mtgjson = spoilers.get_image_urls(
diff --git a/mtgs_scraper.py b/mtgs_scraper.py
index 61e2c862..03b86527 100644
--- a/mtgs_scraper.py
+++ b/mtgs_scraper.py
@@ -11,7 +11,7 @@ def scrape_mtgs(url):
return requests.get(url, headers={'Cache-Control': 'no-cache', 'Pragma': 'no-cache', 'Expires': 'Thu, 01 Jan 1970 00:00:00 GMT'}).text
-def parse_mtgs(mtgs, manual_cards=[], card_corrections=[], delete_cards=[], split_cards={}, related_cards=[]):
+def parse_mtgs(mtgs, manual_cards=[], card_corrections=[], delete_cards=[], related_cards=[]):
mtgs = mtgs.replace('utf-16', 'utf-8')
    patterns = ['Name: (?P<name>.*?)<',
                'Cost: (?P<cost>[X]*\d{0,2}[XWUBRGC]*?)<',
@@ -71,10 +71,10 @@ def parse_mtgs(mtgs, manual_cards=[], card_corrections=[], delete_cards=[], spli
card2['setnumber'] = card['setnumber'] + 'b'
if 'rarity' in card:
card2['rarity'] = card['rarity']
- if not card1['name'] in split_cards:
- split_cards[card1['name']] = card2['name']
card1['layout'] = 'aftermath'
card2['layout'] = 'aftermath'
+ card1['names'] = [card1['name'], card2['name']]
+ card2['names'] = [card1['name'], card2['name']]
cards2.append(card1)
cards2.append(card2)
else:
@@ -167,21 +167,7 @@ def parse_mtgs(mtgs, manual_cards=[], card_corrections=[], delete_cards=[], spli
cardnames.append(card['name'])
cardnumber += 'b'
cardnames = []
- if card['name'] in split_cards:
- cardnames.append(card['name'])
- cardnames.append(split_cards[card['name']])
- cardnumber = cardnumber.replace('b', '').replace('a', '') + 'a'
- if not 'layout' in card:
- card['layout'] = 'split'
- for namematch in split_cards:
- if card['name'] == split_cards[namematch]:
- if not 'layout' in card or ('layout' in card and card['layout'] == ''):
- card['layout'] = 'split'
- cardnames.append(namematch)
- if not card['name'] in cardnames:
- cardnames.append(card['name'])
- cardnumber = cardnumber.replace(
- 'b', '').replace('a', '') + 'b'
+
if 'number' in card:
if 'b' in card['number'] or 'a' in card['number']:
if not 'layout' in card:
@@ -231,6 +217,8 @@ def parse_mtgs(mtgs, manual_cards=[], card_corrections=[], delete_cards=[], spli
cardjson["colors"] = card['colorArray']
if len(cardnames) > 1:
cardjson["names"] = cardnames
+ if 'names' in card:
+ cardjson['names'] = card['names']
if cardpower or cardpower == '0':
cardjson["power"] = cardpower
cardjson["toughness"] = cardtoughness
@@ -241,7 +229,7 @@ def parse_mtgs(mtgs, manual_cards=[], card_corrections=[], delete_cards=[], spli
cardarray.append(cardjson)
- return [{"cards": cardarray}, split_cards]
+ return {"cards": cardarray}
def scrape_mtgs_images(url='http://www.mtgsalvation.com/spoilers/183-hour-of-devastation', mtgscardurl='http://www.mtgsalvation.com/cards/hour-of-devastation/', exemptlist=[]):
diff --git a/spoilers.py b/spoilers.py
index 6fb03597..22064560 100644
--- a/spoilers.py
+++ b/spoilers.py
@@ -357,7 +357,7 @@ def get_image_urls(mtgjson, isfullspoil, code, name, size=269, setinfo=False):
return mtgjson
-def write_xml(mtgjson, code, name, releaseDate, split_cards=[]):
+def write_xml(mtgjson, code, name, releaseDate):
if not os.path.isdir('out/'):
os.makedirs('out/')
cardsxml = open('out/' + code + '.xml', 'w+')
@@ -383,8 +383,8 @@ def write_xml(mtgjson, code, name, releaseDate, split_cards=[]):
"\n")
# print mtgjson
for card in mtgjson["cards"]:
- for carda in split_cards:
- if card["name"] == split_cards[carda]:
+ if 'names' in card:
+ if card["name"] == card['names'][1]:
continue
if count == 0:
newest = card["name"]
diff --git a/wizards_scraper.py b/wizards_scraper.py
index a2c56957..f317cd97 100644
--- a/wizards_scraper.py
+++ b/wizards_scraper.py
@@ -5,7 +5,7 @@ from PIL import Image
import os
-def scrape_fullspoil(url="http://magic.wizards.com/en/articles/archive/card-image-gallery/hour-devastation", setinfo={"code": "HOU"}, showRarityColors=False, showFrameColors=False, manual_cards=[], delete_cards=[], split_cards=[]):
+def scrape_fullspoil(url="http://magic.wizards.com/en/articles/archive/card-image-gallery/hour-devastation", setinfo={"code": "HOU"}, showRarityColors=False, showFrameColors=False, manual_cards=[], delete_cards=[]):
if 'name' in setinfo:
        url = 'http://magic.wizards.com/en/articles/archive/card-image-gallery/' + setinfo['name'].lower().replace('of', '').replace(
            '  ', ' ').replace(' ', '-')
@@ -48,7 +48,7 @@ def scrape_fullspoil(url="http://magic.wizards.com/en/articles/archive/card-imag
return fullspoil
-def get_rarities_by_symbol(fullspoil, setcode, split_cards=[]):
+def get_rarities_by_symbol(fullspoil, setcode):
symbolPixels = (240, 219, 242, 221)
highVariance = 15
colorAverages = {
@@ -100,7 +100,7 @@ def get_rarities_by_symbol(fullspoil, setcode, split_cards=[]):
return fullspoil
-def get_colors_by_frame(fullspoil, setcode, split_cards={}):
+def get_colors_by_frame(fullspoil, setcode):
framePixels = (20, 11, 76, 16)
highVariance = 10
colorAverages = {
@@ -145,7 +145,7 @@ def get_colors_by_frame(fullspoil, setcode, split_cards={}):
return fullspoil
-def get_mana_symbols(fullspoil={}, setcode="HOU", split_cards=[]):
+def get_mana_symbols(fullspoil={}, setcode="HOU"):
manaBoxes = [(234, 23, 244, 33), (220, 23, 230, 33),
(206, 23, 216, 33), (192, 23, 202, 33), (178, 23, 188, 33)]
highVariance = 0