mirror of
https://github.com/Cockatrice/Magic-Spoiler.git
synced 2026-03-22 02:06:17 -05:00
Compare commits
220 Commits
2018-01-12
...
master
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
4392c34919 | ||
|
|
f8cdbc9a7f | ||
|
|
4cef743e46 | ||
|
|
2f76311165 | ||
|
|
1060f659a5 | ||
|
|
2f23aa93c6 | ||
|
|
34068d4d5e | ||
|
|
daa0f09a5e | ||
|
|
85e4bd093b | ||
|
|
257282374f | ||
|
|
f3ac505dac | ||
|
|
10aceba7a9 | ||
|
|
6a14f8889e | ||
|
|
04150f692e | ||
|
|
d74a762f35 | ||
|
|
cae671b37f | ||
|
|
6d0ca400be | ||
|
|
0b4b3bad0f | ||
|
|
d13dc6b799 | ||
|
|
4d049b92dc | ||
|
|
8459b3bfdd | ||
|
|
b7c6f5e896 | ||
|
|
7678fadeca | ||
|
|
0efd986a16 | ||
|
|
c7a7a66523 | ||
|
|
c619c4ec0b | ||
|
|
ab0ad6f6bf | ||
|
|
bdb0c5367e | ||
|
|
35c063b909 | ||
|
|
2f5bcd39c5 | ||
|
|
fee97dcfa9 | ||
|
|
9df6be76c8 | ||
|
|
3dd54f59d8 | ||
|
|
5e716ecdfd | ||
|
|
4681501866 | ||
|
|
9eab59de79 | ||
|
|
4692b6230e | ||
|
|
6a92707354 | ||
|
|
ef39de857f | ||
|
|
c4764f3c17 | ||
|
|
e9d8f09136 | ||
|
|
8deb35f745 | ||
|
|
bd910e37de | ||
|
|
bb7980bec2 | ||
|
|
f17023cf47 | ||
|
|
bb0e3b89ea | ||
|
|
c0d726a9e2 | ||
|
|
a49a417e02 | ||
|
|
32fc4ef4fc | ||
|
|
5d94c7a3d5 | ||
|
|
c8b5317293 | ||
|
|
5e6d9ad5f7 | ||
|
|
a33e90021b | ||
|
|
d234a9ccef | ||
|
|
13023af780 | ||
|
|
433f469140 | ||
|
|
b6b518b1d9 | ||
|
|
9697c74d08 | ||
|
|
bf02be35c8 | ||
|
|
0610f60c9f | ||
|
|
8afb4cee34 | ||
|
|
d54f412793 | ||
|
|
1adfed5fc6 | ||
|
|
ef0ced8d69 | ||
|
|
929a04c915 | ||
|
|
8bceceae6e | ||
|
|
bcfb1038c7 | ||
|
|
47c3b43963 | ||
|
|
764ff7203c | ||
|
|
6fd325b176 | ||
|
|
4791c9e2d2 | ||
|
|
b18bc002d9 | ||
|
|
4db482a3c8 | ||
|
|
8a048f0770 | ||
|
|
4dc0fb8761 | ||
|
|
15ae2677cd | ||
|
|
ff54e754c8 | ||
|
|
d365ca0375 | ||
|
|
bb2d56df14 | ||
|
|
26e1e22ad0 | ||
|
|
4ea02869be | ||
|
|
31ba735d95 | ||
|
|
0f4ec17eb6 | ||
|
|
103279671d | ||
|
|
64c7ba430f | ||
|
|
4b25618657 | ||
|
|
c24ab7574d | ||
|
|
b5d1d938a8 | ||
|
|
a242ebb1d1 | ||
|
|
a73195f022 | ||
|
|
fd65e743cc | ||
|
|
5dd5d37680 | ||
|
|
7c791d4f01 | ||
|
|
9e73420094 | ||
|
|
9481e5ff24 | ||
|
|
91a615f77d | ||
|
|
37f2d8e19b | ||
|
|
db458c19b7 | ||
|
|
7287cb5fbc | ||
|
|
7a138354f4 | ||
|
|
da60525795 | ||
|
|
a4bdf439ec | ||
|
|
aec3dd7618 | ||
|
|
8cfbdb7d87 | ||
|
|
7fb119e52e | ||
|
|
9f3d1839e9 | ||
|
|
ce18105f1f | ||
|
|
e49f6a46cd | ||
|
|
a1e2ac1893 | ||
|
|
eeb03ff0cd | ||
|
|
b29407d718 | ||
|
|
f34decf10a | ||
|
|
33896b5a78 | ||
|
|
54fc3aae83 | ||
|
|
69fac381f9 | ||
|
|
a54349b037 | ||
|
|
4458316d62 | ||
|
|
c136e4122e | ||
|
|
f4e6d80e89 | ||
|
|
66b2ff35cc | ||
|
|
65d4d5afdb | ||
|
|
c0a584bfbd | ||
|
|
c28882d0fd | ||
|
|
d3c20d34d1 | ||
|
|
9762c04041 | ||
|
|
a2865b2de0 | ||
|
|
557fd2a4dc | ||
|
|
c09a073fe1 | ||
|
|
d57d0c9278 | ||
|
|
ed895ebf39 | ||
|
|
b2891a2808 | ||
|
|
968e8fff47 | ||
|
|
e148d8d186 | ||
|
|
1d6dcc0e07 | ||
|
|
b6d3875527 | ||
|
|
c7da69bfab | ||
|
|
5dff95b3df | ||
|
|
0f794b243e | ||
|
|
5028791018 | ||
|
|
7b2c9801c7 | ||
|
|
5c646eb10b | ||
|
|
871d39d56b | ||
|
|
a497d7e81a | ||
|
|
297408ca83 | ||
|
|
ad229b3227 | ||
|
|
c7a64d89e6 | ||
|
|
83d04f0242 | ||
|
|
3eb796cbff | ||
|
|
9025ac85ee | ||
|
|
50f9a0f421 | ||
|
|
d983fab920 | ||
|
|
b9d2034948 | ||
|
|
cb88104849 | ||
|
|
c9fc1b2447 | ||
|
|
f9951aedb9 | ||
|
|
5629d02f3c | ||
|
|
1a7f698e58 | ||
|
|
84b24026e5 | ||
|
|
68c7150ac1 | ||
|
|
b2c858f4c6 | ||
|
|
e882cd6500 | ||
|
|
77744522b3 | ||
|
|
3538edc9cd | ||
|
|
9cefe75576 | ||
|
|
510fb05931 | ||
|
|
15ca5a4721 | ||
|
|
0da1b24cdc | ||
|
|
68ef367ea9 | ||
|
|
2f06dcc95d | ||
|
|
20212b176f | ||
|
|
b45f57f5c6 | ||
|
|
3b01ffbec8 | ||
|
|
6480752ede | ||
|
|
0b021b377f | ||
|
|
aad0479ca1 | ||
|
|
9c1ace825f | ||
|
|
667b49019f | ||
|
|
34c84e0b16 | ||
|
|
3292ebc731 | ||
|
|
def50915bc | ||
|
|
7c0cec2749 | ||
|
|
6ed303f75e | ||
|
|
f6af154852 | ||
|
|
be008917ca | ||
|
|
12a2b48a90 | ||
|
|
5cda79a339 | ||
|
|
0f6766945d | ||
|
|
8e65cfe357 | ||
|
|
28f19a2071 | ||
|
|
73b7c933a3 | ||
|
|
91cf650082 | ||
|
|
a896fef12e | ||
|
|
075dd7a40d | ||
|
|
4767cdffc9 | ||
|
|
41011824b6 | ||
|
|
4b11546a8a | ||
|
|
d83827b4de | ||
|
|
0cb9bb1948 | ||
|
|
4a79afc0de | ||
|
|
b419569752 | ||
|
|
00c4a45663 | ||
|
|
e2906c31da | ||
|
|
039a179b41 | ||
|
|
69cbce049c | ||
|
|
b2c33879cb | ||
|
|
2826ce15b9 | ||
|
|
0a7ad363f6 | ||
|
|
250dcc10a6 | ||
|
|
d089a398d3 | ||
|
|
5364e9fb95 | ||
|
|
bd81ece2dc | ||
|
|
0a7561970e | ||
|
|
1a6dbebdb5 | ||
|
|
36c4eb7540 | ||
|
|
292c7fc432 | ||
|
|
711a349e9e | ||
|
|
f1bb2c2d36 | ||
|
|
be97d7d075 | ||
|
|
32f2cc48dc | ||
|
|
e31191b7e0 |
30
.github/CONTRIBUTING.md
vendored
30
.github/CONTRIBUTING.md
vendored
|
|
@ -1,17 +1,25 @@
|
|||
# Contributing to SpoilerSeason #
|
||||
Thank you for your interest in contributing to SpoilerSeason!
|
||||
This project is an attempt to create a central source for new Magic: the Gathering spoilers and provide data files for miscellaneous projects including our friends over at [Cockatrice](https://github.com/Cockatrice/Cockatrice)
|
||||
# Contributing to Magic-Spoiler #
|
||||
Thank you for your interest in contributing to Magic-Spoiler!<br>
|
||||
This project is an attempt to create a central source for new Magic: the Gathering spoilers and provide data files for miscellaneous projects like [Cockatrice](https://github.com/Cockatrice/Cockatrice).
|
||||
|
||||
|
||||
## How can I help? ##
|
||||
SpoilerSeason grabs its data from many sources, but those sources often contain errors. If you just want to improve the card data and fix errors, you can start in the [errors.json](https://github.com/Cockatrice/Magic-Spoiler/blob/files/errors.json) file in the [files branch](https://github.com/Cockatrice/Magic-Spoiler/tree/files) or our [issue tracker.](https://github.com/Cockatrice/Magic-Spoiler/issues)
|
||||
Once you've found an error, whether it be in the errors.json file or from using the data files, make sure that error hasn't already been fixed in the appropriate file on the [files branch.](https://github.com/Cockatrice/Magic-Spoiler/tree/files) If it's still present, let's get it fixed!
|
||||
- If the error is with one of the fields in a card, check our [cards_corrections.json](https://github.com/Cockatrice/Magic-Spoiler/blob/master/cards_corrections.json) file. The syntax for this file is `"cardname": { "field to correct": "new value" }` If you're fixing the card name, you'd put the bad card name as `cardname`
|
||||
- If the card shouldn't exist at all, check the [cards_delete.json](https://github.com/Cockatrice/Magic-Spoiler/blob/master/cards_delete.json) file. This file is just an array of cards to delete. Card name is case sensitive!
|
||||
- If the card is a legitimate spoiler and it isn't showing up yet, you can manually add it. The file you want is [cards_manual.json](https://github.com/Cockatrice/Magic-Spoiler/blob/master/cards_manual.json) Make sure you link the spoil source in your Pull Request.
|
||||
Magic-Spoiler grabs its data from [Scryfall](https://scryfall.com/), but there can be errors of course.
|
||||
If you want to improve the card data and fix errors for all users, you simply have to report them directly to Scryfall.
|
||||
Once you've found a mistake in our data files, make sure that error hasn't already been fixed on the Scryfall webpage in the meantime. If it's still present there, let's get it fixed!
|
||||
- If the error is with one of the fields in a card (e.g. a spelling error or missing cmc) search for that card on the Scryfall webpage. Below the card art on the left, there are some links. Choose the bottom one (`Report card issue`) and provide the information in the form. Once their team checks and fixes the errors, it'll show up in our spoiler files, too.<br>
|
||||
It only takes a few days - be patient.
|
||||
- If the card is a legitimate spoiler and it isn't showing up yet, you can request it by [contacting the Scryfall support](https://scryfall.com/contact) and let them know. Make sure to link the official spoiler source in your report.
|
||||
- If the card shouldn't exist at all, let the Scryfall team know as well, please.
|
||||
|
||||
What you should **NOT** do however, is to submit PR's to our files branch and fix the xml files there directly.<br>
|
||||
You have to provide updates to Scryfall as all other changes would get overridden again.
|
||||
|
||||
All Pull Requests for card fixes should have the name of the card being fixed and the type of fix (fix/correction, delete, or manual). In the details of the PR, you **MUST INCLUDE A VALID LINK** to the page the spoiler is located at. For minor fixes, a link to the card image is OK. And of course link the issue you're fixing if there is one!
|
||||
|
||||
## Anything else? ##
|
||||
If you notice errors, please file an [issue](https://github.com/Cockatrice/Magic-Spoiler/issues)
|
||||
If you notice any other errors or have suggestions to the code, please [file an issue](https://github.com/Cockatrice/Magic-Spoiler/issues) in our repository.
|
||||
We try to follow [PEP8 Style Guide](https://peps.python.org/pep-0008/).
|
||||
|
||||
Code improvement PRs are always welcome!
|
||||
<br>
|
||||
|
||||
**Code improvement PRs are always welcome!**
|
||||
|
|
|
|||
13
.github/dependabot.yml
vendored
Normal file
13
.github/dependabot.yml
vendored
Normal file
|
|
@ -0,0 +1,13 @@
|
|||
# Configuration options: https://docs.github.com/en/github/administering-a-repository/configuration-options-for-dependency-updates
|
||||
|
||||
version: 2
|
||||
updates:
|
||||
# Enable version updates for GitHub Actions
|
||||
- package-ecosystem: "github-actions"
|
||||
# Directory must be set to "/" to check for workflow files in .github/workflows
|
||||
directory: "/"
|
||||
# Check for updates to GitHub Actions once a week
|
||||
schedule:
|
||||
interval: "weekly"
|
||||
# Limit the amount of open PR's (default = 5, disabled = 0, security updates are not impacted)
|
||||
open-pull-requests-limit: 2
|
||||
73
.github/workflows/deploy.yml
vendored
Normal file
73
.github/workflows/deploy.yml
vendored
Normal file
|
|
@ -0,0 +1,73 @@
|
|||
name: Deploy
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
paths-ignore:
|
||||
- '**.md'
|
||||
pull_request:
|
||||
branches:
|
||||
- master
|
||||
paths-ignore:
|
||||
- '**.md'
|
||||
schedule:
|
||||
# Every 8 hours = 3 times a day
|
||||
- cron: '0 */8 * * *'
|
||||
|
||||
jobs:
|
||||
deploy:
|
||||
# Do not run the scheduled workflow on forks
|
||||
if: github.event_name != 'schedule' || github.repository_owner == 'Cockatrice'
|
||||
|
||||
name: Check for new spoiler
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
env:
|
||||
DEPLOY: ${{github.ref == 'refs/heads/master'}}
|
||||
OUTPUT_PATH: out
|
||||
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@v6
|
||||
|
||||
- name: Checkout output branch
|
||||
# Run only when triggered from master
|
||||
if: env.DEPLOY == 'true'
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
ref: files
|
||||
path: ${{env.OUTPUT_PATH}}
|
||||
|
||||
- name: Install requirements using pip
|
||||
shell: bash
|
||||
run: python3 -m pip install --requirement requirements.txt
|
||||
|
||||
- name: Run script
|
||||
id: run
|
||||
shell: bash
|
||||
run: python3 -m magic_spoiler
|
||||
|
||||
- name: Upload artifacts
|
||||
# Run only when triggered from a PR
|
||||
if: github.event_name == 'pull_request'
|
||||
uses: actions/upload-artifact@v7
|
||||
with:
|
||||
name: spoiler-output
|
||||
path: ${{github.workspace}}/${{env.OUTPUT_PATH}}
|
||||
if-no-files-found: error
|
||||
|
||||
- name: Deploy changes
|
||||
# Run only when triggered from master and changes are available
|
||||
if: env.DEPLOY == 'true' && steps.run.outputs.deploy == 'true'
|
||||
shell: bash
|
||||
working-directory: ${{env.OUTPUT_PATH}}
|
||||
run: |
|
||||
git config user.name github-actions
|
||||
git config user.email github-actions@github.com
|
||||
git add "*.xml" SpoilerSeasonEnabled
|
||||
git commit -m "Deploy: $GITHUB_SHA"
|
||||
git push
|
||||
deploy_commit=`git rev-parse HEAD`
|
||||
echo "::notice title=New data uploaded::See deployment: $GITHUB_SERVER_URL/$GITHUB_REPOSITORY/commit/$deploy_commit"
|
||||
40
.gitignore
vendored
40
.gitignore
vendored
|
|
@ -1,7 +1,6 @@
|
|||
# Project specific
|
||||
out/
|
||||
AllSets.pre.json
|
||||
deploy_key.enc
|
||||
|
||||
# Byte-compiled / optimized / DLL files
|
||||
__pycache__/
|
||||
|
|
@ -13,19 +12,19 @@ __pycache__/
|
|||
|
||||
# Distribution / packaging
|
||||
.Python
|
||||
env/
|
||||
build/
|
||||
develop-eggs/
|
||||
dist/
|
||||
downloads/
|
||||
eggs/
|
||||
.eggs/
|
||||
lib/
|
||||
lib64/
|
||||
parts/
|
||||
sdist/
|
||||
var/
|
||||
*.egg-info/
|
||||
env
|
||||
build
|
||||
develop-eggs
|
||||
dist
|
||||
downloads
|
||||
eggs
|
||||
.eggs
|
||||
lib
|
||||
lib64
|
||||
parts
|
||||
sdist
|
||||
var
|
||||
*.egg-info
|
||||
.installed.cfg
|
||||
*.egg
|
||||
|
||||
|
|
@ -84,8 +83,11 @@ celerybeat-schedule
|
|||
.env
|
||||
|
||||
# virtualenv
|
||||
venv/
|
||||
ENV/
|
||||
venv
|
||||
ENV
|
||||
bin
|
||||
include
|
||||
pyvenv.cfg
|
||||
|
||||
# Spyder project settings
|
||||
.spyderproject
|
||||
|
|
@ -95,3 +97,9 @@ ENV/
|
|||
|
||||
# JetBrains
|
||||
.idea
|
||||
|
||||
#Mac Stuff
|
||||
.DS_Store
|
||||
|
||||
*.sqlite
|
||||
.*_cache
|
||||
|
|
|
|||
68
.pylintrc
Normal file
68
.pylintrc
Normal file
|
|
@ -0,0 +1,68 @@
|
|||
[MASTER]
|
||||
|
||||
# Pickle collected data for later comparisons.
|
||||
persistent=yes
|
||||
|
||||
|
||||
[MESSAGES CONTROL]
|
||||
|
||||
# Disable the message, report, category or checker with the given id(s). You
|
||||
# can either give multiple identifiers separated by comma (,) or put this
|
||||
# option multiple times (only on the command line, not in the configuration
|
||||
# file where it should appear only once).You can also use "--disable=all" to
|
||||
# disable everything first and then reenable specific checks. For example, if
|
||||
# you want to run only the similarities checker, you can use "--disable=all
|
||||
# --enable=similarities". If you want to run only the classes checker, but have
|
||||
# no Warning level messages displayed, use"--disable=all --enable=classes
|
||||
# --disable=W"
|
||||
disable=
|
||||
bad-continuation,
|
||||
fixme,
|
||||
line-too-long,
|
||||
locally-enabled,
|
||||
locally-disabled,
|
||||
logging-format-interpolation,
|
||||
too-few-public-methods,
|
||||
too-many-statements,
|
||||
wrong-import-order,
|
||||
too-many-branches,
|
||||
import-error
|
||||
|
||||
|
||||
[REPORTS]
|
||||
|
||||
# Set the output format. Available formats are text, parseable, colorized, json
|
||||
# and msvs (visual studio).You can also give a reporter class, eg
|
||||
# mypackage.mymodule.MyReporterClass.
|
||||
output-format=colorized
|
||||
|
||||
|
||||
[BASIC]
|
||||
|
||||
# Good variable names which should always be accepted, separated by a comma.
|
||||
good-names=
|
||||
f,
|
||||
i,
|
||||
j,
|
||||
k,
|
||||
_,
|
||||
|
||||
# Regular expression which should only match function or class names that do
|
||||
# not require a docstring.
|
||||
no-docstring-rgx=__.*__|test_.*
|
||||
|
||||
|
||||
[MISCELLANEOUS]
|
||||
|
||||
# List of note tags to take in consideration, separated by a comma.
|
||||
notes=
|
||||
FIXME,
|
||||
XXX,
|
||||
TODO,
|
||||
|
||||
|
||||
[VARIABLES]
|
||||
|
||||
# A regular expression matching the name of dummy variables (i.e. expectedly
|
||||
# not used).
|
||||
dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)
|
||||
46
.travis.yml
46
.travis.yml
|
|
@ -1,46 +0,0 @@
|
|||
sudo: false # force container-based builds (no start-up time!)
|
||||
|
||||
dist: trusty # force 14.04 trusty build
|
||||
|
||||
language: python
|
||||
|
||||
cache: pip # enable cache for "$HOME/.cache/pip" directory
|
||||
|
||||
before_install:
|
||||
- export CFLAGS=-O0 # considerably speed-up build time for pip packages (especially lxml), optimizations doesn't matter for ci
|
||||
- pip install PyYAML
|
||||
- python verify_files.py # make sure input files are OK before wasting time with prereqs
|
||||
|
||||
install:
|
||||
- pip install -r requirements.txt
|
||||
|
||||
script: bash ./.travis/deploy.sh
|
||||
|
||||
after_script:
|
||||
- sleep 10 # helps travis finish logging
|
||||
|
||||
notifications:
|
||||
email: false
|
||||
webhooks:
|
||||
urls:
|
||||
- https://webhooks.gitter.im/e/691b9acffe1def5f9d6b
|
||||
on_success: change # options: [always|never|change] default: always
|
||||
on_failure: change # options: [always|never|change] default: always
|
||||
on_start: never # options: [always|never|change] default: always
|
||||
|
||||
deploy:
|
||||
provider: releases
|
||||
api_key:
|
||||
secure: Bin220gU9Re176T/1bIaX/rhGB+uEaw13aoB2/ir0ePHQB0ihasEJcsgmlN8kz93KSN6vp4y2HwMLnz3t7Pn0amTV8QVL/AlOOzjbq8m/1kYTbXdPlYnMsosZPVFLHRan4LEGFsovRia6LO4p9fqC8BDgQl89W/88PlYAMWzao5jTyKKHp8o+sioYhKj9D+86lxLYspQ+6SN0HOCnF2MZ/vZtxbY32ituswAc/sJK1MtZ/PExoMe1nSI2iKCaatXyKA+FVCUNLHRAu4LgB1GfJCLpmlPbvjud8A6WAKNF6poNCvFck+Ox56tt4bw3ggR5W9kTEhvX74l6AEeC7Qz6bHjh1CEngrqFjyaHy25CcygWgagf0DUsvyGRS0RqEx4bz9psD09d+oWihdkJMfa5kRzXtVQD8sxDgsBqEz/DjsMIlf/L5ISSa7lAYiqq65ELpezBFOlvEZ9avOYLcZc7m5/5ZhtcA4HPSqzfn2nhkPpeggBKufMdyc8JIDkvs/JlFsNu46QVvugjbdGvtb4SlQK310py0TOA6nYt7WntDhX3SukKAeh6oHjZaL5aeoSBhnlQRgJfDBqI3+7anLatD30uEKCMp5sWcLrjB1HO9ZH5nceWBg4cMKJvI/zT77h96fCy7uMkPNt867GP8O9KkWVWzxGBkpIdstigNWfT5g=
|
||||
file_glob: true
|
||||
file: out/*.xml
|
||||
skip_cleanup: true
|
||||
overwrite: true
|
||||
on:
|
||||
tags: true
|
||||
branch: master
|
||||
|
||||
env:
|
||||
global:
|
||||
- ENCRYPTION_LABEL: ec68c19ba263
|
||||
- COMMIT_AUTHOR_EMAIL: you@example.com
|
||||
|
|
@ -1,96 +0,0 @@
|
|||
#!/bin/bash
|
||||
set -e # Exit with nonzero exit code if anything fails
|
||||
|
||||
SOURCE_BRANCH="master"
|
||||
TARGET_BRANCH="files"
|
||||
|
||||
function doCompile {
|
||||
echo "Running script..."
|
||||
python main.py
|
||||
}
|
||||
|
||||
# Pull requests and commits to other branches shouldn't try to deploy, just build to verify
|
||||
if [ "$TRAVIS_PULL_REQUEST" != "false" -o "$TRAVIS_BRANCH" != "$SOURCE_BRANCH" ]; then
|
||||
echo "Skipping deploy; just doing a build."
|
||||
# Run our compile script and let user know in logs
|
||||
doCompile
|
||||
exit 0
|
||||
fi
|
||||
|
||||
# Save some useful information
|
||||
REPO=`git config remote.origin.url`
|
||||
SSH_REPO=${REPO/https:\/\/github.com\//git@github.com:}
|
||||
SHA=`git rev-parse --verify HEAD`
|
||||
|
||||
# Clone the existing gh-pages for this repo into out/
|
||||
# Create a new empty branch if gh-pages doesn't exist yet (should only happen on first deploy)
|
||||
git clone $REPO out
|
||||
cd out
|
||||
git checkout $TARGET_BRANCH || git checkout --orphan $TARGET_BRANCH
|
||||
cd ..
|
||||
|
||||
# Clean out existing contents
|
||||
rm -rf out/**/* || exit 0
|
||||
|
||||
# Run our compile script and let user know in logs
|
||||
doCompile
|
||||
|
||||
echo TRAVIS_PULL_REQUEST ${TRAVIS_PULL_REQUEST}
|
||||
echo TRAVIS_SECURE_ENV_VARS ${TRAVIS_SECURE_ENV_VARS}
|
||||
echo TRAVIS_EVENT_TYPE ${TRAVIS_EVENT_TYPE}
|
||||
|
||||
# Don't push to our branch for PRs.
|
||||
#if [ "${ghToken:-false}" != "false" ]; then
|
||||
# doCompile
|
||||
#else
|
||||
# doCompile
|
||||
# exit 0
|
||||
#fi
|
||||
|
||||
# Now let's go have some fun with the cloned repo
|
||||
cd out
|
||||
ls
|
||||
git config user.name "Travis CI"
|
||||
git config user.email "$COMMIT_AUTHOR_EMAIL"
|
||||
|
||||
# If there are no changes to the compiled out (e.g. this is a README update) then just bail.
|
||||
#if git diff --quiet; then
|
||||
# echo "No changes to the output on this push; exiting."
|
||||
# exit 0
|
||||
#fi
|
||||
|
||||
# Commit the "changes", i.e. the new version.
|
||||
# The delta will show diffs between new and old versions.
|
||||
# Only commit if more than one line has been changed (datetime in spoiler.xml)
|
||||
CHANGED_FILES=`git diff --numstat --minimal | sed '/^[1-]\s\+[1-]\s\+.*/d' | wc -c`
|
||||
ONLYDATECHANGE=true
|
||||
if [[ $CHANGED_FILES -eq 0 ]]; then
|
||||
for CHANGED_FILE in `git diff --name-only`; do
|
||||
if ! [[ $CHANGED_FILE =~ "spoiler.xml" ]]; then
|
||||
ONLYDATECHANGE=false
|
||||
fi
|
||||
done
|
||||
else
|
||||
ONLYDATECHANGE=false
|
||||
fi
|
||||
if [[ $ONLYDATECHANGE == false ]]; then
|
||||
git add -A .
|
||||
git commit -m "Travis Deploy: ${SHA}"
|
||||
else
|
||||
echo "Only date in spoiler.xml changed, not committing"
|
||||
fi
|
||||
|
||||
# Get the deploy key by using Travis's stored variables to decrypt deploy_key.enc
|
||||
ENCRYPTED_KEY_VAR="encrypted_${ENCRYPTION_LABEL}_key"
|
||||
ENCRYPTED_IV_VAR="encrypted_${ENCRYPTION_LABEL}_iv"
|
||||
ENCRYPTED_KEY=${!ENCRYPTED_KEY_VAR}
|
||||
ENCRYPTED_IV=${!ENCRYPTED_IV_VAR}
|
||||
openssl aes-256-cbc -K $ENCRYPTED_KEY -iv $ENCRYPTED_IV -in ../.travis/deploy_key.enc -out ../deploy_key -d
|
||||
chmod 600 ../deploy_key
|
||||
eval `ssh-agent -s`
|
||||
ssh-add ../deploy_key
|
||||
|
||||
# Now that we're all set up, we can push.
|
||||
git push $SSH_REPO $TARGET_BRANCH
|
||||
|
||||
ssh-agent -k
|
||||
Binary file not shown.
51
README.md
51
README.md
|
|
@ -1,24 +1,35 @@
|
|||
[](https://github.com/Cockatrice/Magic-Spoiler/blob/files/spoiler.xml)
|
||||

|
||||
|
||||
[)](https://github.com/Cockatrice/Magic-Spoiler/tree/files) [](https://github.com/Cockatrice/Magic-Spoiler/blob/files/spoiler.xml)<br>
|
||||
[](https://github.com/Cockatrice/Magic-Spoiler/blob/files/spoiler.xml)<br>
|
||||
[)](https://github.com/Cockatrice/Magic-Spoiler/blob/files/spoiler.xml)
|
||||
|
||||
# Magic-Spoiler [](https://gitter.im/Cockatrice/Magic-Spoiler) #
|
||||
<br>
|
||||
|
||||
Magic-Spoiler is a Python script to scrape <i>MTG Salvation</i>, <i>Scryfall</i>, <i>MythicSpoiler</i> and <i>Wizards</i> to compile<br>
|
||||
XML files (Cockatrice formatted) and application-ready json files (mtgjson formatted) with information about spoiled cards from upcoming sets.
|
||||
# Magic-Spoiler [](https://discord.gg/3Z9yzmA) #
|
||||
|
||||
## Output [](https://travis-ci.org/Cockatrice/Magic-Spoiler) ##
|
||||
Just looking for XML or JSON files? [They are in our `files` branch!](https://github.com/Cockatrice/Magic-Spoiler/tree/files)
|
||||
Magic-Spoiler is a Python script to query the [Scryfall](https://scryfall.com) API to compile XML files (Cockatrice formatted) with information about spoiled cards from upcoming sets.
|
||||
|
||||
When run by Travis, the script automatically updates the files and uploads new versions there. ([History of changes](https://github.com/Cockatrice/Magic-Spoiler/commits/files))<br>
|
||||
Travis CI is run daily on a cron job basis.
|
||||
## Output [](https://github.com/Cockatrice/Magic-Spoiler/actions/workflows/deploy.yml?query=branch%3Amaster) ##
|
||||
|
||||
## Errors ##
|
||||
Noticed an error? Check out our [Contributing file](https://github.com/Cockatrice/Magic-Spoiler/blob/master/.github/CONTRIBUTING.md) for information on how to help!
|
||||
>[!TIP]
|
||||
>**Enable "Download Spoilers Automatically" in `Cockatrice → Settings → Card Sources → Spoilers` to get updates automatically pushed to your client!**<br>
|
||||
You can also [add the desired <b>.xml</b> file(s) to your <i>customsets</i> folder manually](https://github.com/Cockatrice/Cockatrice/wiki/Custom-Cards-&-Sets#to-add-custom-sets-follow-these-steps) to make Cockatrice use them.
|
||||
|
||||
Just looking for XML files? [They are in our `files` branch!](https://github.com/Cockatrice/Magic-Spoiler/tree/files)
|
||||
|
||||
When run by our CI, the script automatically updates the files and uploads new versions to this branch. ([History of changes](https://github.com/Cockatrice/Magic-Spoiler/commits/files))<br>
|
||||
GitHub Actions are scheduled to automatically run three times a day.
|
||||
|
||||
## Contributing ##
|
||||
Noticed an error in the card data? Check out our [Contributing file](https://github.com/Cockatrice/Magic-Spoiler/blob/master/.github/CONTRIBUTING.md) for information on how to help fixing it!
|
||||
|
||||
We do happily accept PR's that improve our script as well!
|
||||
|
||||
## Running ##
|
||||
|
||||
### Requirements ###
|
||||
* Python 2.7
|
||||
* Python 3.6
|
||||
* several Python Modules (see [requirements.txt](https://github.com/Cockatrice/Magic-Spoiler/blob/master/requirements.txt))
|
||||
|
||||
```
|
||||
|
|
@ -28,18 +39,14 @@ pip install -r requirements.txt
|
|||
### Usage ###
|
||||
|
||||
```
|
||||
$> python main.py
|
||||
$> python -m magic_spoiler
|
||||
```
|
||||
|
||||
Outputs the following files to `out/` directory:<br>
|
||||
`spoiler.xml`, `{SETCODE}.xml`, `MPS_{SETCODE}.xml`,<br>
|
||||
`spoiler.json`, `{SETCODE}.json`, `MPS_{SETCODE}.json`
|
||||
> **spoiler** → files contain all currently available spoilers from different sets<br>
|
||||
> **{SETCODE}** → files contain just the spoiler available for this single set<br>
|
||||
> **MPS_{SETCODE}** → files contain just the spoiler available for this [Masterpiece Series](http://magic.wizards.com/en/articles/archive/making-magic/masterpiece-series-2016-09-12)
|
||||
### Output ###
|
||||
|
||||
Errors are logged there as well (`errors.json`)
|
||||
All spoiler files are written to the `out/` directory:
|
||||
|
||||
<br>
|
||||
|
||||
[Add the desired <b>.xml</b> file to your <i>customsets</i> folder to make Cockatrice use it.](https://github.com/Cockatrice/Cockatrice/wiki/Custom-Cards-&-Sets#to-add-custom-sets-follow-these-steps)
|
||||
| File Name | Content |
|
||||
|:--|:--|
|
||||
| `spoiler.xml` | file contains **all** currently available spoilers from different **sets** |
|
||||
| `{SET_CODE}.xml` | files contain just the spoiler available for this **single set** |
|
||||
|
|
|
|||
|
|
@ -1,88 +0,0 @@
|
|||
|
||||
### How to Use: #########################################################
|
||||
# #
|
||||
# Each card to fix has to be #
|
||||
# * in its own new array #
|
||||
# #
|
||||
# Each card array starts with #
|
||||
# * a space (" ") #
|
||||
# * exact card name #
|
||||
# * followed by a colon (":") #
|
||||
# #
|
||||
# Each card array consist out of #
|
||||
# * a list of corrected fields #
|
||||
# #
|
||||
# Each card corrections consist out of #
|
||||
# * 3 leading spaces (" ") #
|
||||
# * card attribute with wrong value #
|
||||
# * followed by a colon (":") #
|
||||
# * additional space (" ") #
|
||||
# * new value for that attribute #
|
||||
# #
|
||||
### Hints: ##############################################################
|
||||
# #
|
||||
# * Indentation is critical, two spaces per indentation #
|
||||
# * For a better overview group cards from the same set and #
|
||||
# label them with "#setcode" above the first entry of each set #
|
||||
# #
|
||||
### Form: ###############################################################
|
||||
# #
|
||||
#card name: #
|
||||
# field to fix: new value #
|
||||
# #
|
||||
### Example Entries: ####################################################
|
||||
# #
|
||||
#Jace, the Planeswalker: #
|
||||
# loyalty: 5 #
|
||||
# manaCost: 1UUB #
|
||||
# #
|
||||
#Terror: #
|
||||
# type: Instant #
|
||||
# #
|
||||
### Explanation of Fields and their Values: #############################
|
||||
# #
|
||||
# name: Card Name #
|
||||
# cmc: 4 #
|
||||
# colorIdentity: #
|
||||
# - U #
|
||||
# - B #keep track of https://github.com/mtgjson/mtgjson4/issues/56 #
|
||||
# colors: #
|
||||
# - Blue #
|
||||
# - Black #
|
||||
# manaCost: 1UUB #
|
||||
# number: 140 #
|
||||
# rarity: Mythic Rare #
|
||||
# power: X #
|
||||
# text: "{5}, {T}: You win the game." #
|
||||
# toughness: * #
|
||||
# type: Legendary Artifact Creature - Human Monk #
|
||||
# url: http://wizards.c0m/link/to/card.png #
|
||||
# #
|
||||
#########################################################################
|
||||
# never remove this part of the file, since it will break otherwise #
|
||||
corrections: #
|
||||
- must not be empty! #
|
||||
#########################################################################
|
||||
|
||||
# Enter cards with their corrections below. But note the syntax hints on top!
|
||||
|
||||
Angrath, Minotaur Pirate:
|
||||
loyalty: 5
|
||||
Angrath, the Flame-Chained:
|
||||
loyalty: 4
|
||||
Deadeye Rig Hauler:
|
||||
url: 'http://media-dominaria.cursecdn.com/avatars/202/581/636506899130702235.png'
|
||||
Gleaming Barrier:
|
||||
power: 0
|
||||
toughness: 4
|
||||
Golden Guardian:
|
||||
name: Golden Guardian
|
||||
power: 4
|
||||
toughness: 4
|
||||
Huatli, Radiant Champion:
|
||||
loyalty: 3
|
||||
Steelclad Ferocidons:
|
||||
url: 'http://media-dominaria.cursecdn.com/avatars/202/588/636506917986102886.png'
|
||||
Vraska, Scheming Gorgon:
|
||||
loyalty: 5
|
||||
|
||||
|
|
@ -1,37 +0,0 @@
|
|||
|
||||
### How to Use: #########################################################
|
||||
# #
|
||||
# Each card to delete has to be #
|
||||
# * in its own new line #
|
||||
# #
|
||||
# Each line has to consist out of #
|
||||
# * 2 leading spaces (" ") #
|
||||
# * a dash ("-") #
|
||||
# * additional space (" ") #
|
||||
# * exact card name #
|
||||
# #
|
||||
### Hints: ##############################################################
|
||||
# #
|
||||
# * Cards that begin/end with spaces or contain a colon need quoted #
|
||||
# * Indentation is critical, two spaces per indentation #
|
||||
# * For a better overview group cards from the same set and #
|
||||
# label them with "#setcode" above the first entry of each set #
|
||||
# #
|
||||
### Form: ###############################################################
|
||||
# #
|
||||
# - card name #
|
||||
# #
|
||||
### Example Entry: ######################################################
|
||||
# #
|
||||
# - JUNK NAME TO DELETE #
|
||||
# - " Tocaf's Honor Guard " #
|
||||
# #
|
||||
#########################################################################
|
||||
# never remove this part of the file, since it will break otherwise #
|
||||
delete: #
|
||||
- must not be empty! #
|
||||
#########################################################################
|
||||
|
||||
# Enter cards that should be deleted below. But note the syntax hints on top!
|
||||
|
||||
|
||||
|
|
@ -1,70 +0,0 @@
|
|||
|
||||
### How to Use: ############################################################
|
||||
# #
|
||||
# Each card that you want to manually add has to be #
|
||||
# * in its correct set block #
|
||||
# #
|
||||
# Each set block has to start with #
|
||||
# * set code #
|
||||
# * followed by a colon (":") #
|
||||
# #
|
||||
# Each set block consist out of #
|
||||
# * a list of cards #
|
||||
# #
|
||||
# Each card in the list has to start with #
|
||||
# * 2 leading spaces (" ") #
|
||||
# * a dash ("-") to open an new array for each card #
|
||||
# #
|
||||
# Each new card has to consist out of #
|
||||
# * a new line #
|
||||
# * 4 leading spaces in total (" ") #
|
||||
# * card attribute #
|
||||
# * followed by a colon (":") #
|
||||
# * additional space (" ") #
|
||||
# * value for that attribute #
|
||||
# #
|
||||
### Hints: #################################################################
|
||||
# #
|
||||
# * Each card attribute you want your card to have needs its own line #
|
||||
# * Most important fields are: name, manaCost, rarity, type and url #
|
||||
# * Values for the text field must be surrounded by quotation marks (") #
|
||||
# * Newlines in the text field must be replaced by \n #
|
||||
# * Indentation is critical, two spaces per indentation #
|
||||
# #
|
||||
### Form: ##################################################################
|
||||
# #
|
||||
#set code: #
|
||||
# - #
|
||||
# card field to add: value #
|
||||
# card field to add: value #
|
||||
# - #
|
||||
# card field to add: value #
|
||||
# #
|
||||
#other set code: #
|
||||
# - #
|
||||
# card field to add: value #
|
||||
# #
|
||||
### Example Entry: #########################################################
|
||||
# #
|
||||
#XLN: #
|
||||
# - #
|
||||
# name: Ripjaw Raptor #
|
||||
# manaCost: 2GG #
|
||||
# number: 203 #
|
||||
# rarity: Rare #
|
||||
# type: Creature - Dinosaur #
|
||||
# url: http://mythicspoiler.com/ixa/cards/havenraptor.jpg <--- gath #
|
||||
# text: "Enrage - Whenever Ripjaw Raptor is dealt damage, draw a card." #
|
||||
# cmc: 4 #
|
||||
# power: 4 #
|
||||
# toughness: 5 #
|
||||
# - #
|
||||
# name: Vraska's Contempt #
|
||||
# manaCost: 2BB #
|
||||
# rarity: Rare #
|
||||
# type: Instant #
|
||||
# url: https://media.wizards.com/2017/xln/en_oUjuu5E2th.png <--- ? #
|
||||
# #
|
||||
############################################################################
|
||||
|
||||
# Enter cards that should be added manually directly below. But note the syntax hints on top!
|
||||
1
magic_spoiler/__init__.py
Normal file
1
magic_spoiler/__init__.py
Normal file
|
|
@ -0,0 +1 @@
|
|||
"""Magic Spoiler Program"""
|
||||
669
magic_spoiler/__main__.py
Normal file
669
magic_spoiler/__main__.py
Normal file
|
|
@ -0,0 +1,669 @@
|
|||
"""
|
||||
Handle Scryfall Spoilers
|
||||
"""
|
||||
import contextvars
|
||||
import datetime
|
||||
import hashlib
|
||||
import json
|
||||
import os
|
||||
import pathlib
|
||||
import shutil
|
||||
import time
|
||||
from enum import Enum
|
||||
from typing import IO, Any, Dict, List, Tuple, Union
|
||||
|
||||
import requests
|
||||
import requests_cache
|
||||
from lxml import etree
|
||||
|
||||
SCRYFALL_SET_URL: str = "https://api.scryfall.com/sets/{}"
|
||||
SESSION: contextvars.ContextVar = contextvars.ContextVar("SESSION_SCRYFALL")
|
||||
SPOILER_SETS: contextvars.ContextVar = contextvars.ContextVar("SPOILER_SETS")
|
||||
SPOILER_MARK = "~"
|
||||
|
||||
OUTPUT_DIR = pathlib.Path("out")
|
||||
OUTPUT_TMP_DIR = OUTPUT_DIR.joinpath("tmp")
|
||||
# Translation table used by xml_escape(): maps XML-reserved characters to
# their entity references so card text can be embedded safely in XML.
# (The previous table mapped each character to itself — an extraction/encoding
# artifact — which made escaping a no-op and produced invalid XML.)
XML_ESCAPE_TRANSLATE_MAP = str.maketrans(
    {"&": "&amp;", '"': "&quot;", "<": "&lt;", ">": "&gt;"}
)
# remove any control characters outright
XML_ESCAPE_TRANSLATE_MAP.update({i: "" for i in range(ord(" "))})
# don't remove whitespace characters in the sub " " range
del XML_ESCAPE_TRANSLATE_MAP[ord("\n")]
del XML_ESCAPE_TRANSLATE_MAP[ord("\t")]
|
||||
|
||||
# copied from Cockatrice/oracle/src/oracleimporter.h OracleImporter::mainCardTypes
|
||||
MAINTYPES = (
|
||||
"Planeswalker",
|
||||
"Creature",
|
||||
"Land",
|
||||
"Sorcery",
|
||||
"Instant",
|
||||
"Artifact",
|
||||
"Enchantment"
|
||||
)
|
||||
|
||||
class Priority(Enum):
    """Relative priority of a set type, emitted as the <priority> XML tag value."""

    FALLBACK = 0  # default used when a set_type is missing from SET_TYPE_PRIORITY_MAP
    PRIMARY = 10  # core sets and expansions
    SECONDARY = 20  # supplemental products (commander, starter, ...)
    REPRINT = 30  # reprint-only products (masters, from_the_vault, ...)
    OTHER = 40  # everything else (alchemy, funny, minigame, vanguard)
|
||||
|
||||
# Maps a Scryfall "set_type" string (lower-case) to the Priority written into
# each generated <set> header; types not listed here fall back to
# Priority.FALLBACK (see fill_header_sets).
SET_TYPE_PRIORITY_MAP = {
    "core": Priority.PRIMARY,
    "expansion": Priority.PRIMARY,

    "commander": Priority.SECONDARY,
    "starter": Priority.SECONDARY,
    "draft_innovation": Priority.SECONDARY,
    "duel_deck": Priority.SECONDARY,

    "archenemy": Priority.REPRINT,
    "arsenal": Priority.REPRINT,
    "box": Priority.REPRINT,
    "from_the_vault": Priority.REPRINT,
    "masterpiece": Priority.REPRINT,
    "masters": Priority.REPRINT,
    "memorabilia": Priority.REPRINT,
    "planechase": Priority.REPRINT,
    "premium_deck": Priority.REPRINT,
    "promo": Priority.REPRINT,
    "spellbook": Priority.REPRINT,
    "token": Priority.REPRINT,
    "treasure_chest": Priority.REPRINT,

    "alchemy": Priority.OTHER,
    "funny": Priority.OTHER,
    "minigame": Priority.OTHER,
    "vanguard": Priority.OTHER,
}
|
||||
|
||||
|
||||
def __get_session() -> Union[requests.Session, Any]:
    """
    Return the shared HTTP session, creating it lazily on first use.

    A sqlite-backed requests cache (2 hour expiry) is installed on every
    call; the session object itself lives in the SESSION context variable.
    :return: Session
    """
    requests_cache.install_cache(
        cache_name="scryfall_cache",
        backend="sqlite",
        expire_after=7200,  # 2 hours
    )

    session = SESSION.get(None)
    if not session:
        session = requests.Session()
        SESSION.set(session)
    return session
|
||||
|
||||
|
||||
def json_download(scryfall_url: str) -> Dict[str, Any]:
    """
    Fetch a Scryfall endpoint and decode its JSON body.

    :param scryfall_url: URL to json_download JSON data from
    :return: JSON object of the Scryfall data
    """
    response: Any = __get_session().get(url=scryfall_url, timeout=10.0)
    payload: Dict[str, Any] = response.json()
    print("Downloaded: {} (Cache = {})".format(scryfall_url, response.from_cache))
    return payload
|
||||
|
||||
|
||||
def download_scryfall_set(set_code: str) -> List[Dict[str, Any]]:
    """
    Download a set from Scryfall in its entirety, following pagination.

    Fixes: removed the dead `page_downloaded` counter (written, never read)
    and replaced the per-card append loop with list.extend.

    :param set_code: Set code
    :return: Card list sorted by (name, collector number); empty on API error
    """
    set_content: Dict[str, Any] = json_download(SCRYFALL_SET_URL.format(set_code))
    if set_content["object"] == "error":
        print("API download failed for {}: {}".format(set_code, set_content))
        return []

    spoiler_cards: List[Dict[str, Any]] = []
    download_url = set_content["search_uri"]

    # Walk the paginated search results until Scryfall reports no more pages.
    while download_url:
        cards = json_download(download_url)
        if cards["object"] == "error":
            print("Set {} has no cards, skipping".format(set_code))
            break

        spoiler_cards.extend(cards["data"])

        if not cards.get("has_more"):
            break

        download_url = cards["next_page"]

    return sorted(spoiler_cards, key=lambda c: (c["name"], c["collector_number"]))
|
||||
|
||||
|
||||
def build_types(sf_card: Dict[str, Any]) -> Tuple[List[str], str, List[str]]:
    """
    Build the super, main, and sub-types of a given card.

    Fixes: dropped the redundant `" " in card_subs` conditional — str.split
    already returns a one-element list when no separator is present — and
    used comprehensions for the membership scans.

    :param sf_card: Scryfall card object
    :return: (super_types, types, sub_types). ``types`` is the type-line
        segment before the em dash with super-type words removed; note the
        surrounding spaces of removed words are intentionally preserved,
        matching the original output format.
    """
    all_super_types = ["Legendary", "Snow", "Elite", "Basic", "World", "Ongoing"]

    # Spoiler cards do not always include a type_line
    type_line = sf_card.get("type_line", "") or "Unknown"

    # Sub-types follow the em dash, space-separated.
    sub_types: List[str] = []
    if "—" in type_line:
        card_subs = type_line.split("—")[1].strip()
        sub_types = card_subs.split(" ")

    super_types: List[str] = [t for t in all_super_types if t in type_line]

    # Main types: everything before the dash, minus the super-type words.
    types: str = type_line.split("—")[0]
    for card_type in all_super_types:
        types = types.replace(card_type, "")

    return super_types, types, sub_types
|
||||
|
||||
|
||||
def scryfall2mtgjson(scryfall_cards: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
    """
    Convert SF cards to MTGJSON v4 format for dispatching
    :param scryfall_cards: List of Scryfall cards
    :return: MTGJSON card list
    """
    trice_cards: List[Dict[str, Any]] = []

    composed_sf_cards: List[Dict[str, Any]] = []

    # Handle split/transform cards: each such card becomes two singleton
    # entries (front/back) marked with the "double-faced" layout.
    for sf_card in scryfall_cards:
        # NOTE(review): cards without a "layout" key are silently dropped
        # here (no else on the outer if) — confirm Scryfall always sends one.
        if "layout" in sf_card.keys():
            if sf_card["layout"] in ["transform", "split"]:
                # Make a copy for zoning
                combined_sides = sf_card.copy()
                del combined_sides["card_faces"]

                # Quick pointers
                face_0 = sf_card["card_faces"][0]
                face_1 = sf_card["card_faces"][1]

                # Update data for the combined
                combined_sides["layout"] = "double-faced"
                combined_sides["names"] = [face_0["name"], face_1["name"]]

                # Re-structure two cards into singletons; the face dict wins
                # on key conflicts with the shared card data.
                front_side = {**combined_sides, **face_0}
                back_side = {**combined_sides, **face_1}

                # Uniquify them
                front_side["collector_number"] += "a"
                back_side["collector_number"] += "b"

                # And continue on our journey
                composed_sf_cards.extend([front_side, back_side])
            else:
                composed_sf_cards.append(sf_card)

    # Build trice cards from SF cards
    for sf_card in composed_sf_cards:
        super_types, types, sub_types = build_types(sf_card)

        # Image: for multi-faced cards use the first face's image.
        if "card_faces" in sf_card:
            image = (
                sf_card["card_faces"][0]
                .get("image_uris", {})
                .get("normal", "")
            )
        else:
            image = sf_card.get("image_uris", {}).get("normal", "")

        try:
            trice_card = {
                "cmc": sf_card["cmc"],
                "names": sf_card.get("names", None),
                "mana_cost": sf_card.get("mana_cost", ""),
                "name": sf_card["name"],
                "number": sf_card["collector_number"],
                "rarity": sf_card["rarity"].replace("mythic", "mythic rare").title(),
                "text": sf_card.get("oracle_text", ""),
                "url": image,
                "type": sf_card.get("type_line", "Unknown"),
                "colorIdentity": sf_card.get("color_identity", None),
                "colors": sf_card.get("colors", []),
                "power": sf_card.get("power", None),
                "toughness": sf_card.get("toughness", None),
                "layout": sf_card["layout"].replace("normal", ""),
                "loyalty": sf_card.get("loyalty", None),
                "artist": sf_card.get("artist", ""),
                "flavor": sf_card.get("flavor_text", None),
                "multiverseId": sf_card.get("multiverse_id", None),
                "superTypes": super_types,
                "types": types,
                "subTypes": sub_types,
            }
            trice_cards.append(trice_card)

        # Broad catch is deliberate: a spoiler card missing any required key
        # is skipped with a warning instead of aborting the whole run.
        except Exception as e:
            # If running in GitHub Actions CI, print the message as a warning
            if 'GITHUB_ACTION' in os.environ:
                print(f'::warning::Unable to parse "{sf_card.get("name")}" ({sf_card.get("set").upper()}): {str(e)}')
            else:
                print(f'Unable to parse "{sf_card.get("name")}" ({sf_card.get("set").upper()}): {str(e)}')

    return trice_cards
|
||||
|
||||
|
||||
def open_header(card_xml_file: IO[Any], filename: str) -> None:
    """
    Add the header data to the XML file: the root element, a generation
    comment, and the <info> block, then open the <sets> section.
    :param card_xml_file: Card file path
    :param filename: Output file name, embedded in the <sourceUrl> tag
    """
    # Two separate now() calls: one for the human-readable comment, one for
    # the machine-readable <createdAt> tag.
    card_xml_file.write(
        "<cockatrice_carddatabase version='4' xmlns:xsi='http://www.w3.org/2001/XMLSchema-instance' xsi:schemaLocation='https://raw.githubusercontent.com/Cockatrice/Cockatrice/master/doc/carddatabase_v4/cards.xsd'>\n"
        + " <!--\n"
        + " Created At: " + datetime.datetime.now(datetime.timezone.utc).strftime("%a, %b %d %Y, %H:%M:%S") + " (UTC)\n"
        + " \n"
        + " THIS FILE IS AUTOMATICALLY GENERATED & ALL EDITS WILL BE OVERRIDDEN.\n"
        + " -->\n"
        + "<info>\n"
        + " <author>Cockatrice/Magic-Spoiler</author>\n"
        + " <createdAt>" + datetime.datetime.now(datetime.timezone.utc).strftime("%Y-%m-%d %H:%M:%S") + " (UTC)</createdAt>\n"
        + " <sourceUrl>https://raw.githubusercontent.com/Cockatrice/Magic-Spoiler/files/" + filename + "</sourceUrl>\n"
        + " <sourceVersion></sourceVersion>\n"
        + "</info>\n"
        + "<sets>\n"
    )
|
||||
|
||||
|
||||
def fill_header_sets(card_xml_file: IO[Any], set_obj: Dict[str, str]) -> None:
    """
    Write one <set> header entry for the given set.

    :param card_xml_file: Card file path
    :param set_obj: Set object (needs code, name, set_type, released_at)
    """
    set_type = set_obj["set_type"]
    # Unknown set types fall back to the lowest priority.
    priority = SET_TYPE_PRIORITY_MAP.get(set_type.lower(), Priority.FALLBACK)

    card_xml_file.write("<set>\n")
    card_xml_file.write("<name>" + set_obj["code"] + "</name>\n")
    card_xml_file.write("<longname>" + set_obj["name"] + " (Spoiler)</longname>\n")
    card_xml_file.write("<settype>" + set_type.replace("_", " ").title() + "</settype>\n")
    card_xml_file.write("<releasedate>" + set_obj["released_at"] + "</releasedate>\n")
    card_xml_file.write("<priority>" + str(priority.value) + "</priority>\n")
    card_xml_file.write("</set>\n")
|
||||
|
||||
|
||||
def close_header(card_xml_file: IO[Any]) -> None:
    """
    Close the <sets> section and open the <cards> section.

    :param card_xml_file: Card file path
    """
    for tag in ("</sets>\n", "<cards>\n"):
        card_xml_file.write(tag)
|
||||
|
||||
|
||||
def close_xml_file(card_xml_file: IO[Any]) -> None:
    """
    Add final touch to files to validate them,
    then pretty them
    :param card_xml_file: Card file path
    """
    card_xml_file.write("</cards>\n</cockatrice_carddatabase>\n")
    card_xml_file.close()

    # Make the files pretty and add xml declaration: re-parse the file we
    # just closed and overwrite it with lxml's pretty-printed serialization.
    parser = etree.XMLParser(remove_blank_text=True)
    root = etree.parse(card_xml_file.name, parser).getroot()
    with pathlib.Path(card_xml_file.name).open("wb") as f:
        f.write(etree.tostring(root, encoding="UTF-8", xml_declaration=True, pretty_print=True))
|
||||
|
||||
|
||||
def xml_escape(text: str) -> str:
    """Apply XML_ESCAPE_TRANSLATE_MAP to *text* (entity escaping and
    control-character removal) and return the result."""
    return text.translate(XML_ESCAPE_TRANSLATE_MAP)
|
||||
|
||||
|
||||
def write_cards(
    card_xml_file: Any, trice_dict: List[Dict[str, Any]], set_code: str
) -> None:
    """
    Given a list of cards, write the cards to an output file
    :param card_xml_file: Output file to write to
    :param trice_dict: List of cards
    :param set_code: Set code
    """
    for card in trice_dict:
        # Skip back faces of non-double-faced multi-name cards; the front
        # face carries the combined entry.
        if "names" in card.keys() and card["names"]:
            if "layout" in card and card["layout"] != "double-faced":
                if card["name"] == card["names"][1]:
                    continue

        set_name = card["name"]

        # Mana cost is stored Scryfall-style ("{2}{G}{G}") — strip braces.
        if "mana_cost" in card.keys():
            mana_cost = card["mana_cost"].replace("{", "").replace("}", "")
        else:
            mana_cost = ""

        if "power" in card.keys() or "toughness" in card.keys():
            if card["power"]:
                pow_tough = str(card["power"]) + "/" + str(card["toughness"])
            else:
                pow_tough = ""
        else:
            pow_tough = ""

        if "loyalty" in card.keys() and card["loyalty"]:
            loyalty = str(card["loyalty"])
        else:
            loyalty = ""

        if "text" in card.keys():
            text = card["text"]
        else:
            text = ""

        # Drop a trailing ".0" from float CMCs ("4.0" -> "4").
        card_cmc = str(card["cmc"])
        if card_cmc.endswith(".0"):
            card_cmc = card_cmc[:-2]

        card_type = card["type"]

        # Cockatrice table row: 0=land, 1=default, 2=creature, 3=spell.
        table_row = "1"
        if "Land" in card_type:
            table_row = "0"
        elif "Sorcery" in card_type:
            table_row = "3"
        elif "Instant" in card_type:
            table_row = "3"
        elif "Creature" in card_type:
            table_row = "2"

        # First matching main type wins; for-else leaves None when no match.
        for maintype in MAINTYPES:
            if maintype in card_type:
                break
        else:
            maintype = None

        # Multi-name handling: split/aftermath cards merge both halves into
        # a single "A // B" entry built from the front face.
        if "names" in card.keys():
            if "layout" in card:
                if card["layout"] == "split" or card["layout"] == "aftermath":
                    if "names" in card:
                        if card["name"] == card["names"][0]:
                            for json_card in trice_dict:
                                if json_card["name"] == card["names"][1]:
                                    card_type += " // " + json_card["type"]
                                    new_mc = ""
                                    if "mana_cost" in json_card:
                                        new_mc = json_card["mana_cost"]
                                    mana_cost += " // " + new_mc.replace(
                                        "{", ""
                                    ).replace("}", "")
                                    card_cmc += " // " + str(json_card["cmc"])
                                    text += "\n---\n" + json_card["text"]
                                    set_name += " // " + json_card["name"]
                elif card["layout"] == "double-faced":
                    if "names" not in card.keys():
                        print(card["name"] + ' is double-faced but no "names" key')
                    else:
                        pass
            else:
                print(card["name"] + " has multiple names and no 'layout' key")

        # The "b" half of a split/aftermath card was already merged above.
        if "number" in card:
            if "b" in str(card["number"]):
                if "layout" in card:
                    if card["layout"] == "split" or card["layout"] == "aftermath":
                        continue

        # Escape every field that ends up inside XML text content.
        set_name, mana_cost, card_cmc, card_type, pow_tough, table_row, text, loyalty = map(
            xml_escape,
            [set_name, mana_cost, card_cmc, card_type, pow_tough, table_row, text, loyalty],
        )
        card_xml_file.write("<card>\n")
        card_xml_file.write("<name>" + set_name + "</name>\n")
        card_xml_file.write("<text>" + text + "</text>\n")
        card_xml_file.write("<prop>\n")
        if "colors" in card.keys() and card["colors"]:
            card_xml_file.write("<colors>" + "".join(card["colors"]) + "</colors>\n")

        card_xml_file.write("<type>" + card_type + "</type>\n")
        if maintype:
            card_xml_file.write("<maintype>" + maintype + "</maintype>\n")

        card_xml_file.write("<cmc>" + card_cmc + "</cmc>\n")
        if mana_cost:
            card_xml_file.write("<manacost>" + mana_cost + "</manacost>\n")

        if pow_tough:
            card_xml_file.write("<pt>" + pow_tough + "</pt>\n")

        if loyalty:
            card_xml_file.write("<loyalty>" + loyalty + "</loyalty>\n")

        card_xml_file.write("</prop>\n")
        card_xml_file.write(
            '<set rarity="'
            + str(card["rarity"])
            + '" picURL="'
            + str(card["url"])
            + '">'
            + str(set_code)
            + "</set>\n"
        )
        # "Comes into play tapped" flag, detected from the rules text.
        if set_name + " enters the battlefield tapped" in text:
            card_xml_file.write("<cipt>1</cipt>\n")

        card_xml_file.write("<tablerow>" + table_row + "</tablerow>\n")
        card_xml_file.write("</card>\n")
|
||||
|
||||
|
||||
def write_spoilers_xml(trice_dicts: Dict[str, List[Dict[str, Any]]]) -> bool:
    """
    Write the spoiler.xml file

    Fix: the temp file is created under OUTPUT_TMP_DIR, but only OUTPUT_DIR
    ("out") was being created — if this ran before write_set_xml had made
    the tmp directory, the open() below raised FileNotFoundError. Now the
    tmp directory itself is created (consistent with write_set_xml).

    :param trice_dicts: Dict of dict entries, keyed by set code
    :return: Written successfully (content changed and file was replaced)
    """
    output_file_name = "spoiler.xml"

    OUTPUT_TMP_DIR.mkdir(parents=True, exist_ok=True)
    card_xml_file = OUTPUT_TMP_DIR.joinpath(output_file_name).open("w", encoding="utf-8")

    # Fill in set headers; the set code gets the spoiler mark ("~") appended.
    open_header(card_xml_file, output_file_name)
    for value in SPOILER_SETS.get():
        fill_header_sets(
            card_xml_file,
            {key: (value_ + SPOILER_MARK if key == "code" else value_) for key, value_ in value.items()},
        )
    close_header(card_xml_file)

    # Write in all the cards
    for value in SPOILER_SETS.get():
        try:
            write_cards(card_xml_file, trice_dicts[value["code"]], value["code"] + SPOILER_MARK)
        except KeyError:
            # Set had no converted card list — header-only entry, skip cards.
            print("Skipping " + value["code"])

    close_xml_file(card_xml_file)

    # If content (minus info/comments) is unchanged, keep the old file.
    old_xml_location = str(OUTPUT_DIR.joinpath(output_file_name))
    if compare_xml_content(card_xml_file.name, old_xml_location):
        print("No new data in spoiler.xml, skipping replacement")
        return False

    # Move new version to old location
    print("Changes detected, replacing spoiler.xml with updated version")
    shutil.move(card_xml_file.name, old_xml_location)
    return True
|
||||
|
||||
|
||||
def compare_xml_content(a: str, b: str) -> bool:
    """
    Report whether two XML files carry the same content once the <info>
    section and all comments are stripped.

    :param a: File a
    :param b: File b
    :return: True when both files exist and their stripped content matches
    """
    paths = (pathlib.Path(a), pathlib.Path(b))

    if not all(p.is_file() for p in paths):
        # Missing file(s) always count as "different".
        return False

    def fingerprint(path: pathlib.Path) -> str:
        # Hash the tree with the volatile parts (info block, comments) removed.
        parser = etree.XMLParser(remove_blank_text=True)
        root = etree.parse(str(path), parser).getroot()
        etree.strip_elements(root, "info", etree.Comment)
        return hashlib.sha512(etree.tostring(root)).hexdigest()

    return fingerprint(paths[0]) == fingerprint(paths[1])
|
||||
|
||||
|
||||
def write_set_xml(trice_dict: List[Dict[str, Any]], set_obj: Dict[str, str]) -> bool:
    """
    Write out a single magic set to XML format
    :param trice_dict: Cards to print
    :param set_obj: Set object
    :return: Written successfully (content changed and file was replaced)
    """
    if not trice_dict:
        return False

    # Build the file in a tmp dir first; it is promoted below only if it
    # differs from the currently published version.
    OUTPUT_TMP_DIR.mkdir(parents=True, exist_ok=True)
    set_code = set_obj["code"]
    file_path = OUTPUT_TMP_DIR.joinpath(f"{set_code}.xml")
    card_xml_file = file_path.open("w", encoding="utf-8")

    open_header(card_xml_file, file_path.name)
    fill_header_sets(card_xml_file, set_obj)
    close_header(card_xml_file)
    write_cards(card_xml_file, trice_dict, set_obj["code"])
    # close_xml_file also closes the handle and pretty-prints the file.
    close_xml_file(card_xml_file)

    # If content didn't change, discard newest creation
    old_xml_location = str(OUTPUT_DIR.joinpath("{}.xml".format(set_obj["code"])))
    if compare_xml_content(card_xml_file.name, old_xml_location):
        print("No new data in {}.xml, skipping replacement".format(set_obj["code"]))
        return False

    # Move new version to old location
    print(
        "Changes detected, replacing {}.xml with updated version".format(
            set_obj["code"]
        )
    )
    shutil.move(card_xml_file.name, old_xml_location)
    return True
|
||||
|
||||
|
||||
def get_spoiler_sets() -> List[Dict[str, str]]:
    """
    Download Sf sets and mark spoiler sets: keep only not-yet-released sets
    of interesting types that already have at least one card.
    :return: Spoiler sets (with upper-cased codes)
    """
    sf_sets = json_download(SCRYFALL_SET_URL.format(""))
    if sf_sets["object"] == "error":
        print("Unable to download SF correctly: {}".format(sf_sets))
        return []

    spoiler_sets = []
    # Find list of possible Set Types to exclude here: https://scryfall.com/docs/api/sets
    excluded_set_types = ["alchemy", "masterpiece", "arsenal", "from_the_vault", "spellbook", "premium_deck", "duel_deck",
                          "draft_innovation", "treasure_chest", "planechase", "archenemy", "vanguard", "box", "promo",
                          "token", "memorabilia", "minigame"]

    for sf_set in sf_sets["data"]:
        # NOTE(review): released_at is a date string ("YYYY-MM-DD") but is
        # compared lexicographically against a datetime string, so a set is
        # dropped on its release day ("YYYY-MM-DD" < "YYYY-MM-DD HH:MM:SS").
        # Presumably intended (released sets come from MTGJSON) — confirm.
        if (
            sf_set["released_at"] >= time.strftime("%Y-%m-%d %H:%M:%S")
            and sf_set["set_type"] not in excluded_set_types
            and sf_set["card_count"]
        ):
            sf_set["code"] = sf_set["code"].upper()
            spoiler_sets.append(sf_set)

    return spoiler_sets
|
||||
|
||||
|
||||
def delete_old_files() -> bool:
    """
    Delete files that are no longer necessary within the program.

    Fix: the SpoilerSeasonEnabled marker file was written through an unclosed
    handle (`path.open(...).write(...)`); it is now written inside a context
    manager so the handle is closed deterministically.

    :return: Files were deleted
    """
    # Stems that are allowed to remain in the output directory.
    valid_files = [x["code"].upper() for x in SPOILER_SETS.get()] + [
        "spoiler",
        "SpoilerSeasonEnabled",
        "README",
    ]

    deleted = False
    for output_file in OUTPUT_DIR.glob("*"):
        if not output_file.is_file():
            continue

        # Anything not matching an active spoiler set or a known support
        # file is stale output from a previous run.
        if output_file.stem not in valid_files:
            output_file.unlink()
            deleted = True

    # The tmp work directory is only needed while building new files.
    if OUTPUT_TMP_DIR.is_dir():
        shutil.rmtree(OUTPUT_TMP_DIR)

    # Marker file signalling whether spoiler season is currently active.
    enabled_path = OUTPUT_DIR.joinpath("SpoilerSeasonEnabled")
    if not SPOILER_SETS.get():
        enabled_path.unlink(missing_ok=True)
    else:
        with enabled_path.open("w", encoding="utf-8") as marker_file:
            marker_file.write(" ")

    return deleted
|
||||
|
||||
|
||||
def main() -> None:
    """
    Main dispatch thread: download spoiler sets, write per-set XML and the
    combined spoiler.xml, clean up stale output, and report whether anything
    changed (consumed by CI via GITHUB_OUTPUT).
    """

    # Determine what sets have spoiler data
    SPOILER_SETS.set(get_spoiler_sets())

    spoiler_xml = {}
    changed = False  # becomes True if any output file was (re)written
    for set_info in SPOILER_SETS.get():
        print("Handling {}".format(set_info["code"]))

        cards = download_scryfall_set(set_info["code"])
        trice_dict = scryfall2mtgjson(cards)

        # Write SET.xml
        changed |= write_set_xml(trice_dict, set_info)

        # Save for spoiler.xml
        spoiler_xml[set_info["code"]] = trice_dict

    if spoiler_xml:
        # Write out the spoiler.xml file
        changed |= write_spoilers_xml(spoiler_xml)

    # Cleanup outdated stuff that's not necessary
    changed |= delete_old_files()

    # Enable deployment on changes (used in CI): append "deploy=true/false"
    # to the GitHub Actions output file when running under Actions.
    try:
        github_output = os.environ["GITHUB_OUTPUT"]
    except KeyError:
        print(f"not in ci but deploy={str(changed).lower()}")
    else:
        with open(github_output, "a") as fp:
            print(f"deploy={str(changed).lower()}", file=fp)

    if not changed:
        print("::notice title=No updates available::"
              "No new spoiler cards found for deployment")
|
||||
|
||||
|
||||
# Script entry point.
if __name__ == "__main__":
    main()
|
||||
195
main.py
195
main.py
|
|
@ -1,195 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
import spoilers
|
||||
import mtgs_scraper
|
||||
import scryfall_scraper
|
||||
import mythic_scraper
|
||||
import wizards_scraper
|
||||
import os
|
||||
import json
|
||||
import io
|
||||
import sys
|
||||
import verify_files
|
||||
import requests
|
||||
import yaml
|
||||
from lxml import etree
|
||||
|
||||
# Runtime feature toggles; values can be overridden from the command line
# via parseargs() (e.g. "--dumpXML=true").
presets = {
    "isfullspoil": False,  # when full spoil comes around, we only want to use WOTC images
    "includeMasterpieces": True,  # if the set has masterpieces, let's get those too
    "oldRSS": False,  # maybe MTGS hasn't updated their spoiler.rss but new cards have leaked
    "dumpXML": False,  # let travis print XML for testing
    # only use Scryfall data (no mtgs for ANY sets)
    "scryfallOnly": False,
    "dumpErrors": True  # print the error log from out/errors.json
}

# Hand-maintained fix files loaded from the repository root.
setinfos = verify_files.load_file('set_info.yml','yaml_multi')
manual_sets = verify_files.load_file('cards_manual.yml','yaml')
card_corrections = verify_files.load_file('cards_corrections.yml','yaml')
delete_cards = verify_files.load_file('cards_delete.yml','yaml')

# Accumulates error entries from all processing steps; dumped at end of run.
errorlog = []
|
||||
|
||||
# TODO insert configparser to add config.ini file
|
||||
|
||||
|
||||
def parseargs():
    # Parse "--name=value" style command-line arguments into the presets
    # dict. Matching is case-insensitive and ignores dashes; "true"/"t" and
    # "false"/"f" variants become booleans, anything else stays a string.
    for argument in sys.argv:
        for preset in presets:
            if argument.split('=')[0].lower().replace('-', '') == preset.lower():
                argvalue = argument.split('=')[1]
                if argvalue in ['true', 'True', 'T', 't']:
                    argvalue = True
                elif argvalue in ['false', 'False', 'F', 'f']:
                    argvalue = False
                presets[preset] = argvalue
                print "Setting preset " + preset + " to value " + str(argvalue)
|
||||
|
||||
|
||||
def save_allsets(AllSets):
    # Dump the combined AllSets structure (MTGJSON format) to out/AllSets.json,
    # sorted and compact-separated, preserving non-ASCII characters.
    with io.open('out/AllSets.json', 'w', encoding='utf8') as json_file:
        data = json.dumps(AllSets, ensure_ascii=False, encoding='utf8',
                          indent=2, sort_keys=True, separators=(',', ':'))
        json_file.write(unicode(data))
|
||||
|
||||
|
||||
def save_masterpieces(masterpieces, setinfo):
    # Write the masterpieces card data to out/<masterpiece set code>.json.
    with open('out/' + setinfo['masterpieces']['code'] + '.json', 'w') as outfile:
        json.dump(masterpieces, outfile, sort_keys=True,
                  indent=2, separators=(',', ': '))
|
||||
|
||||
|
||||
def save_setjson(mtgs, filename):
    # Write one set's card data to out/<filename>.json (UTF-8, sorted keys).
    with io.open('out/' + filename + '.json', 'w', encoding='utf8') as json_file:
        data = json.dumps(mtgs, ensure_ascii=False, encoding='utf8',
                          indent=2, sort_keys=True, separators=(',', ':'))
        json_file.write(unicode(data))
|
||||
|
||||
|
||||
def save_errorlog(errorlog):
    # Persist the accumulated error log as YAML at out/errors.yml.
    with open('out/errors.yml', 'w') as outfile:
        yaml.safe_dump(errorlog, outfile, default_flow_style=False)
|
||||
|
||||
|
||||
def save_xml(xmlstring, outfile):
    # Write a UTF-8 encoded XML string to outfile.
    # Fix: the original checked os.path.exists(outfile) but assigned 'w'
    # on BOTH branches, so the conditional was dead code — collapsed to a
    # single unconditional overwrite (behavior unchanged).
    with open(outfile, 'w') as xmlfile:
        xmlfile.write(xmlstring.encode('utf-8'))
|
||||
|
||||
|
||||
def verify_xml(file, schema):
    # Validate an XML file against an XSD schema string.
    # Returns True on success. On a bad schema or unparsable XML it prints
    # diagnostics and returns False; on XSD validation failure it prints the
    # errors and terminates the process via sys.exit.
    try:
        schema_doc = etree.fromstring(schema)
    except Exception as e:
        print "XSD for " + file + " is invalid"
        print schema
        print e
        return False
    xml_schema = etree.XMLSchema(schema_doc)
    try:
        xml_doc = etree.parse(file)
    except Exception as e:
        print "XML file " + file + " is invalid"
        print e
        return False
    try:
        xml_schema.assert_(xml_doc)
    except:  # NOTE(review): bare except also swallows SystemExit/KeyboardInterrupt
        xsd_errors = xml_schema.error_log
        print "Errors validating XML file " + file + " against XSD:"
        for error in xsd_errors:
            print error
        sys.exit("Error: " + file + " does not pass Cockatrice XSD validation.")
        # NOTE(review): unreachable — sys.exit() raises SystemExit above.
        return False
    return True
|
||||
|
||||
|
||||
# Main pipeline: scrape spoilers, apply fix files, emit per-set JSON/XML and
# the combined spoiler.xml, then validate against Cockatrice's XSD.
if __name__ == '__main__':
    parseargs()
    AllSets = spoilers.get_allsets()  # get AllSets from mtgjson
    combinedjson = {}
    noCards = []
    for setinfo in setinfos:
        # Sets already published in MTGJSON need no spoiler handling.
        if setinfo['code'] in AllSets:
            print "Found " +setinfo['code']+ " set from set_info.yml in MTGJSON, not adding it"
            continue
        if presets['oldRSS'] or 'noRSS' in setinfo and setinfo['noRSS']:
            mtgs = {"cards": []}
        else:
            mtgs = mtgs_scraper.scrape_mtgs(
                'http://www.mtgsalvation.com/spoilers.rss')  # scrape mtgs rss feed
            mtgs = mtgs_scraper.parse_mtgs(mtgs, setinfo=setinfo)  # parse spoilers into mtgjson format
        if manual_sets and manual_sets != '' and setinfo['code'] in manual_sets:
            manual_cards = manual_sets[setinfo['code']]
        else:
            manual_cards = []
        mtgs = spoilers.correct_cards(
            mtgs, manual_cards, card_corrections, delete_cards['delete'])  # fix using the fixfiles
        mtgjson = spoilers.get_image_urls(mtgs, presets['isfullspoil'], setinfo)  # get images
        if presets['scryfallOnly'] or 'scryfallOnly' in setinfo and setinfo['scryfallOnly']:
            scryfall = scryfall_scraper.get_scryfall(
                'https://api.scryfall.com/cards/search?q=++e:' + setinfo['code'].lower())
            mtgjson = scryfall #_scraper.smash_mtgs_scryfall(mtgs, scryfall)
        if 'fullSpoil' in setinfo and setinfo['fullSpoil']:
            wotc = wizards_scraper.scrape_fullspoil('', setinfo)
            wizards_scraper.smash_fullspoil(mtgjson, wotc)
        [mtgjson, errors] = spoilers.error_check(
            mtgjson, card_corrections)  # check for errors where possible
        errorlog += errors
        if not 'cards' in mtgjson or mtgjson['cards'] == [] or not mtgjson['cards']:
            noCards.append(setinfo['code'])
            continue
        spoilers.write_xml(
            mtgjson, setinfo['code'], setinfo['name'], setinfo['releaseDate'])
        #save_xml(spoilers.pretty_xml(setinfo['code']), 'out/spoiler.xml')
        mtgjson = spoilers.add_headers(mtgjson, setinfo)
        AllSets = spoilers.make_allsets(AllSets, mtgjson, setinfo['code'])
        if 'masterpieces' in setinfo:  # repeat all of the above for masterpieces
            # masterpieces aren't in the rss feed, so for the new cards, we'll go to their individual pages on mtgs
            # old cards will get their infos copied from mtgjson (including fields that may not apply like 'artist')
            # the images will still come from mtgs
            masterpieces = spoilers.make_masterpieces(
                setinfo['masterpieces'], AllSets, mtgjson)
            [masterpieces, errors] = spoilers.error_check(masterpieces)
            errorlog += errors
            spoilers.write_xml(masterpieces, setinfo['masterpieces']['code'],
                               setinfo['masterpieces']['name'], setinfo['masterpieces']['releaseDate'])
            AllSets = spoilers.make_allsets(
                AllSets, masterpieces, setinfo['masterpieces']['code'])
            save_masterpieces(masterpieces, setinfo)
            save_xml(spoilers.pretty_xml('out/' + setinfo['masterpieces']['code'] + '.xml'), 'out/' + setinfo['masterpieces']['code'] + '.xml')
            combinedjson[setinfo['masterpieces']['code']] = masterpieces
        if 'cards' in mtgjson and mtgjson['cards'] and not mtgjson['cards'] == []:
            save_setjson(mtgjson, setinfo['code'])
            combinedjson[setinfo['code']] = mtgjson
        if os.path.isfile('out/' + setinfo['code'] + '.xml'):
            save_xml(spoilers.pretty_xml('out/' + setinfo['code'] + '.xml'), 'out/' + setinfo['code'] + '.xml')
    if noCards != []:
        print("Not processing set(s) with no cards: {}".format(noCards))
    save_setjson(combinedjson, 'spoiler')
    spoilers.write_combined_xml(combinedjson, setinfos)
    save_xml(spoilers.pretty_xml('out/spoiler.xml'), 'out/spoiler.xml')
    cockatrice_xsd = requests.get('https://raw.githubusercontent.com/Cockatrice/Cockatrice/master/doc/cards.xsd').text
    if verify_xml('out/spoiler.xml', cockatrice_xsd):  # check if our XML passes Cockatrice's XSD
        print 'spoiler.xml passes Cockatrice XSD verification'
    else:
        print 'spoiler.xml fails Cockatrice XSD verification'
    errorlog = spoilers.remove_corrected_errors(errorlog, card_corrections)
    save_errorlog(errorlog)
    save_allsets(AllSets)
    # save_setjson(mtgjson)
    if presets['dumpXML']:
        print '<!----- DUMPING SPOILER.XML -----!>'
        with open('out/spoiler.xml', 'r') as xmlfile:
            print xmlfile.read()
        print '<!----- END XML DUMP -----!>'
    if presets['dumpErrors']:
        if errorlog != {}:
            print '//----- DUMPING ERROR LOG -----'
            print yaml.safe_dump(errorlog, default_flow_style=False)
            print '//----- END ERROR LOG -----'
        else:
            print "No Detected Errors!"
|
||||
275
mtgs_scraper.py
275
mtgs_scraper.py
|
|
@ -1,275 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
import requests
|
||||
import feedparser
|
||||
import re
|
||||
import sys
|
||||
import time
|
||||
from lxml import html
|
||||
|
||||
|
||||
def scrape_mtgs(url):
|
||||
return requests.get(url, headers={'Cache-Control': 'no-cache', 'Pragma': 'no-cache', 'Expires': 'Thu, 01 Jan 1970 00:00:00 GMT'}).text
|
||||
|
||||
|
||||
def parse_mtgs(mtgs, manual_cards=[], card_corrections=[], delete_cards=[], related_cards=[], setinfo={"mtgsurl": ""}):
|
||||
mtgs = mtgs.replace('utf-16', 'utf-8')
|
||||
patterns = ['<b>Name:</b> <b>(?P<name>.*?)<',
|
||||
'Cost: (?P<cost>[X]*\d{0,2}[XWUBRGC]*?)<',
|
||||
'Type: (?P<type>.*?)<',
|
||||
'Pow/Tgh: (?P<pow>.*?)<',
|
||||
'Rules Text: (?P<rules>.*?)<br /',
|
||||
'Rarity: (?P<rarity>.*?)<',
|
||||
'Set Number: #(?P<setnumber>.*?)/'
|
||||
]
|
||||
d = feedparser.parse(mtgs)
|
||||
|
||||
cards = []
|
||||
for entry in d.items()[5][1]:
|
||||
card = dict(cost='', cmc='', img='', pow='', name='', rules='', type='',
|
||||
color='', altname='', colorIdentity='', colorArray=[], colorIdentityArray=[], setnumber='', rarity='')
|
||||
summary = entry['summary']
|
||||
for pattern in patterns:
|
||||
match = re.search(pattern, summary, re.MULTILINE | re.DOTALL)
|
||||
if match:
|
||||
dg = match.groupdict()
|
||||
card[dg.items()[0][0]] = dg.items()[0][1]
|
||||
cards.append(card)
|
||||
|
||||
gallery_list = list_mtgs_gallery(setinfo['mtgsurl'])
|
||||
for card in cards:
|
||||
if card['name'] not in gallery_list:
|
||||
print "Removing card scraped from MTGS RSS but not in their gallery: " + card['name']
|
||||
cards.remove(card)
|
||||
|
||||
for card in cards:
|
||||
card['name'] = card['name'].strip()
|
||||
|
||||
# if we didn't find any cards, let's bail out to prevent overwriting good data
|
||||
if len(cards) < 1:
|
||||
sys.exit("No cards found, exiting to prevent file overwrite")
|
||||
|
||||
cards2 = []
|
||||
for card in cards:
|
||||
if 'rules' in card:
|
||||
htmltags = re.compile(r'<.*?>')
|
||||
card['rules'] = htmltags.sub('', card['rules'])
|
||||
if '//' in card['name'] or 'Aftermath' in card['rules']:
|
||||
print 'Splitting up Aftermath card ' + card['name']
|
||||
card1 = card.copy()
|
||||
card2 = dict(cost='', cmc='', img='', pow='', name='', rules='', type='',
|
||||
color='', altname='', colorIdentity='', colorArray=[], colorIdentityArray=[], setnumber='', rarity='')
|
||||
if '//' in card['name']:
|
||||
card['name'] = card['name'].replace(' // ', '//')
|
||||
card1['name'] = card['name'].split('//')[0]
|
||||
card2["name"] = card['name'].split('//')[1]
|
||||
else:
|
||||
card1['name'] = card['name']
|
||||
card2["name"] = card['rules'].split(
|
||||
'\n\n')[1].strip().split(' {')[0]
|
||||
card1['rules'] = card['rules'].split('\n\n')[0].strip()
|
||||
card2["rules"] = "Aftermath" + card['rules'].split('Aftermath')[1]
|
||||
card2['cost'] = re.findall(
|
||||
r'{.*}', card['rules'])[0].replace('{', '').replace('}', '').upper()
|
||||
card2['type'] = re.findall(
|
||||
r'}\n.*\n', card['rules'])[0].replace('}', '').replace('\n', '')
|
||||
if 'setnumber' in card:
|
||||
card1['setnumber'] = card['setnumber'] + 'a'
|
||||
card2['setnumber'] = card['setnumber'] + 'b'
|
||||
if 'rarity' in card:
|
||||
card2['rarity'] = card['rarity']
|
||||
card1['layout'] = 'aftermath'
|
||||
card2['layout'] = 'aftermath'
|
||||
card1['names'] = [card1['name'], card2['name']]
|
||||
card2['names'] = [card1['name'], card2['name']]
|
||||
cards2.append(card1)
|
||||
cards2.append(card2)
|
||||
else:
|
||||
cards2.append(card)
|
||||
cards = cards2
|
||||
|
||||
for card in cards:
|
||||
card['name'] = card['name'].replace(''', '\'')
|
||||
card['rules'] = card['rules'].replace(''', '\'') \
|
||||
.replace('<i>', '') \
|
||||
.replace('</i>', '') \
|
||||
.replace('"', '"') \
|
||||
.replace('blkocking', 'blocking')\
|
||||
.replace('&bull;', u'•')\
|
||||
.replace('•', u'•')\
|
||||
.replace('comes into the', 'enters the')\
|
||||
.replace('threeor', 'three or')\
|
||||
.replace('[i]', '')\
|
||||
.replace('[/i]', '')\
|
||||
.replace('Lawlwss', 'Lawless')\
|
||||
.replace('Costner', "Counter")
|
||||
card['type'] = card['type'].replace(' ', ' ')\
|
||||
.replace('Crature', 'Creature')
|
||||
|
||||
if card['type'][-1] == ' ':
|
||||
card['type'] = card['type'][:-1]
|
||||
|
||||
if 'cost' in card and len(card['cost']) > 0:
|
||||
workingCMC = 0
|
||||
stripCost = card['cost'].replace('{', '').replace('}', '')
|
||||
for manaSymbol in stripCost:
|
||||
if manaSymbol.isdigit():
|
||||
workingCMC += int(manaSymbol)
|
||||
elif not manaSymbol == 'X':
|
||||
workingCMC += 1
|
||||
card['cmc'] = workingCMC
|
||||
|
||||
for c in 'WUBRG': # figure out card's color
|
||||
if c not in card['colorIdentity']:
|
||||
if c in card['cost']:
|
||||
card['color'] += c
|
||||
card['colorIdentity'] += c
|
||||
if (c + '}') in card['rules'] or (str.lower(c) + '}') in card['rules']:
|
||||
if not (c in card['colorIdentity']):
|
||||
card['colorIdentity'] += c
|
||||
|
||||
cleanedcards = []
|
||||
for card in cards: # let's remove any cards that are named in delete_cards array
|
||||
if not card['name'] in delete_cards:
|
||||
cleanedcards.append(card)
|
||||
cards = cleanedcards
|
||||
|
||||
cardarray = []
|
||||
for card in cards:
|
||||
dupe = False
|
||||
for dupecheck in cardarray:
|
||||
if dupecheck['name'] == card['name']:
|
||||
dupe = True
|
||||
if dupe == True:
|
||||
continue
|
||||
for cid in card['colorIdentity']:
|
||||
card['colorIdentityArray'].append(cid)
|
||||
if 'W' in card['color']:
|
||||
card['colorArray'].append('White')
|
||||
if 'U' in card['color']:
|
||||
card['colorArray'].append('Blue')
|
||||
if 'B' in card['color']:
|
||||
card['colorArray'].append('Black')
|
||||
if 'R' in card['color']:
|
||||
card['colorArray'].append('Red')
|
||||
if 'G' in card['color']:
|
||||
card['colorArray'].append('Green')
|
||||
cardpower = ''
|
||||
cardtoughness = ''
|
||||
if len(card['pow'].split('/')) > 1:
|
||||
cardpower = card['pow'].split('/')[0]
|
||||
cardtoughness = card['pow'].split('/')[1]
|
||||
cardnames = []
|
||||
cardnumber = card['setnumber'].lstrip('0')
|
||||
if card['name'] in related_cards:
|
||||
cardnames.append(card['name'])
|
||||
cardnames.append(related_cards[card['name']])
|
||||
cardnumber += 'a'
|
||||
card['layout'] = 'double-faced'
|
||||
for namematch in related_cards:
|
||||
if card['name'] == related_cards[namematch]:
|
||||
card['layout'] = 'double-faced'
|
||||
cardnames.append(namematch)
|
||||
if not card['name'] in cardnames:
|
||||
cardnames.append(card['name'])
|
||||
cardnumber += 'b'
|
||||
cardnames = []
|
||||
|
||||
if 'number' in card:
|
||||
if 'b' in card['number'] or 'a' in card['number']:
|
||||
if not 'layout' in card:
|
||||
print card['name'] + " has a a/b number but no 'layout'"
|
||||
card['type'] = card['type'].replace('instant', 'Instant').replace(
|
||||
'sorcery', 'Sorcery').replace('creature', 'Creature')
|
||||
if '-' in card['type']:
|
||||
subtype = card['type'].split(' - ')[1].strip()
|
||||
else:
|
||||
subtype = False
|
||||
if subtype:
|
||||
subtypes = subtype.split(' ')
|
||||
else:
|
||||
subtypes = False
|
||||
if card['cmc'] == '':
|
||||
card['cmc'] = 0
|
||||
cardjson = {}
|
||||
#cardjson["id"] = hashlib.sha1(code + card['name'] + str(card['name']).lower()).hexdigest()
|
||||
cardjson["cmc"] = card['cmc']
|
||||
cardjson["manaCost"] = card['cost']
|
||||
cardjson["name"] = card['name']
|
||||
cardjson["number"] = cardnumber
|
||||
# not sure if mtgjson has a list of acceptable rarities, but my application does
|
||||
# so we'll warn me but continue to write a non-standard rarity (timeshifted?)
|
||||
# may force 'special' in the future
|
||||
if card['rarity'] not in ['Mythic Rare', 'Rare', 'Uncommon', 'Common', 'Special', 'Basic Land']:
|
||||
#errors.append({"name": card['name'], "key": "rarity", "value": card['rarity']})
|
||||
print card['name'] + ' has rarity = ' + card['rarity']
|
||||
if subtypes:
|
||||
cardjson['subtypes'] = subtypes
|
||||
cardjson["rarity"] = card['rarity']
|
||||
cardjson["text"] = card['rules'].replace(". ",". ")
|
||||
cardjson["type"] = card['type']
|
||||
|
||||
workingtypes = card['type']
|
||||
if ' - ' in workingtypes:
|
||||
workingtypes = card['type'].split(' - ')[0]
|
||||
cardjson['types'] = workingtypes.replace('Legendary ', '').replace('Snow ', '')\
|
||||
.replace('Elite ', '').replace('Basic ', '').replace('World ', '').replace('Ongoing ', '')\
|
||||
.strip().split(' ')
|
||||
cardjson["url"] = card['img']
|
||||
|
||||
# optional fields
|
||||
if len(card['colorIdentityArray']) > 0:
|
||||
cardjson["colorIdentity"] = card['colorIdentityArray']
|
||||
if len(card['colorArray']) > 0:
|
||||
cardjson["colors"] = card['colorArray']
|
||||
if len(cardnames) > 1:
|
||||
cardjson["names"] = cardnames
|
||||
if 'names' in card:
|
||||
cardjson['names'] = card['names']
|
||||
if cardpower or cardpower == '0':
|
||||
cardjson["power"] = cardpower
|
||||
cardjson["toughness"] = cardtoughness
|
||||
if card.has_key('loyalty'):
|
||||
cardjson["loyalty"] = card['loyalty']
|
||||
if card.has_key('layout'):
|
||||
cardjson["layout"] = card['layout']
|
||||
|
||||
cardarray.append(cardjson)
|
||||
|
||||
return {"cards": cardarray}
|
||||
|
||||
|
||||
def scrape_mtgs_images(url='http://www.mtgsalvation.com/spoilers/183-hour-of-devastation', mtgscardurl='http://www.mtgsalvation.com/cards/hour-of-devastation/', exemptlist=[]):
|
||||
page = requests.get(url)
|
||||
tree = html.fromstring(page.content)
|
||||
cards = {}
|
||||
cardstree = tree.xpath('//*[contains(@class, "log-card")]')
|
||||
for child in cardstree:
|
||||
if child.text in exemptlist:
|
||||
continue
|
||||
childurl = mtgscardurl + child.attrib['data-card-id'] + '-' + child.text.replace(
|
||||
' ', '-').replace("'", "").replace(',', '').replace('-//', '')
|
||||
cardpage = requests.get(childurl)
|
||||
tree = html.fromstring(cardpage.content)
|
||||
cardtree = tree.xpath('//img[contains(@class, "card-spoiler-image")]')
|
||||
try:
|
||||
cardurl = cardtree[0].attrib['src']
|
||||
except:
|
||||
cardurl = ''
|
||||
pass
|
||||
cards[child.text] = {
|
||||
"url": cardurl
|
||||
}
|
||||
time.sleep(.2)
|
||||
return cards
|
||||
|
||||
|
||||
def list_mtgs_gallery(url=''):
|
||||
if url == '':
|
||||
return ''
|
||||
page = requests.get(url)
|
||||
tree = html.fromstring(page.content)
|
||||
cards = []
|
||||
cardstree = tree.xpath('//*[contains(@class, "log-card")]')
|
||||
for child in cardstree:
|
||||
cards.append(child.text)
|
||||
return cards
|
||||
18
mypy.ini
Normal file
18
mypy.ini
Normal file
|
|
@ -0,0 +1,18 @@
|
|||
[mypy]
|
||||
python_version = 3.7
|
||||
|
||||
check_untyped_defs = True
|
||||
disallow_untyped_calls = True
|
||||
disallow_untyped_defs = True
|
||||
disallow_subclassing_any = True
|
||||
follow_imports = normal
|
||||
incremental = True
|
||||
ignore_missing_imports = True
|
||||
strict_optional = True
|
||||
warn_no_return = True
|
||||
warn_redundant_casts = True
|
||||
warn_return_any = True
|
||||
warn_unused_ignores = True
|
||||
|
||||
[mypy-pkg/generated_code/*]
|
||||
ignore_errors = True
|
||||
|
|
@ -1,87 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
import requests
|
||||
import time
|
||||
from bs4 import BeautifulSoup as BS
|
||||
from bs4 import Comment
|
||||
|
||||
|
||||
# mtgjson is optional, will ignore cards found if passed
|
||||
def get_mythic_cards(url='http://mythicspoiler.com/ixa/', mtgjson=False):
|
||||
cards = {'cards': []}
|
||||
r = requests.get(url)
|
||||
soup = BS(r.text, "html.parser")
|
||||
cardurls = soup.find_all('a', 'card')
|
||||
urllist = []
|
||||
for cardurl in cardurls:
|
||||
try:
|
||||
urllist.append(url + str(cardurl).split("href=\"")
|
||||
[1].split('"><img')[0])
|
||||
except:
|
||||
pass
|
||||
if not mtgjson:
|
||||
for url in urllist:
|
||||
card = scrape_mythic_card_page(url)
|
||||
if card != '' and 'name' in card and card['name'] != '':
|
||||
cards['cards'].append(scrape_mythic_card_page(url))
|
||||
time.sleep(.5)
|
||||
else:
|
||||
for url in urllist:
|
||||
needsScraped = True
|
||||
for card in mtgjson['cards']:
|
||||
if card['name'].lower().replace(' ', '') in url:
|
||||
needsScraped = False
|
||||
if needsScraped:
|
||||
card = scrape_mythic_card_page(url)
|
||||
if card != '' and 'name' in card and card['name'] != '':
|
||||
mtgjson['cards'].append(card)
|
||||
cards = mtgjson
|
||||
|
||||
return cards
|
||||
|
||||
|
||||
def scrape_mythic_card_page(url):
|
||||
r = requests.get(url)
|
||||
|
||||
soup = BS(r.text, "html.parser")
|
||||
|
||||
comments = soup.find_all(string=lambda text: isinstance(text, Comment))
|
||||
|
||||
card = {}
|
||||
|
||||
for comment in comments:
|
||||
if comment == 'CARD NAME':
|
||||
card['name'] = comment.next_element.strip().replace('"', '')
|
||||
elif comment == 'MANA COST':
|
||||
try:
|
||||
card['manaCost'] = comment.next_element.strip().replace('"', '')
|
||||
except:
|
||||
pass
|
||||
elif comment == 'TYPE':
|
||||
card['type'] = comment.next_element.strip().replace('"', '')
|
||||
elif comment == 'CARD TEXT':
|
||||
buildText = ''
|
||||
for element in comment.next_elements:
|
||||
try:
|
||||
if not element.strip() in ['CARD TEXT', 'FLAVOR TEXT', '']:
|
||||
if buildText != '':
|
||||
buildText += '\n'
|
||||
buildText += element.strip()
|
||||
if element.strip() == 'FLAVOR TEXT':
|
||||
card['text'] = buildText
|
||||
break
|
||||
except:
|
||||
pass
|
||||
elif comment == 'Set Number':
|
||||
try:
|
||||
card['number'] = comment.next_element.strip()
|
||||
except:
|
||||
pass
|
||||
elif comment == 'P/T':
|
||||
try:
|
||||
if comment.next_element.strip().split('/')[0] != '':
|
||||
card['power'] = comment.next_element.strip().split('/')[0]
|
||||
card['toughness'] = comment.next_element.strip().split('/')[1]
|
||||
except:
|
||||
pass
|
||||
|
||||
return card
|
||||
|
|
@ -1,7 +1,3 @@
|
|||
requests
|
||||
feedparser
|
||||
lxml
|
||||
Pillow
|
||||
datetime
|
||||
beautifulsoup4
|
||||
PyYAML
|
||||
requests
|
||||
requests_cache
|
||||
7
requirements_test.txt
Normal file
7
requirements_test.txt
Normal file
|
|
@ -0,0 +1,7 @@
|
|||
black
|
||||
isort
|
||||
mypy
|
||||
pylint
|
||||
pytest
|
||||
pytest-cov
|
||||
tox
|
||||
|
|
@ -1,205 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
import requests
|
||||
import time
|
||||
|
||||
|
||||
def get_scryfall(setUrl='https://api.scryfall.com/cards/search?q=++e:xln'):
|
||||
#getUrl = 'https://api.scryfall.com/cards/search?q=++e:'
|
||||
#setUrl = getUrl + code.lower()
|
||||
setDone = False
|
||||
scryfall = []
|
||||
|
||||
while setDone == False:
|
||||
setcards = requests.get(setUrl)
|
||||
setcards = setcards.json()
|
||||
if setcards.has_key('data'):
|
||||
scryfall.append(setcards['data'])
|
||||
else:
|
||||
setDone = True
|
||||
print 'No Scryfall data'
|
||||
scryfall = ['']
|
||||
time.sleep(.1) # 100ms sleep, see "Rate Limits and Good Citizenship" at https://scryfall.com/docs/api
|
||||
if setcards.has_key('has_more'):
|
||||
if setcards['has_more']:
|
||||
setUrl = setcards['next_page']
|
||||
else:
|
||||
setDone = True
|
||||
else:
|
||||
print 'Scryfall does not "has_more"'
|
||||
setDone = True
|
||||
if not scryfall[0] == '':
|
||||
import json
|
||||
scryfall2 = []
|
||||
for cardarray in scryfall:
|
||||
for card in cardarray:
|
||||
scryfall2.append(card)
|
||||
scryfall = convert_scryfall(scryfall2)
|
||||
return {'cards': scryfall}
|
||||
else:
|
||||
return {'cards': []}
|
||||
|
||||
|
||||
def convert_scryfall(scryfall):
|
||||
cards2 = []
|
||||
scryfall2 = []
|
||||
for card in scryfall:
|
||||
if card == "cards" or card == "" or card == []:
|
||||
continue
|
||||
if 'layout' in card and card['layout'] == 'transform':
|
||||
cardNoFaces = {}
|
||||
for key in card:
|
||||
if key != 'card_faces':
|
||||
cardNoFaces[key] = card[key]
|
||||
cardNoFaces['layout'] = 'double-faced'
|
||||
cardNoFaces['names'] = [card['card_faces'][0]['name'], card['card_faces'][1]['name']]
|
||||
card1 = dict(cardNoFaces.items() + card['card_faces'][0].items())
|
||||
card2 = dict(cardNoFaces.items() + card['card_faces'][1].items())
|
||||
card1['collector_number'] = card1['collector_number'] + 'a'
|
||||
card2['collector_number'] = card2['collector_number'] + 'b'
|
||||
scryfall2.append(card1)
|
||||
scryfall2.append(card2)
|
||||
else:
|
||||
scryfall2.append(card)
|
||||
scryfall = scryfall2
|
||||
for card in scryfall:
|
||||
card2 = {}
|
||||
card2['cmc'] = int(card['cmc'])
|
||||
if 'names' in card:
|
||||
card2['names'] = card['names']
|
||||
if card.has_key('mana_cost'):
|
||||
card2['manaCost'] = card['mana_cost'].replace(
|
||||
'{', '').replace('}', '')
|
||||
else:
|
||||
card2['manaCost'] = ''
|
||||
card2['name'] = card['name']
|
||||
card2['number'] = card['collector_number']
|
||||
card2['rarity'] = card['rarity'].replace(
|
||||
'mythic', 'mythic rare').title()
|
||||
if card.has_key('oracle_text'):
|
||||
card2['text'] = card['oracle_text'].replace(
|
||||
u"\u2014", '-').replace(u"\u2212", "-")
|
||||
else:
|
||||
card2['text'] = ''
|
||||
if 'image_uri' in card:
|
||||
card2['url'] = card['image_uri']
|
||||
elif 'image_uris' in card:
|
||||
if 'large' in card['image_uris']:
|
||||
card2['url'] = card['image_uris']['large']
|
||||
elif 'normal' in card['image_uris']:
|
||||
card2['url'] = card['image_uris']['normal']
|
||||
elif 'small' in card['image_uris']:
|
||||
card2['url'] = card['image_uris']['small']
|
||||
|
||||
if not 'type_line' in card:
|
||||
card['type_line'] = 'Unknown'
|
||||
card2['type'] = card['type_line'].replace(u'—', '-')
|
||||
cardtypes = card['type_line'].split(u' — ')[0].replace('Legendary ', '').replace('Snow ', '')\
|
||||
.replace('Elite ', '').replace('Basic ', '').replace('World ', '').replace('Ongoing ', '')
|
||||
cardtypes = cardtypes.split(' ')
|
||||
if u' — ' in card['type_line']:
|
||||
cardsubtypes = card['type_line'].split(u' — ')[1]
|
||||
if ' ' in cardsubtypes:
|
||||
card2['subtypes'] = cardsubtypes.split(' ')
|
||||
else:
|
||||
card2['subtypes'] = [cardsubtypes]
|
||||
if 'Legendary' in card['type_line']:
|
||||
if card2.has_key('supertypes'):
|
||||
card2['supertypes'].append('Legendary')
|
||||
else:
|
||||
card2['supertypes'] = ['Legendary']
|
||||
if 'Snow' in card['type_line']:
|
||||
if card2.has_key('supertypes'):
|
||||
card2['supertypes'].append('Snow')
|
||||
else:
|
||||
card2['supertypes'] = ['Snow']
|
||||
if 'Elite' in card['type_line']:
|
||||
if card2.has_key('supertypes'):
|
||||
card2['supertypes'].append('Elite')
|
||||
else:
|
||||
card2['supertypes'] = ['Elite']
|
||||
if 'Basic' in card['type_line']:
|
||||
if card2.has_key('supertypes'):
|
||||
card2['supertypes'].append('Basic')
|
||||
else:
|
||||
card2['supertypes'] = ['Basic']
|
||||
if 'World' in card['type_line']:
|
||||
if card2.has_key('supertypes'):
|
||||
card2['supertypes'].append('World')
|
||||
else:
|
||||
card2['supertypes'] = ['World']
|
||||
if 'Ongoing' in card['type_line']:
|
||||
if card2.has_key('supertypes'):
|
||||
card2['supertypes'].append('Ongoing')
|
||||
else:
|
||||
card2['supertypes'] = ['Ongoing']
|
||||
card2['types'] = cardtypes
|
||||
if card.has_key('color_identity'):
|
||||
card2['colorIdentity'] = card['color_identity']
|
||||
if card.has_key('colors'):
|
||||
if not card['colors'] == []:
|
||||
card2['colors'] = []
|
||||
if 'W' in card['colors']:
|
||||
card2['colors'].append("White")
|
||||
if 'U' in card['colors']:
|
||||
card2['colors'].append("Blue")
|
||||
if 'B' in card['colors']:
|
||||
card2['colors'].append("Black")
|
||||
if 'R' in card['colors']:
|
||||
card2['colors'].append("Red")
|
||||
if 'G' in card['colors']:
|
||||
card2['colors'].append("Green")
|
||||
#card2['colors'] = card['colors']
|
||||
if card.has_key('all_parts'):
|
||||
card2['names'] = []
|
||||
for partname in card['all_parts']:
|
||||
card2['names'].append(partname['name'])
|
||||
if card.has_key('power'):
|
||||
card2['power'] = card['power']
|
||||
if card.has_key('toughness'):
|
||||
card2['toughness'] = card['toughness']
|
||||
if card.has_key('layout'):
|
||||
if card['layout'] != 'normal':
|
||||
card2['layout'] = card['layout']
|
||||
if card.has_key('loyalty'):
|
||||
card2['loyalty'] = card['loyalty']
|
||||
if card.has_key('artist'):
|
||||
card2['artist'] = card['artist']
|
||||
# if card.has_key('source'):
|
||||
# card2['source'] = card['source']
|
||||
# if card.has_key('rulings'):
|
||||
# card2['rulings'] = card['rulings']
|
||||
if card.has_key('flavor_text'):
|
||||
card2['flavor'] = card['flavor_text']
|
||||
if card.has_key('multiverse_id'):
|
||||
card2['multiverseid'] = card['multiverse_id']
|
||||
|
||||
cards2.append(card2)
|
||||
|
||||
return cards2
|
||||
|
||||
|
||||
def smash_mtgs_scryfall(mtgs, scryfall):
|
||||
for mtgscard in mtgs['cards']:
|
||||
cardFound = False
|
||||
for scryfallcard in scryfall['cards']:
|
||||
if scryfallcard['name'] == mtgscard['name']:
|
||||
for key in scryfallcard:
|
||||
if key in mtgscard:
|
||||
if not mtgscard[key] == scryfallcard[key]:
|
||||
try:
|
||||
print "%s's key %s\nMTGS : %s\nScryfall: %s" % (mtgscard['name'], key, mtgscard[key], scryfallcard[key])
|
||||
except:
|
||||
print "Error printing Scryfall vs MTGS debug info for " + mtgscard['name']
|
||||
pass
|
||||
cardFound = True
|
||||
if not cardFound:
|
||||
print "MTGS has card %s and Scryfall does not." % mtgscard['name']
|
||||
for scryfallcard in scryfall['cards']:
|
||||
cardFound = False
|
||||
for mtgscard in mtgs['cards']:
|
||||
if scryfallcard['name'] == mtgscard['name']:
|
||||
cardFound = True
|
||||
if not cardFound:
|
||||
print "Scryfall has card %s and MTGS does not." % scryfallcard['name']
|
||||
|
||||
return mtgs
|
||||
95
set_info.yml
95
set_info.yml
|
|
@ -1,95 +0,0 @@
|
|||
# Every set needs to start with three dashes "---" above its information.
|
||||
# No closing dashes after each set or at the end of the file needed!
|
||||
#
|
||||
# required keys
|
||||
#
|
||||
# code: FSN
|
||||
# name: "Full Set Name"
|
||||
# size: 274
|
||||
# releaseDate: "2050-02-28"
|
||||
# type: expansion
|
||||
#
|
||||
# optional keys
|
||||
#
|
||||
# block: "Block Name"
|
||||
# mtgsurl: "http://url_to_mtgsalvation.com/spoilers/page
|
||||
# mtgscardpath "http://url_to_mtgsalvation.com/cards/setpage/"
|
||||
# fullSpoil: false
|
||||
# noRSS: true #don't check MTGS spoiler newsfeed spoiler.rss for this set
|
||||
# noBooster:
|
||||
# mythicCode:
|
||||
# mythicOnly:
|
||||
# scryfallOnly:
|
||||
# masterpieces:
|
||||
#
|
||||
# Masterpieces contain code, name, releaseDate as above
|
||||
# and requires mtgsurl and mtgscardpath
|
||||
# also can contain
|
||||
#
|
||||
# alternativeNames: ["Same as set long name, but minus 'Masterpiece Series:'"]
|
||||
#
|
||||
# Example "Hour of Devastation" info (scraped from MTGS) with leading dashes and masterpieces from all its block:
|
||||
# ---
|
||||
# code: "HOU"
|
||||
# name: "Hour of Devastation"
|
||||
# block: "Amonkhet"
|
||||
# size: 199
|
||||
# releaseDate: "2017-07-14"
|
||||
# type: "expansion" #can be "expansion", "core", "commander", "masters" - for full list see http://mtgjson.com/documentation.html#sets
|
||||
# mtgsurl: "http://www.mtgsalvation.com/spoilers/183-hour-of-devastation" #looks like http://www.mtgsalvation.com/spoilers/183 automatically redirects to same page
|
||||
# mtgscardpath: "http://www.mtgsalvation.com/cards/hour-of-devastation/" #important: don't forget the trailing slash "/" at the end of the link!
|
||||
# fullSpoil: false
|
||||
# masterpieces:
|
||||
# code: "MPS_AKH"
|
||||
# name: "Masterpiece Series: Amonkhet Invocations"
|
||||
# releaseDate: "2017-04-28"
|
||||
# alternativeNames: ["Amonkhet Invocations"]
|
||||
# galleryURL: "http://magic.wizards.com/en/articles/archive/feature/masterpiece-series-hour-devastation-invocations-2017-06-19"
|
||||
# additionalCardNames: []
|
||||
# mtgsurl: "http://www.mtgsalvation.com/spoilers/181-amonkhet-invocations"
|
||||
# mtgscardpath: "http://www.mtgsalvation.com/cards/amonkhet-invocations/"
|
||||
---
|
||||
code: "RIX"
|
||||
name: "Rivals of Ixalan"
|
||||
block: "Ixalan"
|
||||
size: 196
|
||||
releaseDate: "2018-01-19"
|
||||
type: "expansion"
|
||||
mtgsurl: "http://www.mtgsalvation.com/spoilers/188-rivals-of-ixalan"
|
||||
mtgscardpath: "http://www.mtgsalvation.com/cards/rivals-of-ixalan/"
|
||||
fullSpoil: false
|
||||
noRSS: false
|
||||
scryfallOnly: true
|
||||
---
|
||||
code: "A25"
|
||||
name: "Masters 25"
|
||||
#block: "?"
|
||||
#size: ?
|
||||
releaseDate: "2018-03-16"
|
||||
type: "masters"
|
||||
mtgsurl: ""
|
||||
#mtgscardpath: "?"
|
||||
fullSpoil: false
|
||||
noRSS: true
|
||||
---
|
||||
code: "DOM"
|
||||
name: "Dominaria"
|
||||
#block: "?"
|
||||
size: 269
|
||||
releaseDate: "2018-04-27"
|
||||
type: "expansion"
|
||||
mtgsurl: ""
|
||||
#mtgscardpath: "?"
|
||||
fullSpoil: false
|
||||
noRSS: true
|
||||
---
|
||||
code: "M19"
|
||||
name: "Core Set 2019"
|
||||
#block: "?"
|
||||
size: 269
|
||||
releaseDate: "2018-07-20"
|
||||
type: "core"
|
||||
mtgsurl: ""
|
||||
#mtgscardpath: "?"
|
||||
fullSpoil: false
|
||||
noRSS: true
|
||||
24
setup.py
Normal file
24
setup.py
Normal file
|
|
@ -0,0 +1,24 @@
|
|||
"""Installation setup for Magic-Spoiler."""
|
||||
|
||||
import setuptools
|
||||
|
||||
# Necessary for TOX
|
||||
setuptools.setup(
|
||||
name="Magic-Spoiler",
|
||||
version="0.1.0",
|
||||
author="Zach Halpern",
|
||||
author_email="zach@cockatrice.us",
|
||||
url="https://github.com/Cockatrice/Magic-Spoiler/",
|
||||
description="Build XML files for distribution of MTG spoiler cards",
|
||||
long_description=open("README.md", "r").read(),
|
||||
long_description_content_type="text/markdown",
|
||||
license="GPL-3.0",
|
||||
classifiers=[
|
||||
"Programming Language :: Python :: 3 :: Only",
|
||||
"Programming Language :: Python :: 3.6",
|
||||
"Programming Language :: Python :: 3.7",
|
||||
"License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)",
|
||||
],
|
||||
keywords="Magic: The Gathering, MTG, XML, Card Games, Collectible, Trading Cards",
|
||||
packages=setuptools.find_packages(),
|
||||
)
|
||||
823
spoilers.py
823
spoilers.py
|
|
@ -1,823 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
import requests
|
||||
import re
|
||||
import os
|
||||
from lxml import html
|
||||
import datetime
|
||||
import json
|
||||
import mtgs_scraper
|
||||
import xml.dom.minidom
|
||||
|
||||
|
||||
def correct_cards(mtgjson, manual_cards=[], card_corrections=[], delete_cards=[]):
|
||||
mtgjson2 = []
|
||||
for card in manual_cards:
|
||||
if 'manaCost' in card:
|
||||
card['manaCost'] = str(card['manaCost'])
|
||||
if 'number' in card:
|
||||
card['number'] = str(card['number'])
|
||||
if 'cmc' not in card:
|
||||
workingCMC = 0
|
||||
if 'manaCost' in card:
|
||||
stripCost = card['manaCost'].replace('{','').replace('}','')
|
||||
for manaSymbol in stripCost:
|
||||
if manaSymbol.isdigit():
|
||||
workingCMC += int(manaSymbol)
|
||||
elif not manaSymbol == 'X':
|
||||
workingCMC += 1
|
||||
card['cmc'] = workingCMC
|
||||
if 'types' not in card:
|
||||
card['types'] = []
|
||||
workingtypes = card['type']
|
||||
if ' - ' in workingtypes:
|
||||
workingtypes = card['type'].split(' - ')[0]
|
||||
card['types'] = workingtypes.replace('Legendary ', '').replace('Snow ', '') \
|
||||
.replace('Elite ', '').replace('Basic ', '').replace('World ', '').replace('Ongoing ', '') \
|
||||
.strip().split(' ')
|
||||
if 'subtypes' not in card:
|
||||
# if '—' in card['type']:
|
||||
# workingSubtypes = card['type'].split('—')[1].strip()
|
||||
if '-' in card['type']:
|
||||
workingSubtypes = card['type'].split('-')[1].strip()
|
||||
if workingSubtypes:
|
||||
card['subtypes'] = workingSubtypes.split(' ')
|
||||
colorMap = {
|
||||
"W": "White",
|
||||
"U": "Blue",
|
||||
"B": "Black",
|
||||
"R": "Red",
|
||||
"G": "Green"
|
||||
}
|
||||
if 'manaCost' in card:
|
||||
if 'text' in card and not 'Devoid' in card['text']:
|
||||
for letter in str(card['manaCost']):
|
||||
if not letter.isdigit() and not letter == 'X':
|
||||
if 'colorIdentity' in card:
|
||||
if not letter in card['colorIdentity']:
|
||||
card['colorIdentity'] += letter
|
||||
else:
|
||||
card['colorIdentity'] = [letter]
|
||||
if 'colors' in card:
|
||||
if not colorMap[letter] in card['colors']:
|
||||
card['colors'].append(colorMap[letter])
|
||||
else:
|
||||
card['colors'] = [colorMap[letter]]
|
||||
if 'text' in card:
|
||||
for CID in colorMap:
|
||||
if '{' + CID + '}' in card['text']:
|
||||
if 'colorIdentity' in card:
|
||||
if not CID in card['colorIdentity']:
|
||||
card['colorIdentity'] += CID
|
||||
else:
|
||||
card['colorIdentity'] = [CID]
|
||||
manual_added = []
|
||||
for card in mtgjson['cards']:
|
||||
isManual = False
|
||||
for manualCard in manual_cards:
|
||||
if card['name'] == manualCard['name']:
|
||||
mtgjson2.append(manualCard)
|
||||
manual_added.append(manualCard['name'] + " (overwritten)")
|
||||
isManual = True
|
||||
if not isManual and not card['name'] in delete_cards:
|
||||
mtgjson2.append(card)
|
||||
for manualCard in manual_cards:
|
||||
addManual = True
|
||||
for card in mtgjson['cards']:
|
||||
if manualCard['name'] == card['name']:
|
||||
addManual = False
|
||||
if addManual:
|
||||
mtgjson2.append(manualCard)
|
||||
manual_added.append(manualCard['name'])
|
||||
if manual_added != []:
|
||||
print "Manual Cards Added: " + str(manual_added).strip('[]')
|
||||
|
||||
mtgjson = {"cards": mtgjson2}
|
||||
transforms = {}
|
||||
for card in mtgjson['cards']:
|
||||
if 'text' in card:
|
||||
if '{' in card['text']:
|
||||
card['text'] = re.sub(r'{(.*?)}', replace_costs, card['text'])
|
||||
for card2 in mtgjson['cards']:
|
||||
if 'number' in card and 'number' in card2 and card2['number'] == card['number'] and \
|
||||
not card['name'] == card2['name'] and card['number'] != '?' and card2['number'] != '?':
|
||||
transforms[card['name']] = card2['name']
|
||||
if 'number' in card and not '?' in card['number']:
|
||||
if 'transforms from' in card['text'].lower():
|
||||
if 'number' in card:
|
||||
if not 'b' in card['number']:
|
||||
if 'a' in card['number']:
|
||||
card['number'] = card['number'].replace('a','b')
|
||||
else:
|
||||
card['number'] = str(card['number']) + 'b'
|
||||
card['layout'] = 'double-faced'
|
||||
if 'transform ' in card['text'].lower() or 'transformed' in card['text'].lower():
|
||||
if 'number' in card:
|
||||
if not 'a' in card['number']:
|
||||
if 'b' in card['number']:
|
||||
card['number'] = card['number'].replace('b','a')
|
||||
else:
|
||||
card['number'] = str(card['number']) + 'a'
|
||||
card['layout'] = 'double-faced'
|
||||
if 'number' in card and 'a' in card['number'] or 'b' in card['number']:
|
||||
for card1 in transforms:
|
||||
if card['name'] == card1:
|
||||
if 'a' in card['number']:
|
||||
card['names'] = [card1, transforms[card1]]
|
||||
else:
|
||||
card['names'] = [transforms[card1], card1]
|
||||
if card['name'] == transforms[card1]:
|
||||
if 'a' in card['number']:
|
||||
card['names'] = [card['name'], card1]
|
||||
else:
|
||||
card['names'] = [card1, card['name']]
|
||||
|
||||
return mtgjson
|
||||
|
||||
|
||||
def replace_costs(match):
|
||||
full_cost = match.group(1)
|
||||
individual_costs = []
|
||||
if len(full_cost) > 0:
|
||||
for x in range(0, len(full_cost)):
|
||||
individual_costs.append('{' + str(full_cost[x]).upper() + '}')
|
||||
return ''.join(individual_costs)
|
||||
|
||||
|
||||
def error_check(mtgjson, card_corrections=None):
    """Validate spoiler card data and apply manual corrections.

    Walks every card in ``mtgjson['cards']`` and records problems
    (missing or inconsistent keys) as dicts in an error list; some
    problems are repaired in place and flagged with ``"fixed": True``.
    A second pass sanity-checks split/meld/aftermath card pairings, and
    a third pass applies ``card_corrections`` (card name -> {key: new
    value}; the special key 'img' targets the card's 'url').

    Returns ``[mtgjson, errors]``.
    """
    # BUG FIX: the original used the mutable default `card_corrections={}`,
    # which is shared across calls.
    if card_corrections is None:
        card_corrections = {}
    errors = []
    for card in mtgjson['cards']:
        # Empty-string keys indicate malformed scraped data.
        for key in card:
            if key == "":
                errors.append({"name": card['name'], "key": key, "value": ""})
        requiredKeys = ['name', 'type', 'types']
        for requiredKey in requiredKeys:
            if not requiredKey in card:
                # BUG FIX: previously reported the stale loop variable `key`
                # instead of the actually missing key's name.
                errors.append(
                    {"name": card['name'], "key": requiredKey, "missing": True})
        if 'text' in card:
            # Strip stray HTML markup from rules text.
            # BUG FIX: '</em' was missing its closing '>'.
            card['text'] = card['text'].replace('<i>', '').replace(
                '</i>', '').replace('<em>', '').replace('</em>', '').replace('•', u'•')
        if 'type' in card:
            if 'Planeswalker' in card['type']:
                if not 'loyalty' in card:
                    errors.append(
                        {"name": card['name'], "key": "loyalty", "value": ""})
                if not card['rarity'] == 'Mythic Rare':
                    errors.append(
                        {"name": card['name'], "key": "rarity", "value": card['rarity']})
                if not 'subtypes' in card:
                    errors.append({"name": card['name'], "key": "subtypes", "oldvalue": "",
                                   "newvalue": card['name'].split(" ")[0], "fixed": True})
                    # The planeswalker subtype is normally the first name
                    # word; "Ob Nixilis" / "Nicol Bolas" use the second.
                    # BUG FIX: the 'Nicol' test compared a *list* to a
                    # string and could never match.
                    if not card['name'].split(' ')[0] == 'Ob' and not card['name'].split(' ')[0] == 'Nicol':
                        card["subtypes"] = card['name'].split(" ")[0]
                    else:
                        card["subtypes"] = card['name'].split(" ")[1]
                if not 'types' in card:
                    card['types'] = ["Planeswalker"]
                elif not "Planeswalker" in card['types']:
                    card['types'].append("Planeswalker")
            if 'Creature' in card['type']:
                if not 'power' in card:
                    errors.append(
                        {"name": card['name'], "key": "power", "value": ""})
                if not 'toughness' in card:
                    errors.append(
                        {"name": card['name'], "key": "toughness", "value": ""})
                if not 'subtypes' in card:
                    errors.append(
                        {"name": card['name'], "key": "subtypes", "value": ""})
            if '-' in card['type']:
                # Normalize the type-line separator to a proper em dash.
                card['type'] = card['type'].replace('-', u'—')
        if 'manaCost' in card and card['manaCost'] != "":
            # Recompute converted mana cost from the symbols; X counts 0.
            # NOTE(review): multi-digit symbols like {10} are summed per
            # digit here (1+0) — possible latent bug, left as-is.
            workingCMC = 0
            stripCost = card['manaCost'].replace('{', '').replace('}', '')
            for manaSymbol in stripCost:
                if manaSymbol.isdigit():
                    workingCMC += int(manaSymbol)
                elif not manaSymbol == 'X':
                    workingCMC += 1
            if not 'cmc' in card:
                errors.append(
                    {"name": card['name'], "key": "cmc", "value": ""})
            elif not card['cmc'] == workingCMC:
                errors.append({"name": card['name'], "key": "cmc", "oldvalue": card['cmc'],
                               "newvalue": workingCMC, "fixed": True, "match": card['manaCost']})
                card['cmc'] = workingCMC
        else:
            # No usable manaCost: non-lands should have one; lands should not.
            if 'type' in card and not 'land' in card['type'].lower():
                errors.append(
                    {"name": card['name'], "key": "manaCost", "value": ""})
                if not 'cmc' in card:
                    errors.append({"name": card['name'], "key": "cmc", "value": ""})
                else:
                    if not isinstance(card['cmc'], int):
                        errors.append({"name": card['name'], "key": "cmc", "oldvalue": card['cmc'], "newvalue": int(
                            card['cmc']), "fixed": True})
                        card['cmc'] = int(card['cmc'])
                    else:
                        if card['cmc'] > 0:
                            if not 'manaCost' in card:
                                errors.append(
                                    {"name": card['name'], "key": "manaCost", "value": "", "match": card['cmc']})
            else:
                if 'manaCost' in card:
                    errors.append(
                        {"name": card['name'], "key": "manaCost", "oldvalue": card['manaCost'], "fixed": True})
                    del card["manaCost"]
        if 'colors' in card:
            if not 'colorIdentity' in card:
                # Devoid cards legitimately have colors but no identity text.
                if 'text' in card:
                    if not 'devoid' in card['text'].lower():
                        errors.append(
                            {"name": card['name'], "key": "colorIdentity", "value": ""})
                else:
                    errors.append(
                        {"name": card['name'], "key": "colorIdentity", "value": ""})
        if 'colorIdentity' in card:
            if not 'colors' in card:
                # this one will false positive on emerge cards
                if not 'Land' in card['type'] and not 'Artifact' in card['type'] and not 'Eldrazi' in card['type']:
                    if 'text' in card:
                        if not 'emerge' in card['text'].lower() and not 'devoid' in card['text'].lower():
                            errors.append(
                                {"name": card['name'], "key": "colors", "value": ""})
                    else:
                        errors.append(
                            {"name": card['name'], "key": "colors", "value": ""})
        if not 'url' in card:
            errors.append({"name": card['name'], "key": "url", "value": ""})
        elif len(card['url']) < 10:
            errors.append({"name": card['name'], "key": "url", "value": ""})
        if not 'number' in card:
            errors.append({"name": card['name'], "key": "number", "value": ""})
        if not 'types' in card:
            errors.append({"name": card['name'], "key": "types", "value": ""})
        else:
            # Renamed from `type` to avoid shadowing the builtin.
            for card_type in card['types']:
                if card_type not in ['Creature', 'Artifact', 'Conspiracy', 'Enchantment', 'Instant', 'Land', 'Phenomenon', 'Plane', 'Planeswalker', 'Scheme',
                                     'Sorcery', 'Tribal', 'Vanguard']:
                    errors.append({"name": card['name'], "key": "types", "value": card['types']})

    # we're going to loop through again and make sure split cards get paired
    for card in mtgjson['cards']:
        if 'layout' in card:
            if card['layout'] == 'split' or card['layout'] == 'meld' or card['layout'] == 'aftermath':
                if not 'names' in card:
                    errors.append(
                        {"name": card['name'], "key": "names", "value": ""})
                else:
                    for related_card_name in card['names']:
                        if related_card_name != card['name']:
                            related_card = False
                            for card2 in mtgjson['cards']:
                                if card2['name'] == related_card_name:
                                    related_card = card2
                            if not related_card:
                                errors.append(
                                    {"name": card['name'], "key": "names", "value": card['names']})
                            else:
                                # Merge colors/identity from the other half.
                                if 'colors' in related_card:
                                    for color in related_card['colors']:
                                        if not 'colors' in card:
                                            card['colors'] = [color]
                                        elif not color in card['colors']:
                                            card['colors'].append(color)
                                if 'colorIdentity' in related_card:
                                    for colorIdentity in related_card['colorIdentity']:
                                        if not 'colorIdentity' in card:
                                            card['colorIdentity'] = [
                                                colorIdentity]
                                        elif not colorIdentity in card['colorIdentity']:
                                            card['colorIdentity'].append(
                                                colorIdentity)
                if 'number' in card:
                    # Multi-part cards must carry an a/b/c collector suffix.
                    if not 'a' in card['number'] and not 'b' in card['number'] and not 'c' in card['number']:
                        errors.append(
                            {"name": card['name'], "key": "number", "value": card['number']})

    for card in mtgjson['cards']:
        for cardCorrection in card_corrections:
            if card['name'] == cardCorrection:
                for correctionType in card_corrections[cardCorrection]:
                    if correctionType == 'number':
                        card_corrections[cardCorrection]['number'] = str(card_corrections[cardCorrection]['number'])
                    if not correctionType == 'name':
                        if correctionType == 'img':
                            card['url'] = card_corrections[cardCorrection][correctionType]
                        else:
                            card[correctionType] = card_corrections[cardCorrection][correctionType]
                # 'name' is applied last so the lookup above still matches.
                if 'name' in card_corrections[cardCorrection]:
                    card['name'] = card_corrections[cardCorrection]['name']

    return [mtgjson, errors]
|
||||
|
||||
|
||||
def remove_corrected_errors(errorlog=None, card_corrections=None, print_fixed=False):
    """Collapse an error list into {card name: {key: value}}, dropping
    errors that a manual correction already covers.

    Errors flagged ``"fixed": True`` are also dropped unless
    ``print_fixed`` is set.  Errors without a 'value' get "" filled in
    (mutating the error dict, as before).
    """
    # BUG FIX: the original used mutable default arguments ([] shared
    # across calls); use None sentinels instead.
    if errorlog is None:
        errorlog = []
    if card_corrections is None:
        card_corrections = {}
    errorlog2 = {}
    for error in errorlog:
        if not print_fixed:
            if 'fixed' in error and error['fixed'] == True:
                continue
        removeError = False
        for correction in card_corrections:
            for correction_type in card_corrections[correction]:
                if error['name'] == correction:
                    if error['key'] == correction_type:
                        removeError = True
        if not removeError:
            if not error['name'] in errorlog2:
                errorlog2[error['name']] = {}
            if not 'value' in error:
                error['value'] = ""
            errorlog2[error['name']][error['key']] = error['value']
    return errorlog2
|
||||
|
||||
|
||||
def get_image_urls(mtgjson, isfullspoil, setinfo=False):
    """Fill in each card's 'url' by scraping WotC galleries, then
    mythicspoiler.com, then (optionally) MTG Salvation via mtgs_scraper.

    NOTE(review): the default ``setinfo=False`` would crash on the very
    first subscript below — callers must always pass a set-info dict.
    """
    # mythicspoiler sometimes files a set under a different code.
    if not 'mythicCode' in setinfo:
        setinfo['mythicCode'] = setinfo['code']
    IMAGES = 'https://magic.wizards.com/en/products/' + \
        setinfo['name'].lower().replace(' ', '-') + '/cards'
    IMAGES2 = 'http://mythicspoiler.com/newspoilers.html'
    # NOTE(review): the chained replaces look garbled by scraping
    # (".replace(' ', ' ')" was presumably a double-space collapse).
    IMAGES3 = 'http://magic.wizards.com/en/articles/archive/card-image-gallery/' + \
        setinfo['name'].lower().replace('of', '').replace(' ', ' ').replace(' ', '-')

    text = requests.get(IMAGES).text
    text2 = requests.get(IMAGES2).text
    text3 = requests.get(IMAGES3).text
    # {} is filled with the (curly-apostrophe) card name per card below.
    wotcpattern = r'<img alt="{}.*?" src="(?P<img>.*?\.png)"'
    wotcpattern2 = r'<img src="(?P<img>.*?\.png).*?alt="{}.*?"'
    mythicspoilerpattern = r' src="' + setinfo['mythicCode'].lower() + '/cards/{}.*?.jpg">'
    WOTC = []  # names whose image came from a wizards.com page
    for c in mtgjson['cards']:
        # Multi-part cards are listed under their combined "A // B" name.
        if 'names' in c:
            cardname = ' // '.join(c['names'])
        else:
            cardname = c['name']
        match = re.search(wotcpattern.format(
            cardname.replace('\'', '’')), text, re.DOTALL)
        if match:
            c['url'] = match.groupdict()['img']
        else:
            match3 = re.search(wotcpattern2.format(
                cardname.replace('\'', '’')), text3)
            if match3:
                c['url'] = match3.groupdict()['img']
            else:
                match4 = re.search(wotcpattern.format(
                    cardname.replace('\'', '’')), text3, re.DOTALL)
                if match4:
                    c['url'] = match4.groupdict()['img']
                else:
                    # NOTE(review): ".replace(''', '')" below does not parse
                    # as Python — almost certainly a scraping artifact of an
                    # HTML apostrophe entity (e.g. '&#39;') in the real file.
                    match2 = re.search(mythicspoilerpattern.format(cardname.lower().replace(' // ', '').replace(
                        ' ', '').replace(''', '').replace('-', '').replace('\'', '').replace(',', '')), text2, re.DOTALL)
                    if match2 and not isfullspoil:
                        c['url'] = match2.group(0).replace(
                            ' src="', 'http://mythicspoiler.com/').replace('">', '')
                    pass
        if 'wizards.com' in c['url']:
            WOTC.append(c['name'])
    if setinfo:
        if 'mtgsurl' in setinfo and 'mtgscardpath' in setinfo:
            # MTG Salvation images override everything except WotC ones.
            mtgsImages = mtgs_scraper.scrape_mtgs_images(
                setinfo['mtgsurl'], setinfo['mtgscardpath'], WOTC)
            for card in mtgjson['cards']:
                if card['name'] in mtgsImages:
                    if mtgsImages[card['name']]['url'] != '':
                        card['url'] = mtgsImages[card['name']]['url']

    #for card in mtgjson['cards']:
    #    if len(str(card['url'])) < 10:
    #        print(card['name'] + ' has no image.')
    return mtgjson
|
||||
|
||||
|
||||
def write_xml(mtgjson, code, name, releaseDate):
    """Write one Cockatrice card-database XML file (out/<code>.xml) for a set.

    NOTE(review): Python 2 only (print statements, dict.has_key).  The
    file handle is never close()d, the `name` parameter is clobbered as a
    per-card local below, and `dfccount` is printed but never incremented.
    """
    # Nothing to write for an empty or missing card list.
    if not 'cards' in mtgjson or not mtgjson['cards'] or mtgjson['cards'] == []:
        return
    if not os.path.isdir('out/'):
        os.makedirs('out/')
    cardsxml = open('out/' + code + '.xml', 'w+')
    cardsxml.truncate()
    count = 0      # cards written; first one is remembered as `newest`
    dfccount = 0
    newest = ''
    related = 0    # back-face name for double-faced cards, else falsy
    cardsxml.write("<?xml version='1.0' encoding='UTF-8'?>\n"
                   "<cockatrice_carddatabase version='3'>\n"
                   "<sets>\n<set>\n<name>"
                   + code +
                   "</name>\n"
                   "<longname>"
                   + name +
                   "</longname>\n"
                   "<settype>Expansion</settype>\n"
                   "<releasedate>"
                   + releaseDate +
                   "</releasedate>\n"
                   "</set>\n"
                   "</sets>\n"
                   "<cards>\n")
    # print mtgjson
    for card in mtgjson["cards"]:
        # Skip the right-hand half of split-style cards (the pair is
        # merged into the left half's entry further down).
        if 'names' in card:
            if 'layout' in card and card['layout'] != 'double-faced':
                if card["name"] == card['names'][1]:
                    continue
        if count == 0:
            newest = card["name"]
        count += 1
        name = card["name"]
        if card.has_key("manaCost"):
            manacost = card["manaCost"].replace('{', '').replace('}', '')
        else:
            manacost = ""
        # NOTE(review): a card with toughness but no power would raise
        # KeyError on card["power"] here.
        if card.has_key("power") or card.has_key("toughness"):
            if card["power"]:
                pt = str(card["power"]) + "/" + str(card["toughness"])
            else:
                pt = 0
        else:
            pt = 0
        if card.has_key("text"):
            text = card["text"]
        else:
            text = ""
        cardcmc = str(card['cmc'])
        cardtype = card["type"]
        if card.has_key("names"):
            if "layout" in card:
                if card['layout'] == 'split' or card['layout'] == 'aftermath':
                    if 'names' in card:
                        # Merge the right half's data into this (left) entry.
                        if card['name'] == card['names'][0]:
                            for jsoncard in mtgjson["cards"]:
                                if jsoncard['name'] == card['names'][1]:
                                    cardtype += " // " + jsoncard["type"]
                                    newmanacost = ""
                                    if 'manaCost' in jsoncard:
                                        newmanacost = jsoncard['manaCost']
                                    manacost += " // " + \
                                        newmanacost.replace(
                                            '{', '').replace('}', '')
                                    cardcmc += " // " + str(jsoncard["cmc"])
                                    text += "\n---\n" + jsoncard["text"]
                                    name += " // " + jsoncard['name']
                elif card['layout'] == 'double-faced':
                    if not 'names' in card:
                        print card['name'] + ' is double-faced but no "names" key'
                    else:
                        # Remember the other face for a <related> element.
                        for dfcname in card['names']:
                            if dfcname != card['name']:
                                related = dfcname
                else:
                    print card["name"] + " has names, but layout != split, aftermath, or double-faced"
            else:
                print card["name"] + " has multiple names and no 'layout' key"

        # Cockatrice table row: 0 land, 1 other, 2 creature, 3 spells.
        tablerow = "1"
        if "Land" in cardtype:
            tablerow = "0"
        elif "Sorcery" in cardtype:
            tablerow = "3"
        elif "Instant" in cardtype:
            tablerow = "3"
        elif "Creature" in cardtype:
            tablerow = "2"

        # Safety net: never emit the 'b' half of a split/aftermath card.
        if 'number' in card:
            if 'b' in str(card['number']):
                if 'layout' in card:
                    if card['layout'] == 'split' or card['layout'] == 'aftermath':
                        # print "We're skipping " + card['name'] + " because it's the right side of a split card"
                        continue

        cardsxml.write("<card>\n")
        cardsxml.write("<name>" + name.encode('utf-8') + "</name>\n")
        cardsxml.write(
            '<set rarity="' + card['rarity'] + '" picURL="' + card["url"] + '">' + code + '</set>\n')
        cardsxml.write(
            "<manacost>" + manacost.encode('utf-8') + "</manacost>\n")
        cardsxml.write("<cmc>" + cardcmc + "</cmc>\n")
        if card.has_key('colors'):
            colorTranslate = {
                "White": "W",
                "Blue": "U",
                "Black": "B",
                "Red": "R",
                "Green": "G"
            }
            for color in card['colors']:
                cardsxml.write(
                    '<color>' + colorTranslate[color] + '</color>\n')
        # "cipt" = comes into play tapped.
        if name + ' enters the battlefield tapped' in text:
            cardsxml.write("<cipt>1</cipt>\n")
        cardsxml.write("<type>" + cardtype.encode('utf-8') + "</type>\n")
        if pt:
            cardsxml.write("<pt>" + pt + "</pt>\n")
        if card.has_key('loyalty'):
            cardsxml.write("<loyalty>" + str(card['loyalty']) + "</loyalty>\n")
        cardsxml.write("<tablerow>" + tablerow + "</tablerow>\n")
        cardsxml.write("<text>" + text.encode('utf-8') + "</text>\n")
        if related:
            # for relatedname in related:
            cardsxml.write(
                "<related>" + related.encode('utf-8') + "</related>\n")
            related = ''

        cardsxml.write("</card>\n")

    cardsxml.write("</cards>\n</cockatrice_carddatabase>")

    if count > 0:
        print 'XML Stats for ' + code
        print 'Total cards: ' + str(count)
        if dfccount > 0:
            print 'DFC: ' + str(dfccount)
        print 'Newest: ' + str(newest)
    else:
        print 'Set ' + code + ' has no spoiled cards.'
|
||||
|
||||
|
||||
def write_combined_xml(mtgjson, setinfos):
    """Write out/spoiler.xml containing all sets in `mtgjson`
    ({setcode: set dict}) as a single Cockatrice card database.

    NOTE(review): Python 2 only (print statements, dict.has_key); the
    file handle is never close()d and `setinfos` is not used.
    """
    if not os.path.isdir('out/'):
        os.makedirs('out/')
    cardsxml = open('out/spoiler.xml', 'w+')
    cardsxml.truncate()
    cardsxml.write("<?xml version='1.0' encoding='UTF-8'?>\n"
                   "<cockatrice_carddatabase version='3'>\n")
    cardsxml.write("<!--\n created: " + datetime.datetime.utcnow().strftime("%a, %b %d %Y, %H:%M:%S") + " (UTC)"
                   + "\n by: Magic-Spoiler project @ https://github.com/Cockatrice/Magic-Spoiler\n -->\n")
    cardsxml.write("<sets>\n")
    # One <set> header per non-empty set.
    for setcode in mtgjson:
        setobj = mtgjson[setcode]
        if 'cards' in setobj and len(setobj['cards']) > 0:
            cardsxml.write("<set>\n<name>"
                           + setcode +
                           "</name>\n"
                           "<longname>"
                           + setobj['name'] +
                           "</longname>\n"
                           "<settype>"
                           + setobj['type'].title() +
                           "</settype>\n"
                           "<releasedate>"
                           + setobj['releaseDate'] +
                           "</releasedate>\n"
                           "</set>\n")
    cardsxml.write(
        "</sets>\n"
        "<cards>\n")
    count = 0      # cards written; first one is remembered as `newest`
    dfccount = 0   # double-faced cards seen
    newest = ''
    related = 0    # other-face name for DFCs, else falsy
    for setcode in mtgjson:
        setobj = mtgjson[setcode]
        for card in setobj["cards"]:
            # Skip the 'b' half of split/aftermath cards up front.
            if 'layout' in card and (card['layout'] == 'split' or card['layout'] == 'aftermath'):
                if 'b' in card["number"]:
                    continue
            if count == 0:
                newest = card["name"]
            count += 1
            name = card["name"]
            if card.has_key("manaCost"):
                manacost = card["manaCost"].replace('{', '').replace('}', '')
            else:
                manacost = ""
            # NOTE(review): toughness without power raises KeyError here.
            if card.has_key("power") or card.has_key("toughness"):
                if card["power"]:
                    pt = str(card["power"]) + "/" + str(card["toughness"])
                else:
                    pt = 0
            else:
                pt = 0
            if card.has_key("text"):
                text = card["text"]
            else:
                text = ""
            cardcmc = str(card['cmc'])
            cardtype = card["type"]
            if card.has_key("names"):
                if "layout" in card:
                    if card["layout"] != 'split' and card["layout"] != 'aftermath':
                        # Double-faced (or meld) pairing: note the other face.
                        if len(card["names"]) > 1:
                            if card["names"][0] == card["name"]:
                                related = card["names"][1]
                                text += '\n\n(Related: ' + \
                                    card["names"][1] + ')'
                                dfccount += 1
                            elif card['names'][1] == card['name']:
                                related = card["names"][0]
                                text += '\n\n(Related: ' + \
                                    card["names"][0] + ')'
                    else:
                        # Split/aftermath: merge the right half's data in.
                        for cardb in setobj['cards']:
                            if cardb['name'] == card["names"][1]:
                                cardtype += " // " + cardb['type']
                                manacost += " // " + \
                                    (cardb["manaCost"]).replace(
                                        '{', '').replace('}', '')
                                cardcmc += " // " + str(cardb["cmc"])
                                text += "\n---\n" + cardb["text"]
                                name += " // " + cardb['name']
                else:
                    print card["name"] + " has multiple names and no 'layout' key"

            # Cockatrice table row: 0 land, 1 other, 2 creature, 3 spells.
            tablerow = "1"
            if "Land" in cardtype:
                tablerow = "0"
            elif "Sorcery" in cardtype:
                tablerow = "3"
            elif "Instant" in cardtype:
                tablerow = "3"
            elif "Creature" in cardtype:
                tablerow = "2"

            # Safety net: never emit the 'b' half of a split-style card.
            if 'number' in card:
                if 'b' in card['number']:
                    if 'layout' in card:
                        if card['layout'] == 'split' or card['layout'] == 'aftermath':
                            # print "We're skipping " + card['name'] + " because it's the right side of a split card"
                            continue

            cardsxml.write("<card>\n")
            cardsxml.write("<name>" + name.encode('utf-8') + "</name>\n")
            cardsxml.write(
                '<set rarity="' + card['rarity'] + '" picURL="' + card["url"] + '">' + setcode + '</set>\n')
            if card.has_key('colors'):
                colorTranslate = {
                    "White": "W",
                    "Blue": "U",
                    "Black": "B",
                    "Red": "R",
                    "Green": "G"
                }
                for color in card['colors']:
                    cardsxml.write(
                        '<color>' + colorTranslate[color] + '</color>\n')
            if related:
                # for relatedname in related:
                cardsxml.write(
                    "<related>" + related.encode('utf-8') + "</related>\n")
                related = ''
            cardsxml.write(
                "<manacost>" + manacost.encode('utf-8') + "</manacost>\n")
            cardsxml.write("<cmc>" + cardcmc + "</cmc>\n")
            cardsxml.write("<type>" + cardtype.encode('utf-8') + "</type>\n")
            if pt:
                cardsxml.write("<pt>" + pt + "</pt>\n")
            cardsxml.write("<tablerow>" + tablerow + "</tablerow>\n")
            cardsxml.write("<text>" + text.encode('utf-8') + "</text>\n")
            # "cipt" = comes into play tapped.
            if name + ' enters the battlefield tapped' in text:
                cardsxml.write("<cipt>1</cipt>\n")
            if card.has_key('loyalty'):
                cardsxml.write(
                    "<loyalty>" + str(card['loyalty']) + "</loyalty>\n")
            cardsxml.write("</card>\n")

    cardsxml.write("</cards>\n</cockatrice_carddatabase>")

    print 'XML COMBINED STATS'
    print 'Total cards: ' + str(count)
    if dfccount > 0:
        print 'DFC: ' + str(dfccount)
    print 'Newest: ' + str(newest)
|
||||
|
||||
|
||||
def pretty_xml(infile):
    """Parse an XML file (path or file object) and return it re-indented
    as a string, with no extra newlines inserted between elements."""
    dom = xml.dom.minidom.parse(infile)
    return dom.toprettyxml(newl='')
|
||||
|
||||
|
||||
def make_allsets(AllSets, mtgjson, code):
    """Register `mtgjson` (one set's data) in the AllSets mapping under
    its set code, and return the mutated mapping."""
    AllSets.update({code: mtgjson})
    return AllSets
|
||||
|
||||
|
||||
def scrape_masterpieces(url='http://www.mtgsalvation.com/spoilers/181-amonkhet-invocations', mtgscardurl='http://www.mtgsalvation.com/cards/amonkhet-invocations/'):
    """Scrape masterpiece card names and image URLs from MTG Salvation.

    Fetches the spoiler list page, then each card's own page, and
    returns a list of {"name", "url"} dicts ("url" is "" when the card
    page has no spoiler image yet).
    """
    page = requests.get(url)
    tree = html.fromstring(page.content)
    cards = []
    cardstree = tree.xpath('//*[contains(@class, "log-card")]')
    for child in cardstree:
        # Per-card page URL: <base>/<data-card-id>-<name-with-dashes>
        childurl = mtgscardurl + \
            child.attrib['data-card-id'] + '-' + child.text.replace(' ', '-')
        cardpage = requests.get(childurl)
        tree = html.fromstring(cardpage.content)
        cardtree = tree.xpath('//img[contains(@class, "card-spoiler-image")]')
        # BUG FIX: was a bare `except:` (would also swallow e.g.
        # KeyboardInterrupt); only index/attribute lookups can fail here.
        try:
            cardurl = cardtree[0].attrib['src']
        except (IndexError, KeyError):
            cardurl = ''
        card = {
            "name": child.text,
            "url": cardurl
        }
        cards.append(card)
    return cards
|
||||
|
||||
|
||||
def make_masterpieces(headers, AllSets, spoil):
    """Build the masterpiece set dict for the set described by `headers`.

    For every scraped masterpiece, tries to reuse full card data from
    (1) the existing masterpiece set in AllSets, (2) any other set in
    AllSets, then (3) the current spoiler; falls back to the bare
    scraped {"name", "url"} dict when nothing matches.  Matched cards
    are forced to 'Mythic Rare' and given the masterpiece image URL.
    """
    masterpieces = scrape_masterpieces(
        headers['mtgsurl'], headers['mtgscardpath'])
    masterpieces2 = []
    for masterpiece in masterpieces:
        matched = False
        # Already present in the masterpiece set itself? Then keep as-is.
        if headers['code'] in AllSets:
            for oldMasterpiece in AllSets[headers['code']]['cards']:
                if masterpiece['name'] == oldMasterpiece['name']:
                    matched = True
        # BUG FIX: this loop variable was named `set`, shadowing the
        # builtin `set` type.
        for setname in AllSets:
            if not matched:
                for oldcard in AllSets[setname]['cards']:
                    if oldcard['name'] == masterpiece['name'] and not matched:
                        mixcard = oldcard
                        mixcard['url'] = masterpiece['url']
                        mixcard['rarity'] = 'Mythic Rare'
                        masterpieces2.append(mixcard)
                        matched = True
                        break
        for spoilcard in spoil['cards']:
            if not matched:
                if spoilcard['name'] == masterpiece['name']:
                    mixcard = spoilcard
                    mixcard['rarity'] = 'Mythic Rare'
                    mixcard['url'] = masterpiece['url']
                    masterpieces2.append(mixcard)
                    matched = True
                    break
        if not matched:
            # Parenthesized single-argument print behaves identically on
            # Python 2 and 3 (the original used a py2-only statement).
            print("We couldn't find a card object to assign the data to for masterpiece " + masterpiece['name'])
            masterpieces2.append(masterpiece)
    mpsjson = {
        "name": headers['name'],
        "alternativeNames": headers['alternativeNames'],
        "code": headers['code'],
        "releaseDate": headers['releaseDate'],
        "border": "black",
        "type": "masterpiece",
        "cards": masterpieces2
    }
    return mpsjson
|
||||
|
||||
|
||||
def set_has_cards(setinfo, manual_cards, mtgjson):
    """Return True when the set described by `setinfo` has any known cards.

    True when the set code is a key of `manual_cards` or `mtgjson`, or
    when any entry in ``manual_cards['cards']`` carries a matching 'set'
    field.  Falls through (returns None) otherwise — callers rely on
    truthiness only.
    """
    if setinfo['code'] in manual_cards or setinfo['code'] in mtgjson:
        return True
    # BUG FIX: the original tested `set in card` / `set == setinfo['code']`,
    # which referred to the *builtin* `set` type and could never match; the
    # intent is to compare each manual card's own 'set' field.
    for card in manual_cards['cards']:
        if 'set' in card and card['set'] == setinfo['code']:
            return True
|
||||
|
||||
|
||||
def get_allsets():
    """Download and parse the full AllSets card database from mtgjson.com.

    A browser-like user-agent header is sent because the site rejects
    default library user agents.
    """
    browser_headers = {'user-agent': 'Mozilla/5.0 (Windows; U; Windows NT 5.1; it; rv:1.8.1.11) Gecko / 20071127 Firefox / 2.0.0.11'}
    response = requests.get('http://mtgjson.com/json/AllSets.json', headers=browser_headers)
    return json.loads(response.text)
|
||||
|
||||
|
||||
def add_headers(mtgjson, setinfos):
    """Wrap a card list in a full mtgjson-style set dict using the
    metadata from `setinfos` (code, name, releaseDate, type, and
    optionally 'noBooster' and 'block').

    BUG FIX: the booster assignment previously ended with a stray
    trailing comma, which made ``mtgjson2['booster']`` a one-element
    *tuple* containing the list rather than the list itself.
    """
    mtgjson2 = {
        "border": "black",
        "code": setinfos['code'],
        "name": setinfos['name'],
        "releaseDate": setinfos['releaseDate'],
        "type": setinfos['type'],
        "cards": mtgjson['cards']
    }
    if not 'noBooster' in setinfos:
        # Standard 15-card booster: 1 rare/mythic slot, 3 uncommons,
        # 10 commons, a basic land and a marketing card.
        mtgjson2['booster'] = [
            [
                "rare",
                "mythic rare"
            ],
            "uncommon",
            "uncommon",
            "uncommon",
            "common",
            "common",
            "common",
            "common",
            "common",
            "common",
            "common",
            "common",
            "common",
            "common",
            "land",
            "marketing"
        ]
    if 'block' in setinfos:
        mtgjson2['block'] = setinfos['block']
    return mtgjson2
|
||||
56
tox.ini
Normal file
56
tox.ini
Normal file
|
|
@ -0,0 +1,56 @@
|
|||
[tox]
|
||||
envlist = isort-inplace, black-inplace, mypy, lint
|
||||
|
||||
[testenv]
|
||||
basepython = python3.7
|
||||
deps = -r{toxinidir}/requirements.txt
|
||||
-r{toxinidir}/requirements_test.txt
|
||||
setenv = PYTHONPATH = {toxinidir}
|
||||
passenv = PYTHONPATH
|
||||
|
||||
[testenv:black-inplace]
|
||||
description = Run black and edit all files in place
|
||||
skip_install = True
|
||||
deps = black
|
||||
commands = black magic_spoiler/
|
||||
|
||||
# Active Tests
|
||||
[testenv:yapf-inplace]
|
||||
description = Run yapf and edit all files in place
|
||||
skip_install = True
|
||||
deps = yapf
|
||||
commands = yapf --in-place --recursive --parallel magic_spoiler/
|
||||
|
||||
[testenv:mypy]
|
||||
description = mypy static type checking only
|
||||
deps = mypy
|
||||
commands = mypy {posargs:magic_spoiler/}
|
||||
|
||||
[testenv:lint]
|
||||
description = Run linting tools
|
||||
deps = pylint
|
||||
commands = pylint magic_spoiler/ --rcfile=.pylintrc
|
||||
|
||||
# Inactive Tests
|
||||
[testenv:yapf-check]
|
||||
description = Dry-run yapf to see if reformatting is needed
|
||||
skip_install = True
|
||||
deps = yapf
|
||||
# TODO make it error exit if there's a diff
|
||||
commands = yapf --diff --recursive --parallel magic_spoiler/
|
||||
|
||||
[testenv:isort-check]
|
||||
description = dry-run isort to see if imports need resorting
|
||||
deps = isort
|
||||
commands = isort --check-only
|
||||
|
||||
[testenv:isort-inplace]
|
||||
description = Sort imports
|
||||
deps = isort
|
||||
commands = isort -rc magic_spoiler/
|
||||
|
||||
[testenv:unit]
|
||||
description = Run unit tests with coverage and mypy type checking
|
||||
extras = dev
|
||||
deps = pytest
|
||||
commands = pytest --cov=magic_spoiler {posargs:tests/}
|
||||
|
|
@ -1,24 +0,0 @@
|
|||
import yaml
|
||||
import sys
|
||||
|
||||
def load_file(input_file, lib_to_use):
    """Load and parse a YAML input file.

    `lib_to_use` selects the parser: 'yaml' for a single-document file,
    'yaml_multi' for a multi-document stream (returned as a list of
    documents).  Exits the process with a message when the file cannot
    be read or parsed.
    """
    try:
        with open(input_file) as data_file:
            if lib_to_use == 'yaml':
                output_file = yaml.safe_load(data_file)
            elif lib_to_use == 'yaml_multi':
                # list() replaces the manual append loop.
                output_file = list(yaml.safe_load_all(data_file))
            return output_file
    except Exception as ex:
        # Parenthesized single-argument print behaves identically on
        # Python 2 and 3 (the original used a py2-only statement).
        print("Unable to load file: " + input_file + "\nException information:\n" + str(ex.args))
        sys.exit("Unable to load file: " + input_file)
|
||||
|
||||
# Pre-flight smoke test: load every YAML input file so a broken file
# fails fast here instead of mid-pipeline.
if __name__ == '__main__':
    setinfos = load_file('set_info.yml','yaml_multi')
    manual_sets = load_file('cards_manual.yml','yaml')
    card_corrections = load_file('cards_corrections.yml','yaml')
    delete_cards = load_file('cards_delete.yml','yaml')

    print "Pre-flight: All input files loaded successfully."
|
||||
|
|
@ -1,269 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
import requests
|
||||
from lxml import html
|
||||
from PIL import Image
|
||||
import os
|
||||
|
||||
|
||||
def scrape_fullspoil(url="http://magic.wizards.com/en/articles/archive/card-image-gallery/hour-devastation", setinfo={"code": "HOU"}, showRarityColors=False, showFrameColors=False, manual_cards=[], delete_cards=[]):
    """Scrape the WotC card-image gallery for a set and return
    {"cards": [...]} with name/img/url plus rarity and mana data
    derived from the downloaded images.

    NOTE(review): mutable default arguments (`setinfo`, `manual_cards`,
    `delete_cards`) are shared across calls; Python 2 print statement
    below.
    """
    # Derive the gallery URL from the set name when one is provided.
    if 'name' in setinfo:
        url = 'http://magic.wizards.com/en/articles/archive/card-image-gallery/' + setinfo['name'].lower().replace('of', '').replace(
            ' ', ' ').replace(' ', '-')
    page = requests.get(url)
    tree = html.fromstring(page.content)
    cards = []
    cardtree = tree.xpath('//*[@id="content-detail-page-of-an-article"]')
    for child in cardtree:
        cardElements = child.xpath('//*/p/img')
        cardcount = 0
        for cardElement in cardElements:
            # alt text carries the card name (with curly apostrophes);
            # split cards are listed as "A /// B" — keep the first half.
            card = {
                "name": cardElement.attrib['alt'].replace(u"\u2019", '\'').split(' /// ')[0],
                "img": cardElement.attrib['src']
            }
            card["url"] = card["img"]
            #card["cmc"] = 0
            #card["manaCost"] = ""
            #card["type"] = ""
            #card["types"] = []
            #card["text"] = ""
            #card["colorIdentity"] = [""]

            # if card['name'] in split_cards:
            #     card["names"] = [card['name'], split_cards[card['name']]]
            #     card["layout"] = "split"
            #notSplit = True
            # for backsplit in split_cards:
            #     if card['name'] == split_cards[backsplit]:
            #         notSplit = False
            # if not card['name'] in delete_cards:
            cards.append(card)
            cardcount += 1
    fullspoil = {"cards": cards}
    print "Spoil Gallery has " + str(cardcount) + " cards."
    # Image-analysis passes fill in rarity and mana symbols.
    download_images(fullspoil['cards'], setinfo['code'])
    fullspoil = get_rarities_by_symbol(fullspoil, setinfo['code'])
    fullspoil = get_mana_symbols(fullspoil, setinfo['code'])
    #fullspoil = get_colors_by_frame(fullspoil, setinfo['code'])
    return fullspoil
|
||||
|
||||
|
||||
def get_rarities_by_symbol(fullspoil, setcode):
    """Guess each card's rarity from the color of its set symbol.

    Crops the set-symbol region out of the downloaded card image,
    averages its RGB histogram, and picks the rarity whose reference
    color is closest.  Cards whose best match is still far off are
    assumed to be mythic (e.g. planeswalker frames).
    """
    # Crop box (left, upper, right, lower) of the set symbol on a
    # normal card frame — split cards use a different box below.
    symbolPixels = (240, 219, 242, 221)
    highVariance = 15
    # Reference average [R, G, B] per rarity symbol color.
    colorAverages = {
        "Common": [30, 27, 28],
        "Uncommon": [121, 155, 169],
        "Rare": [166, 143, 80],
        "Mythic Rare": [201, 85, 14]
    }
    symbolCount = 0  # debug: save the first 10 cropped symbols to disk
    for card in fullspoil['cards']:
        # NOTE(review): bare except silently skips cards whose image is
        # missing or unreadable; the trailing `pass` is unreachable.
        try:
            cardImage = Image.open(
                'images/' + setcode + '/' + card['name'].replace(' // ', '') + '.jpg')
        except:
            continue
            pass
        if '//' in card['name']:
            setSymbol = cardImage.crop((240, 138, 242, 140))
        else:
            setSymbol = cardImage.crop(symbolPixels)
        # Average each RGB channel over the cropped region.
        cardHistogram = setSymbol.histogram()
        reds = cardHistogram[0:256]
        greens = cardHistogram[256:256 * 2]
        blues = cardHistogram[256 * 2: 256 * 3]
        reds = sum(i * w for i, w in enumerate(reds)) / sum(reds)
        greens = sum(i * w for i, w in enumerate(greens)) / sum(greens)
        blues = sum(i * w for i, w in enumerate(blues)) / sum(blues)
        # Pick the rarity with the smallest total channel distance.
        variance = 768
        for color in colorAverages:
            colorVariance = 0
            colorVariance = colorVariance + \
                abs(colorAverages[color][0] - reds)
            colorVariance = colorVariance + \
                abs(colorAverages[color][1] - greens)
            colorVariance = colorVariance + \
                abs(colorAverages[color][2] - blues)
            if colorVariance < variance:
                variance = colorVariance
                card['rarity'] = color
        if variance > highVariance:
            # if a card isn't close to any of the colors, it's probably a planeswalker? make it mythic.
            print card['name'], 'has high variance of', variance, ', closest rarity is', card['rarity']
            card['rarity'] = "Mythic Rare"
        # print card['name'], '$', reds, greens, blues
        if symbolCount < 10:
            setSymbol.save(
                'images/' + card['name'].replace(' // ', '') + '.symbol.jpg')
            symbolCount += 1
    return fullspoil
|
||||
|
||||
|
||||
def get_colors_by_frame(fullspoil, setcode):
    """Guess each card's color identity from the average color of its frame.

    Crops a strip of the top card frame from every downloaded image in
    images/<setcode>/, averages the RGB channels, and assigns the color
    category whose reference average is closest.

    Mutates and returns fullspoil: each card dict gains a 'colors' key
    holding a single-element list.
    """
    # Pixel box sampled from the top of the card frame.
    framePixels = (20, 11, 76, 16)
    # Reference average [R, G, B] of the frame per color category.
    # (Unused locals `highVariance` and `symbolCount` from the original
    # were removed -- this function never checked a variance threshold.)
    colorAverages = {
        "White": [231, 225, 200],
        "Blue": [103, 193, 230],
        "Black": [58, 61, 54],
        "Red": [221, 122, 101],
        "Green": [118, 165, 131],
        "Multicolor": [219, 200, 138],
        "Artifact": [141, 165, 173],
        "Colorless": [216, 197, 176],
    }
    for card in fullspoil['cards']:
        try:
            cardImage = Image.open(
                'images/' + setcode + '/' + card['name'].replace(' // ', '') + '.jpg')
        except IOError:
            # Image was never downloaded (or is unreadable); skip the card.
            # (Was a bare `except:` followed by an unreachable `pass`.)
            continue
        cardColor = cardImage.crop(framePixels)
        cardHistogram = cardColor.histogram()
        reds = cardHistogram[0:256]
        greens = cardHistogram[256:256 * 2]
        blues = cardHistogram[256 * 2: 256 * 3]
        # Weighted average intensity per channel (`//` preserves the
        # original Python 2 integer division under Python 3).
        reds = sum(i * w for i, w in enumerate(reds)) // sum(reds)
        greens = sum(i * w for i, w in enumerate(greens)) // sum(greens)
        blues = sum(i * w for i, w in enumerate(blues)) // sum(blues)
        variance = 768  # larger than any possible total RGB distance
        for color in colorAverages:
            colorVariance = (abs(colorAverages[color][0] - reds)
                             + abs(colorAverages[color][1] - greens)
                             + abs(colorAverages[color][2] - blues))
            if colorVariance < variance:
                variance = colorVariance
                card['colors'] = [color]
    return fullspoil
|
||||
|
||||
|
||||
def get_mana_symbols(fullspoil=None, setcode="HOU"):
    """Read each card's mana cost off its image by sampling symbol colors.

    Samples up to five fixed mana-symbol boxes (rightmost symbol first) on
    every downloaded image in images/<setcode>/ and matches each sample
    against reference average colors.  The "2" and "5" symbols average out
    nearly identically, so a second, tighter crop disambiguates them.
    Unrecognized samples are skipped.

    Mutates and returns fullspoil: each card dict gains a 'manaCost' string.
    """
    # BUG FIX: the default used to be a shared mutable dict ({}) which is
    # unsafe across calls and would KeyError on 'cards' anyway; use a None
    # sentinel and substitute an empty spoiler instead.
    if fullspoil is None:
        fullspoil = {'cards': []}
    # Mana symbol pixel boxes, rightmost symbol first.
    manaBoxes = [(234, 23, 244, 33), (220, 23, 230, 33),
                 (206, 23, 216, 33), (192, 23, 202, 33), (178, 23, 188, 33)]
    # Reference average [R, G, B] per mana symbol.
    colorAverages = {
        "W": [126, 123, 110],
        "U": [115, 140, 151],
        "B": [105, 99, 98],
        "R": [120, 89, 77],
        "G": [65, 78, 69],
        "1": [162, 156, 154],
        "2": [155, 148, 147],
        "3": [160, 153, 152],
        "4": [149, 143, 141],
        "5": [155, 149, 147],
        "6": [151, 145, 143],
        "7": [169, 163, 161],
        "X": [160, 154, 152]
    }
    for card in fullspoil['cards']:
        try:
            cardImage = Image.open(
                'images/' + setcode + '/' + card['name'].replace(' // ', '') + '.jpg')
        except IOError:
            # Image was never downloaded (or is unreadable); skip the card.
            # (Was a bare `except:` followed by an unreachable `pass`.)
            continue
        card['manaCost'] = ""
        for manaBox in manaBoxes:
            manaSymbol = cardImage.crop(manaBox)
            cardHistogram = manaSymbol.histogram()
            reds = cardHistogram[0:256]
            greens = cardHistogram[256:256 * 2]
            blues = cardHistogram[256 * 2: 256 * 3]
            # Weighted average intensity per channel (`//` preserves the
            # original Python 2 integer division under Python 3).
            reds = sum(i * w for i, w in enumerate(reds)) // sum(reds)
            greens = sum(i * w for i, w in enumerate(greens)) // sum(greens)
            blues = sum(i * w for i, w in enumerate(blues)) // sum(blues)
            variance = 768  # larger than any possible total RGB distance
            closestColor = None
            for color in colorAverages:
                colorVariance = (abs(colorAverages[color][0] - reds)
                                 + abs(colorAverages[color][1] - greens)
                                 + abs(colorAverages[color][2] - blues))
                if colorVariance < variance:
                    variance = colorVariance
                    closestColor = color
            # Only accept confident matches (total RGB distance < 10).
            if variance < 10:
                if closestColor in ["2", "5"]:
                    # "2" and "5" are too close on the full box; sample a
                    # tighter crop inside the symbol to tell them apart.
                    twoVSfive = (
                        manaBox[0] + 1, manaBox[1] + 4, manaBox[2] - 5, manaBox[3] - 2)
                    manaSymbol = cardImage.crop(twoVSfive)
                    cardHistogram = manaSymbol.histogram()
                    reds = cardHistogram[0:256]
                    greens = cardHistogram[256:256 * 2]
                    blues = cardHistogram[256 * 2: 256 * 3]
                    reds = sum(i * w for i, w in enumerate(reds)) // sum(reds)
                    greens = sum(
                        i * w for i, w in enumerate(greens)) // sum(greens)
                    blues = sum(i * w for i, w in enumerate(blues)) // sum(blues)
                    # Compare against the hand-tuned "2" reference color.
                    colorVariance = (abs(175 - reds)
                                     + abs(168 - greens)
                                     + abs(166 - blues))
                    if colorVariance < 10:
                        closestColor = "2"
                    elif colorVariance > 110 and colorVariance < 120:
                        closestColor = "5"
                    else:
                        # Neither match is confident; skip this symbol.
                        continue
                # Boxes run right-to-left, so prepend to build the cost.
                card['manaCost'] = closestColor + card['manaCost']
    return fullspoil
|
||||
|
||||
|
||||
def smash_fullspoil(mtgjson, fullspoil):
    """Report differences between the mtgjson and WOTC fullspoil card lists.

    Prints the names of cards that appear only in fullspoil, and a dict of
    per-card keys whose values differ between the two sources (the 'colors'
    key is deliberately ignored).  Purely diagnostic; returns None.
    """
    different_keys = {}
    for mtgjson_card in mtgjson['cards']:
        for fullspoil_card in fullspoil['cards']:
            if mtgjson_card['name'] == fullspoil_card['name']:
                for key in fullspoil_card:
                    if key in mtgjson_card:
                        if mtgjson_card[key] != fullspoil_card[key] and key != 'colors':
                            if not fullspoil_card['name'] in different_keys:
                                different_keys[fullspoil_card['name']] = {
                                    key: fullspoil_card[key]}
                            else:
                                different_keys[fullspoil_card['name']
                                               ][key] = fullspoil_card[key]
    # BUG FIX: WOTC_only was re-initialized on every iteration of the loop
    # below, so at most the final card could ever be reported; initialize
    # the accumulator once, before the loop.
    WOTC_only = []
    for fullspoil_card in fullspoil['cards']:
        match = False
        for mtgjson_card in mtgjson['cards']:
            if mtgjson_card['name'] == fullspoil_card['name']:
                match = True
        if not match:
            WOTC_only.append(fullspoil_card['name'])
    if len(WOTC_only) > 0:
        # Single-argument print(...) prints identically on Python 2 and 3.
        print("WOTC only cards: ")
        print(WOTC_only)
    print(different_keys)
|
||||
|
||||
|
||||
def download_images(mtgjson, setcode):
    """Download every card image listed in mtgjson into images/<setcode>/.

    Accepts either a full spoiler dict ({'cards': [...]}) or a bare list of
    card dicts.  Cards already present on disk are skipped; split-card
    names have the ' // ' separator stripped for the filename.  Cards
    without a (truthy) 'url' entry are ignored.
    """
    if not os.path.isdir('images/' + setcode):
        os.makedirs('images/' + setcode)
    if 'cards' in mtgjson:
        jsoncards = mtgjson['cards']
    else:
        jsoncards = mtgjson
    for card in jsoncards:
        # .get() so cards lacking a 'url' key don't raise KeyError.
        if card.get('url'):
            path = 'images/' + setcode + '/' + \
                card['name'].replace(' // ', '') + '.jpg'
            if os.path.isfile(path):
                continue
            # BUG FIX: the original called requests.get(url, path), which
            # passes the file path as the `params` query-string argument and
            # never writes anything to disk.  Fetch the image and save the
            # response body explicitly.
            response = requests.get(card['url'])
            with open(path, 'wb') as imagefile:
                imagefile.write(response.content)
|
||||
Loading…
Reference in New Issue
Block a user