mirror of
https://github.com/Cockatrice/Magic-Spoiler.git
synced 2026-03-22 02:06:17 -05:00
Compare commits
448 Commits
v0.0613.00
...
master
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
4392c34919 | ||
|
|
f8cdbc9a7f | ||
|
|
4cef743e46 | ||
|
|
2f76311165 | ||
|
|
1060f659a5 | ||
|
|
2f23aa93c6 | ||
|
|
34068d4d5e | ||
|
|
daa0f09a5e | ||
|
|
85e4bd093b | ||
|
|
257282374f | ||
|
|
f3ac505dac | ||
|
|
10aceba7a9 | ||
|
|
6a14f8889e | ||
|
|
04150f692e | ||
|
|
d74a762f35 | ||
|
|
cae671b37f | ||
|
|
6d0ca400be | ||
|
|
0b4b3bad0f | ||
|
|
d13dc6b799 | ||
|
|
4d049b92dc | ||
|
|
8459b3bfdd | ||
|
|
b7c6f5e896 | ||
|
|
7678fadeca | ||
|
|
0efd986a16 | ||
|
|
c7a7a66523 | ||
|
|
c619c4ec0b | ||
|
|
ab0ad6f6bf | ||
|
|
bdb0c5367e | ||
|
|
35c063b909 | ||
|
|
2f5bcd39c5 | ||
|
|
fee97dcfa9 | ||
|
|
9df6be76c8 | ||
|
|
3dd54f59d8 | ||
|
|
5e716ecdfd | ||
|
|
4681501866 | ||
|
|
9eab59de79 | ||
|
|
4692b6230e | ||
|
|
6a92707354 | ||
|
|
ef39de857f | ||
|
|
c4764f3c17 | ||
|
|
e9d8f09136 | ||
|
|
8deb35f745 | ||
|
|
bd910e37de | ||
|
|
bb7980bec2 | ||
|
|
f17023cf47 | ||
|
|
bb0e3b89ea | ||
|
|
c0d726a9e2 | ||
|
|
a49a417e02 | ||
|
|
32fc4ef4fc | ||
|
|
5d94c7a3d5 | ||
|
|
c8b5317293 | ||
|
|
5e6d9ad5f7 | ||
|
|
a33e90021b | ||
|
|
d234a9ccef | ||
|
|
13023af780 | ||
|
|
433f469140 | ||
|
|
b6b518b1d9 | ||
|
|
9697c74d08 | ||
|
|
bf02be35c8 | ||
|
|
0610f60c9f | ||
|
|
8afb4cee34 | ||
|
|
d54f412793 | ||
|
|
1adfed5fc6 | ||
|
|
ef0ced8d69 | ||
|
|
929a04c915 | ||
|
|
8bceceae6e | ||
|
|
bcfb1038c7 | ||
|
|
47c3b43963 | ||
|
|
764ff7203c | ||
|
|
6fd325b176 | ||
|
|
4791c9e2d2 | ||
|
|
b18bc002d9 | ||
|
|
4db482a3c8 | ||
|
|
8a048f0770 | ||
|
|
4dc0fb8761 | ||
|
|
15ae2677cd | ||
|
|
ff54e754c8 | ||
|
|
d365ca0375 | ||
|
|
bb2d56df14 | ||
|
|
26e1e22ad0 | ||
|
|
4ea02869be | ||
|
|
31ba735d95 | ||
|
|
0f4ec17eb6 | ||
|
|
103279671d | ||
|
|
64c7ba430f | ||
|
|
4b25618657 | ||
|
|
c24ab7574d | ||
|
|
b5d1d938a8 | ||
|
|
a242ebb1d1 | ||
|
|
a73195f022 | ||
|
|
fd65e743cc | ||
|
|
5dd5d37680 | ||
|
|
7c791d4f01 | ||
|
|
9e73420094 | ||
|
|
9481e5ff24 | ||
|
|
91a615f77d | ||
|
|
37f2d8e19b | ||
|
|
db458c19b7 | ||
|
|
7287cb5fbc | ||
|
|
7a138354f4 | ||
|
|
da60525795 | ||
|
|
a4bdf439ec | ||
|
|
aec3dd7618 | ||
|
|
8cfbdb7d87 | ||
|
|
7fb119e52e | ||
|
|
9f3d1839e9 | ||
|
|
ce18105f1f | ||
|
|
e49f6a46cd | ||
|
|
a1e2ac1893 | ||
|
|
eeb03ff0cd | ||
|
|
b29407d718 | ||
|
|
f34decf10a | ||
|
|
33896b5a78 | ||
|
|
54fc3aae83 | ||
|
|
69fac381f9 | ||
|
|
a54349b037 | ||
|
|
4458316d62 | ||
|
|
c136e4122e | ||
|
|
f4e6d80e89 | ||
|
|
66b2ff35cc | ||
|
|
65d4d5afdb | ||
|
|
c0a584bfbd | ||
|
|
c28882d0fd | ||
|
|
d3c20d34d1 | ||
|
|
9762c04041 | ||
|
|
a2865b2de0 | ||
|
|
557fd2a4dc | ||
|
|
c09a073fe1 | ||
|
|
d57d0c9278 | ||
|
|
ed895ebf39 | ||
|
|
b2891a2808 | ||
|
|
968e8fff47 | ||
|
|
e148d8d186 | ||
|
|
1d6dcc0e07 | ||
|
|
b6d3875527 | ||
|
|
c7da69bfab | ||
|
|
5dff95b3df | ||
|
|
0f794b243e | ||
|
|
5028791018 | ||
|
|
7b2c9801c7 | ||
|
|
5c646eb10b | ||
|
|
871d39d56b | ||
|
|
a497d7e81a | ||
|
|
297408ca83 | ||
|
|
ad229b3227 | ||
|
|
c7a64d89e6 | ||
|
|
83d04f0242 | ||
|
|
3eb796cbff | ||
|
|
9025ac85ee | ||
|
|
50f9a0f421 | ||
|
|
d983fab920 | ||
|
|
b9d2034948 | ||
|
|
cb88104849 | ||
|
|
c9fc1b2447 | ||
|
|
f9951aedb9 | ||
|
|
5629d02f3c | ||
|
|
1a7f698e58 | ||
|
|
84b24026e5 | ||
|
|
68c7150ac1 | ||
|
|
b2c858f4c6 | ||
|
|
e882cd6500 | ||
|
|
77744522b3 | ||
|
|
3538edc9cd | ||
|
|
9cefe75576 | ||
|
|
510fb05931 | ||
|
|
15ca5a4721 | ||
|
|
0da1b24cdc | ||
|
|
68ef367ea9 | ||
|
|
2f06dcc95d | ||
|
|
20212b176f | ||
|
|
b45f57f5c6 | ||
|
|
3b01ffbec8 | ||
|
|
6480752ede | ||
|
|
0b021b377f | ||
|
|
aad0479ca1 | ||
|
|
9c1ace825f | ||
|
|
667b49019f | ||
|
|
34c84e0b16 | ||
|
|
3292ebc731 | ||
|
|
def50915bc | ||
|
|
7c0cec2749 | ||
|
|
6ed303f75e | ||
|
|
f6af154852 | ||
|
|
be008917ca | ||
|
|
12a2b48a90 | ||
|
|
5cda79a339 | ||
|
|
0f6766945d | ||
|
|
8e65cfe357 | ||
|
|
28f19a2071 | ||
|
|
73b7c933a3 | ||
|
|
91cf650082 | ||
|
|
a896fef12e | ||
|
|
075dd7a40d | ||
|
|
4767cdffc9 | ||
|
|
41011824b6 | ||
|
|
4b11546a8a | ||
|
|
d83827b4de | ||
|
|
0cb9bb1948 | ||
|
|
4a79afc0de | ||
|
|
b419569752 | ||
|
|
00c4a45663 | ||
|
|
e2906c31da | ||
|
|
039a179b41 | ||
|
|
69cbce049c | ||
|
|
b2c33879cb | ||
|
|
2826ce15b9 | ||
|
|
0a7ad363f6 | ||
|
|
250dcc10a6 | ||
|
|
d089a398d3 | ||
|
|
5364e9fb95 | ||
|
|
bd81ece2dc | ||
|
|
0a7561970e | ||
|
|
1a6dbebdb5 | ||
|
|
36c4eb7540 | ||
|
|
292c7fc432 | ||
|
|
711a349e9e | ||
|
|
f1bb2c2d36 | ||
|
|
be97d7d075 | ||
|
|
32f2cc48dc | ||
|
|
e31191b7e0 | ||
|
|
050711b393 | ||
|
|
7cf711f8f1 | ||
|
|
c6fcc11cdd | ||
|
|
f31f5f665c | ||
|
|
6259101c3c | ||
|
|
624b586014 | ||
|
|
85eb8ae9cc | ||
|
|
9e5419fb46 | ||
|
|
b65c812c02 | ||
|
|
875884fc9b | ||
|
|
dec0718b83 | ||
|
|
b82a91f1e7 | ||
|
|
18ce0e2062 | ||
|
|
fdb26636a2 | ||
|
|
b4f829a9e7 | ||
|
|
626457372b | ||
|
|
19de03adce | ||
|
|
f95b4bfd84 | ||
|
|
21f66c65b6 | ||
|
|
5b20149518 | ||
|
|
0ab2e3ebce | ||
|
|
2957b1adc5 | ||
|
|
38ab12a93b | ||
|
|
be76183d14 | ||
|
|
432ba1d028 | ||
|
|
3279ea743e | ||
|
|
c90940d91a | ||
|
|
f81be4e592 | ||
|
|
512ea06646 | ||
|
|
e07a61781e | ||
|
|
b997e04b4c | ||
|
|
8433cf900b | ||
|
|
09044c54ee | ||
|
|
10b5ce3698 | ||
|
|
275cedf42b | ||
|
|
027493584a | ||
|
|
4799e92600 | ||
|
|
ab6d495839 | ||
|
|
85f5d4043d | ||
|
|
a4a71e9779 | ||
|
|
d90d05b942 | ||
|
|
93b65146ec | ||
|
|
7243182be3 | ||
|
|
2f5ccb4887 | ||
|
|
30d2ce5d82 | ||
|
|
a791946828 | ||
|
|
075faa71ac | ||
|
|
0a258379fb | ||
|
|
2f54837d88 | ||
|
|
d09248b837 | ||
|
|
0bc5aca633 | ||
|
|
3849e977a0 | ||
|
|
66871a12f2 | ||
|
|
78f5dd2df9 | ||
|
|
dea584cb35 | ||
|
|
b1904667e4 | ||
|
|
fa10639000 | ||
|
|
0fd2ead70d | ||
|
|
78776169ab | ||
|
|
50294c8f30 | ||
|
|
da4a8ba28b | ||
|
|
16bbda2c22 | ||
|
|
9545f30b99 | ||
|
|
5368ea3368 | ||
|
|
fd228f72e0 | ||
|
|
9e8c20fd06 | ||
|
|
37d12eb5d9 | ||
|
|
1d443142ac | ||
|
|
01fcaec7c9 | ||
|
|
1b549638dc | ||
|
|
1f0f3129a0 | ||
|
|
9d00b4f05a | ||
|
|
00dd79bf14 | ||
|
|
9693dad628 | ||
|
|
c036cefbf7 | ||
|
|
d042f71009 | ||
|
|
bbf48cf077 | ||
|
|
588072b9b2 | ||
|
|
4dd80ac308 | ||
|
|
77ec60570e | ||
|
|
ff26d45bd0 | ||
|
|
0293efcea0 | ||
|
|
0c6b894f07 | ||
|
|
c98dd74e48 | ||
|
|
d64c1f7c48 | ||
|
|
9d5f723fc8 | ||
|
|
e4a5b51307 | ||
|
|
1e52497bc3 | ||
|
|
39a6ba1464 | ||
|
|
f561e483aa | ||
|
|
0b0c19ca06 | ||
|
|
2b41643255 | ||
|
|
b1693f3c30 | ||
|
|
a58e8fac01 | ||
|
|
a3a3fc74e3 | ||
|
|
2457a5bd34 | ||
|
|
88c93c013d | ||
|
|
529a4da3d9 | ||
|
|
f74bd35c8d | ||
|
|
39f6243846 | ||
|
|
846a41d2e2 | ||
|
|
4d62dcf946 | ||
|
|
291efdb19e | ||
|
|
c13a719944 | ||
|
|
9004f6d285 | ||
|
|
04e6a1892f | ||
|
|
3ee6aa3842 | ||
|
|
dc9b9b7a48 | ||
|
|
599aaee733 | ||
|
|
c40355f0fb | ||
|
|
5b987d28cf | ||
|
|
10aed51df3 | ||
|
|
146db4f741 | ||
|
|
8900c1f8af | ||
|
|
1dd538d5a1 | ||
|
|
2af17727a4 | ||
|
|
fc916a58f4 | ||
|
|
d99d2f7c91 | ||
|
|
9862add338 | ||
|
|
80c09b1e88 | ||
|
|
91cc9559e5 | ||
|
|
626fa6be9d | ||
|
|
324fcb6bbb | ||
|
|
7bde6d0ea8 | ||
|
|
f320660a22 | ||
|
|
5c5699d776 | ||
|
|
cebc4ffd45 | ||
|
|
86bfa1f0de | ||
|
|
d6853982e9 | ||
|
|
fe50a05225 | ||
|
|
4e43b90156 | ||
|
|
d8d31f4aab | ||
|
|
35f5891253 | ||
|
|
07d5a8a57a | ||
|
|
876d3a800f | ||
|
|
82186fdf2e | ||
|
|
9f896e1a0a | ||
|
|
6b440d6565 | ||
|
|
5b0ef041ab | ||
|
|
e40a7063b8 | ||
|
|
3caffc6420 | ||
|
|
4043729cc2 | ||
|
|
d14320b07d | ||
|
|
88501f8ce2 | ||
|
|
883af49ac8 | ||
|
|
813ded7802 | ||
|
|
ea7c9228be | ||
|
|
c32b280391 | ||
|
|
fe96df161d | ||
|
|
869ba84a19 | ||
|
|
ef744d82ed | ||
|
|
0f77039fe7 | ||
|
|
780a7c7715 | ||
|
|
ec7270c524 | ||
|
|
ee61c68cba | ||
|
|
1ad19e2d30 | ||
|
|
c5ca8dd879 | ||
|
|
be0d113435 | ||
|
|
248e09219e | ||
|
|
3efba33849 | ||
|
|
55fe07b63b | ||
|
|
5f56b74952 | ||
|
|
498cfffe42 | ||
|
|
60279ab3f9 | ||
|
|
ba4fdfc9f2 | ||
|
|
82a74439b8 | ||
|
|
82ae195d31 | ||
|
|
6d21eb5ca8 | ||
|
|
ab75dd2104 | ||
|
|
be0a51267c | ||
|
|
82836d7141 | ||
|
|
3f83451020 | ||
|
|
09b8db64aa | ||
|
|
1611302326 | ||
|
|
eba81f48d7 | ||
|
|
839ce66d3a | ||
|
|
64c9a15bc3 | ||
|
|
6e9e0b4afa | ||
|
|
0f2d772821 | ||
|
|
b471f1073d | ||
|
|
3572607c47 | ||
|
|
5d48d567c3 | ||
|
|
75421ddd2d | ||
|
|
c68da7ccab | ||
|
|
45de69d330 | ||
|
|
47234b17d2 | ||
|
|
45d2233058 | ||
|
|
f7e71f4ca1 | ||
|
|
a70b518f30 | ||
|
|
4f49559751 | ||
|
|
8d96f7070f | ||
|
|
ba87735c6a | ||
|
|
9f51dce940 | ||
|
|
fbdee30d65 | ||
|
|
ef4f2c7377 | ||
|
|
4ee8c132eb | ||
|
|
9fbd0c37f8 | ||
|
|
3084417241 | ||
|
|
6a9ef7bfd0 | ||
|
|
094ca85029 | ||
|
|
aac7664caf | ||
|
|
b426dc2662 | ||
|
|
f8c97d551e | ||
|
|
9f1b096e0c | ||
|
|
8cdcfa6b51 | ||
|
|
b914c445d1 | ||
|
|
f03a49b2bf | ||
|
|
b170d6292e | ||
|
|
d793ac5bbe | ||
|
|
5e07f55a2c | ||
|
|
1bf91f9424 | ||
|
|
258384990f | ||
|
|
d83adfe084 | ||
|
|
71663e2adc | ||
|
|
edd0afdf77 | ||
|
|
d4ca443300 | ||
|
|
3aa5061712 | ||
|
|
aa7ea2270b | ||
|
|
b6a1204c79 | ||
|
|
534f197f18 | ||
|
|
52b0b42cb3 | ||
|
|
96da921b98 | ||
|
|
74f6583dd5 | ||
|
|
45f55a8a28 | ||
|
|
3f976fe9d5 | ||
|
|
f7d1f54784 | ||
|
|
4c882f3597 | ||
|
|
eb4ef2a0c2 |
30
.github/CONTRIBUTING.md
vendored
30
.github/CONTRIBUTING.md
vendored
|
|
@ -1,17 +1,25 @@
|
|||
# Contributing to SpoilerSeason #
|
||||
Thank you for your interest in contributing to SpoilerSeason!
|
||||
This project is an attempt to create a central source for new Magic: the Gathering spoilers and provide data files for miscellaneous projects including our friends over at [Cockatrice](https://github.com/Cockatrice/Cockatrice)
|
||||
# Contributing to Magic-Spoiler #
|
||||
Thank you for your interest in contributing to Magic-Spoiler!<br>
|
||||
This project is an attempt to create a central source for new Magic: the Gathering spoilers and provide data files for miscellaneous projects like [Cockatrice](https://github.com/Cockatrice/Cockatrice).
|
||||
|
||||
|
||||
## How can I help? ##
|
||||
SpoilerSeason grabs its data from many sources, but those sources often contain errors. If you just want to improve the card data and fix errors, you can start in the [errors.json](https://github.com/Cockatrice/Magic-Spoiler/blob/files/errors.json) file in the [files branch](https://github.com/Cockatrice/Magic-Spoiler/tree/files) or our [issue tracker.](https://github.com/Cockatrice/Magic-Spoiler/issues)
|
||||
Once you've found an error, whether it be in the errors.json file or from using the data files, make sure that error hasn't already been fixed in the appropriate file on the [files branch.](https://github.com/Cockatrice/Magic-Spoiler/tree/files) If it's still present, let's get it fixed!
|
||||
- If the error is with one of the fields in a card, check our [cards_corrections.json](https://github.com/Cockatrice/Magic-Spoiler/blob/master/cards_corrections.json) file. The syntax for this file is `"cardname": { "field to correct": "new value" }` If you're fixing the card name, you'd put the bad card name as `cardname`
|
||||
- If the card shouldn't exist at all, check the [cards_delete.json](https://github.com/Cockatrice/Magic-Spoiler/blob/master/cards_delete.json) file. This file is just an array of cards to delete. Card name is case sensitive!
|
||||
- If the card is a legitimate spoiler and it isn't showing up yet, you can manually add it. The file you want is [cards_manual.json](https://github.com/Cockatrice/Magic-Spoiler/blob/master/cards_manual.json) Make sure you link the spoil source in your Push Request.
|
||||
Magic-Spoiler grabs its data from [Scryfall](https://scryfall.com/), but there can be errors of course.
|
||||
If you want to improve the card data and fix errors for all users, you simply have to report them directly to Scryfall.
|
||||
Once you've found a mistake in our data files, make sure that error hasn't already been fixed at the Scryfall webpage in betweeen. If it's still present there, let's get it fixed!
|
||||
- If the error is with one of the fields in a card (e.g. a spelling error or missing cmc) search for that card on the Scryfall webpage. Below the card art on the left, there are some links. Choose the botton one (`Report card issue`) and provide the information in the form. Once their team check & fixes the errors, it'll show up in our spoiler files, too.<br>
|
||||
It only takes a few days - be patient.
|
||||
- If the card is a legitimate spoiler and it isn't showing up yet, you can request it by [contacting the Scryfall support](https://scryfall.com/contact) and let them know. Make sure to link the official spoiler source in your report.
|
||||
- If the card shouldn't exist at all, let the Scryfall team know as well, please.
|
||||
|
||||
What you should **NOT** do however, is to submit PR's to our files branch and fix the xml files there directly.<br>
|
||||
You have to provide updates to Scryfall as all other changes would get overridden again.
|
||||
|
||||
All Push requests for card fixes should have the name of the card being fixed and the type of fix (fix/correction, delete, or manual). In the details of the PR, you **MUST INCLUDE A VALID LINK** to the page the spoiler is located at. For minor fixes, a link to the card image is OK. And of course link the issue you're fixing if there is one!
|
||||
|
||||
## Anything else? ##
|
||||
If you notice errors, please file an [issue](https://github.com/Cockatrice/Magic-Spoiler/issues)
|
||||
If you notice any other errors or have suggestions to the code, please [file an issue](https://github.com/Cockatrice/Magic-Spoiler/issues) in our repository.
|
||||
We try to follow [PEP8 Style Guide](https://peps.python.org/pep-0008/).
|
||||
|
||||
Code improvement PRs are always welcome!
|
||||
<br>
|
||||
|
||||
**Code improvement PRs are always welcome!**
|
||||
|
|
|
|||
13
.github/dependabot.yml
vendored
Normal file
13
.github/dependabot.yml
vendored
Normal file
|
|
@ -0,0 +1,13 @@
|
|||
# Configuration options: https://docs.github.com/en/github/administering-a-repository/configuration-options-for-dependency-updates
|
||||
|
||||
version: 2
|
||||
updates:
|
||||
# Enable version updates for GitHub Actions
|
||||
- package-ecosystem: "github-actions"
|
||||
# Directory must be set to "/" to check for workflow files in .github/workflows
|
||||
directory: "/"
|
||||
# Check for updates to GitHub Actions once a week
|
||||
schedule:
|
||||
interval: "weekly"
|
||||
# Limit the amout of open PR's (default = 5, disabled = 0, security updates are not impacted)
|
||||
open-pull-requests-limit: 2
|
||||
73
.github/workflows/deploy.yml
vendored
Normal file
73
.github/workflows/deploy.yml
vendored
Normal file
|
|
@ -0,0 +1,73 @@
|
|||
name: Deploy
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
paths-ignore:
|
||||
- '**.md'
|
||||
pull_request:
|
||||
branches:
|
||||
- master
|
||||
paths-ignore:
|
||||
- '**.md'
|
||||
schedule:
|
||||
# Every 8 hours = 3 times a day
|
||||
- cron: '0 */8 * * *'
|
||||
|
||||
jobs:
|
||||
deploy:
|
||||
# Do not run the scheduled workflow on forks
|
||||
if: github.event_name != 'schedule' || github.repository_owner == 'Cockatrice'
|
||||
|
||||
name: Check for new spoiler
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
env:
|
||||
DEPLOY: ${{github.ref == 'refs/heads/master'}}
|
||||
OUTPUT_PATH: out
|
||||
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@v6
|
||||
|
||||
- name: Checkout output branch
|
||||
# Run only when triggered from master
|
||||
if: env.DEPLOY == 'true'
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
ref: files
|
||||
path: ${{env.OUTPUT_PATH}}
|
||||
|
||||
- name: Install requirements using pip
|
||||
shell: bash
|
||||
run: python3 -m pip install --requirement requirements.txt
|
||||
|
||||
- name: Run script
|
||||
id: run
|
||||
shell: bash
|
||||
run: python3 -m magic_spoiler
|
||||
|
||||
- name: Upload artifacts
|
||||
# Run only when triggered from a PR
|
||||
if: github.event_name == 'pull_request'
|
||||
uses: actions/upload-artifact@v7
|
||||
with:
|
||||
name: spoiler-output
|
||||
path: ${{github.workspace}}/${{env.OUTPUT_PATH}}
|
||||
if-no-files-found: error
|
||||
|
||||
- name: Deploy changes
|
||||
# Run only when triggered from master and changes are available
|
||||
if: env.DEPLOY == 'true' && steps.run.outputs.deploy == 'true'
|
||||
shell: bash
|
||||
working-directory: ${{env.OUTPUT_PATH}}
|
||||
run: |
|
||||
git config user.name github-actions
|
||||
git config user.email github-actions@github.com
|
||||
git add "*.xml" SpoilerSeasonEnabled
|
||||
git commit -m "Deploy: $GITHUB_SHA"
|
||||
git push
|
||||
deploy_commit=`git rev-parse HEAD`
|
||||
echo "::notice title=New data uploaded::See deployment: $GITHUB_SERVER_URL/$GITHUB_REPOSITORY/commit/$deploy_commit"
|
||||
40
.gitignore
vendored
40
.gitignore
vendored
|
|
@ -1,7 +1,6 @@
|
|||
# Project specific
|
||||
out/
|
||||
AllSets.pre.json
|
||||
deploy_key.enc
|
||||
|
||||
# Byte-compiled / optimized / DLL files
|
||||
__pycache__/
|
||||
|
|
@ -13,19 +12,19 @@ __pycache__/
|
|||
|
||||
# Distribution / packaging
|
||||
.Python
|
||||
env/
|
||||
build/
|
||||
develop-eggs/
|
||||
dist/
|
||||
downloads/
|
||||
eggs/
|
||||
.eggs/
|
||||
lib/
|
||||
lib64/
|
||||
parts/
|
||||
sdist/
|
||||
var/
|
||||
*.egg-info/
|
||||
env
|
||||
build
|
||||
develop-eggs
|
||||
dist
|
||||
downloads
|
||||
eggs
|
||||
.eggs
|
||||
lib
|
||||
lib64
|
||||
parts
|
||||
sdist
|
||||
var
|
||||
*.egg-info
|
||||
.installed.cfg
|
||||
*.egg
|
||||
|
||||
|
|
@ -84,8 +83,11 @@ celerybeat-schedule
|
|||
.env
|
||||
|
||||
# virtualenv
|
||||
venv/
|
||||
ENV/
|
||||
venv
|
||||
ENV
|
||||
bin
|
||||
include
|
||||
pyvenv.cfg
|
||||
|
||||
# Spyder project settings
|
||||
.spyderproject
|
||||
|
|
@ -95,3 +97,9 @@ ENV/
|
|||
|
||||
# JetBrains
|
||||
.idea
|
||||
|
||||
#Mac Stuff
|
||||
.DS_Store
|
||||
|
||||
*.sqlite
|
||||
.*_cache
|
||||
|
|
|
|||
68
.pylintrc
Normal file
68
.pylintrc
Normal file
|
|
@ -0,0 +1,68 @@
|
|||
[MASTER]
|
||||
|
||||
# Pickle collected data for later comparisons.
|
||||
persistent=yes
|
||||
|
||||
|
||||
[MESSAGES CONTROL]
|
||||
|
||||
# Disable the message, report, category or checker with the given id(s). You
|
||||
# can either give multiple identifiers separated by comma (,) or put this
|
||||
# option multiple times (only on the command line, not in the configuration
|
||||
# file where it should appear only once).You can also use "--disable=all" to
|
||||
# disable everything first and then reenable specific checks. For example, if
|
||||
# you want to run only the similarities checker, you can use "--disable=all
|
||||
# --enable=similarities". If you want to run only the classes checker, but have
|
||||
# no Warning level messages displayed, use"--disable=all --enable=classes
|
||||
# --disable=W"
|
||||
disable=
|
||||
bad-continuation,
|
||||
fixme,
|
||||
line-too-long,
|
||||
localled-enabled,
|
||||
locally-disabled,
|
||||
logging-format-interpolation,
|
||||
too-few-public-methods,
|
||||
too-many-statements,
|
||||
wrong-import-order,
|
||||
too-many-branches,
|
||||
import-error
|
||||
|
||||
|
||||
[REPORTS]
|
||||
|
||||
# Set the output format. Available formats are text, parseable, colorized, json
|
||||
# and msvs (visual studio).You can also give a reporter class, eg
|
||||
# mypackage.mymodule.MyReporterClass.
|
||||
output-format=colorized
|
||||
|
||||
|
||||
[BASIC]
|
||||
|
||||
# Good variable names which should always be accepted, separated by a comma.
|
||||
good-names=
|
||||
f,
|
||||
i,
|
||||
j,
|
||||
k,
|
||||
_,
|
||||
|
||||
# Regular expression which should only match function or class names that do
|
||||
# not require a docstring.
|
||||
no-docstring-rgx=__.*__|test_.*
|
||||
|
||||
|
||||
[MISCELLANEOUS]
|
||||
|
||||
# List of note tags to take in consideration, separated by a comma.
|
||||
notes=
|
||||
FIXME,
|
||||
XXX,
|
||||
TODO,
|
||||
|
||||
|
||||
[VARIABLES]
|
||||
|
||||
# A regular expression matching the name of dummy variables (i.e. expectedly
|
||||
# not used).
|
||||
dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)
|
||||
26
.travis.yml
26
.travis.yml
|
|
@ -1,26 +0,0 @@
|
|||
language: python
|
||||
install:
|
||||
- pip install -r requirements.txt
|
||||
script: bash ./deploy.sh
|
||||
notifications:
|
||||
email: false
|
||||
webhooks:
|
||||
urls:
|
||||
- https://webhooks.gitter.im/e/691b9acffe1def5f9d6b
|
||||
on_success: always # options: [always|never|change] default: always
|
||||
on_failure: always # options: [always|never|change] default: always
|
||||
on_start: always # options: [always|never|change] default: always
|
||||
deploy:
|
||||
provider: releases
|
||||
api_key:
|
||||
secure: Bin220gU9Re176T/1bIaX/rhGB+uEaw13aoB2/ir0ePHQB0ihasEJcsgmlN8kz93KSN6vp4y2HwMLnz3t7Pn0amTV8QVL/AlOOzjbq8m/1kYTbXdPlYnMsosZPVFLHRan4LEGFsovRia6LO4p9fqC8BDgQl89W/88PlYAMWzao5jTyKKHp8o+sioYhKj9D+86lxLYspQ+6SN0HOCnF2MZ/vZtxbY32ituswAc/sJK1MtZ/PExoMe1nSI2iKCaatXyKA+FVCUNLHRAu4LgB1GfJCLpmlPbvjud8A6WAKNF6poNCvFck+Ox56tt4bw3ggR5W9kTEhvX74l6AEeC7Qz6bHjh1CEngrqFjyaHy25CcygWgagf0DUsvyGRS0RqEx4bz9psD09d+oWihdkJMfa5kRzXtVQD8sxDgsBqEz/DjsMIlf/L5ISSa7lAYiqq65ELpezBFOlvEZ9avOYLcZc7m5/5ZhtcA4HPSqzfn2nhkPpeggBKufMdyc8JIDkvs/JlFsNu46QVvugjbdGvtb4SlQK310py0TOA6nYt7WntDhX3SukKAeh6oHjZaL5aeoSBhnlQRgJfDBqI3+7anLatD30uEKCMp5sWcLrjB1HO9ZH5nceWBg4cMKJvI/zT77h96fCy7uMkPNt867GP8O9KkWVWzxGBkpIdstigNWfT5g=
|
||||
file_glob: true
|
||||
file: out/*.xml
|
||||
skip_cleanup: true
|
||||
overwrite: true
|
||||
on:
|
||||
tags: true
|
||||
env:
|
||||
global:
|
||||
- ENCRYPTION_LABEL: ec68c19ba263
|
||||
- COMMIT_AUTHOR_EMAIL: you@example.com
|
||||
52
README.md
52
README.md
|
|
@ -1,23 +1,36 @@
|
|||
# Magic-Spoiler [](https://gitter.im/Cockatrice/Magic-Spoiler) #
|
||||

|
||||
|
||||
Magic-Spoiler is a python script to scrape MTGS, Scryfall, and Wizards.com to compile a cockatrice-friendly XML file as well as json files.
|
||||
[)](https://github.com/Cockatrice/Magic-Spoiler/tree/files) [](https://github.com/Cockatrice/Magic-Spoiler/blob/files/spoiler.xml)<br>
|
||||
[](https://github.com/Cockatrice/Magic-Spoiler/blob/files/spoiler.xml)<br>
|
||||
[)](https://github.com/Cockatrice/Magic-Spoiler/blob/files/spoiler.xml)
|
||||
|
||||
## Output [](https://travis-ci.org/Cockatrice/Magic-Spoiler) ##
|
||||
Just looking for XML or JSON files? [They're in our files branch!](https://github.com/Cockatrice/Magic-Spoiler/tree/files)
|
||||
<br>
|
||||
|
||||
## Errors ##
|
||||
Noticed an error? Check out our [Contributing file](https://github.com/Cockatrice/Magic-Spoiler/blob/master/.github/CONTRIBUTING.md) for information on how to help!
|
||||
# Magic-Spoiler [](https://discord.gg/3Z9yzmA) #
|
||||
|
||||
Magic-Spoiler is a Python script to query the [Scryfall](https://scryfall.com) API to compile XML files (Cockatrice formatted) with information about spoiled cards from upcoming sets.
|
||||
|
||||
## Output [](https://github.com/Cockatrice/Magic-Spoiler/actions/workflows/deploy.yml?query=branch%3Amaster) ##
|
||||
|
||||
>[!TIP]
|
||||
>**Enable "Download Spoilers Automatically" in `Cockatrice → Settings → Card Sources → Spoilers` to get updates automatically pushed to your client!**<br>
|
||||
You can also [add the desired <b>.xml</b> file(s) to your <i>customsets</i> folder manually](https://github.com/Cockatrice/Cockatrice/wiki/Custom-Cards-&-Sets#to-add-custom-sets-follow-these-steps) to make Cockatrice use them.
|
||||
|
||||
Just looking for XML files? [They are in our `files` branch!](https://github.com/Cockatrice/Magic-Spoiler/tree/files)
|
||||
|
||||
When run by our CI, the script automatically updates the files and uploads new versions to this branch. ([History of changes](https://github.com/Cockatrice/Magic-Spoiler/commits/files))<br>
|
||||
GitHub Actions are scheduled to automatically run three times a day.
|
||||
|
||||
## Contributing ##
|
||||
Noticed an error in the card data? Check out our [Contributing file](https://github.com/Cockatrice/Magic-Spoiler/blob/master/.github/CONTRIBUTING.md) for information on how to help fixing it!
|
||||
|
||||
We do happily accept PR's that improve our script as well!
|
||||
|
||||
## Running ##
|
||||
|
||||
### Requirements ###
|
||||
* Python 2.7
|
||||
* Python Modules:
|
||||
requests==2.13.0
|
||||
feedparser
|
||||
lxml
|
||||
Pillow
|
||||
datetime
|
||||
* Python 3.6
|
||||
* several Python Modules (see [requirements.txt](https://github.com/Cockatrice/Magic-Spoiler/blob/master/requirements.txt))
|
||||
|
||||
```
|
||||
pip install -r requirements.txt
|
||||
|
|
@ -26,13 +39,14 @@ pip install -r requirements.txt
|
|||
### Usage ###
|
||||
|
||||
```
|
||||
$> python main.py
|
||||
$> python -m magic_spoiler
|
||||
```
|
||||
|
||||
Outputs out/{SETCODE}.xml, out/MPS\_{SETCODE}.xml, out/{SETCODE}.json, out/{MPS\_{SETCODE}.json
|
||||
### Output ###
|
||||
|
||||
errors are logged to out/errors.json
|
||||
All spoiler files are written to the `out/` directory:
|
||||
|
||||
Add the set xml file to your `customsets` folder for Cockatrice.
|
||||
|
||||
When run by travis, uploads all files to [files branch](https://github.com/Cockatrice/Magic-Spoiler/tree/files)
|
||||
| File Name | Content |
|
||||
|:--|:--|
|
||||
| `spoiler.xml` | file contains **all** currently available spoilers from different **sets** |
|
||||
| `{SET_CODE}.xml` | files contain just the spoiler available for this **single set** |
|
||||
|
|
|
|||
|
|
@ -1,2 +0,0 @@
|
|||
{
|
||||
}
|
||||
|
|
@ -1 +0,0 @@
|
|||
[]
|
||||
|
|
@ -1,95 +0,0 @@
|
|||
{
|
||||
"meta": {
|
||||
"instructions": "If you would like to add a card manually, use the 'blank card' template below.",
|
||||
"instructions2": "Check example_card_details for more information",
|
||||
"instructions3": "add the card to the 'cards' array below meta",
|
||||
"example card (do not include this key, just the object)": {
|
||||
"cmc": 7,
|
||||
"colorIdentity": [
|
||||
"W",
|
||||
"B",
|
||||
"G"
|
||||
],
|
||||
"colors": [
|
||||
"White",
|
||||
"Black"
|
||||
],
|
||||
"layout": "normal",
|
||||
"manaCost": "{X}{5}{W}{B}",
|
||||
"name": "Example Card",
|
||||
"number": "55",
|
||||
"power": "*",
|
||||
"rarity": "Mythic Rare",
|
||||
"subtypes": [
|
||||
"Zombie"
|
||||
],
|
||||
"text": "Shadow, Flying, Horsemanship\nExample Card's power is equal to something.\n{G}, {T}: Unfloop target pig you control.",
|
||||
"toughness": "2",
|
||||
"type": "Legendary Creature - Zombie",
|
||||
"types": [
|
||||
"Creature"
|
||||
]
|
||||
},
|
||||
"example card details":{
|
||||
"meta": {
|
||||
"values": "All fields (including loyalty) are string or array of strings except CMC which is int",
|
||||
"required fields (all cards)": [
|
||||
"name",
|
||||
"number",
|
||||
"rarity",
|
||||
"type",
|
||||
"url"
|
||||
]
|
||||
}
|
||||
},
|
||||
"blank card (do not use this key, just the object) - remove unneeded keys":
|
||||
{
|
||||
"name": "",
|
||||
"manaCost": "",
|
||||
"number": "",
|
||||
"rarity": "",
|
||||
"type": "",
|
||||
"url": "",
|
||||
"text": "",
|
||||
"loyalty": "",
|
||||
"cmc": 0,
|
||||
"layout": "",
|
||||
"power": "",
|
||||
"toughness": ""
|
||||
}
|
||||
},
|
||||
"cards": [
|
||||
{
|
||||
"name": "Nicol Bolas, God-Pharaoh",
|
||||
"manaCost": "4UBR",
|
||||
"number": "140",
|
||||
"rarity": "Mythic Rare",
|
||||
"type": "Planeswalker - Bolas",
|
||||
"url": "https://i.imgur.com/DTNXG2z.png",
|
||||
"text": "+2: Target opponent exiles cards from the top of his or her library until he or she exiles a nonland card. Until end of turn, you may cast that card without paying its mana cost.\n+1: Each opponent exiles two cards from his or her hand.\n-4: Nicol Bolas, God-Pharaoh deals 7 damage to target opponent or creature an opponent controls.\n-12: Exile each nonland permanent your opponents control.",
|
||||
"loyalty": "7",
|
||||
"cmc": 7
|
||||
},
|
||||
{
|
||||
"name": "Bontu's Last Reckoning",
|
||||
"manaCost": "1BB",
|
||||
"number": "60",
|
||||
"rarity": "Rare",
|
||||
"type": "Sorcery",
|
||||
"url": "https://i.imgur.com/HOjh3pE.png",
|
||||
"text": "Destroy all creatures. Lands you control don't untap during your next untap step.",
|
||||
"cmc": 3
|
||||
},
|
||||
{
|
||||
"name": "Samut, the Tested",
|
||||
"manaCost": "2RG",
|
||||
"number": "144",
|
||||
"rarity": "Mythic Rare",
|
||||
"type": "Planeswalker - Samut",
|
||||
"url": "https://i.imgur.com/4tRYs5z.png",
|
||||
"text": "+1: Up to one target creature gains double strike until end of turn.\n-2: Samut, the Tested deals 2 damage divided as you choose among one or two target creatures and/or players.\n-7: Search your library for up to two creature and/or planeswalker cards, put them onto the battlefield, then shuffle your library.",
|
||||
"loyalty": "4",
|
||||
"cmc": 4
|
||||
}
|
||||
]
|
||||
}
|
||||
76
deploy.sh
76
deploy.sh
|
|
@ -1,76 +0,0 @@
|
|||
#!/bin/bash
|
||||
set -e # Exit with nonzero exit code if anything fails
|
||||
|
||||
SOURCE_BRANCH="deploy-to-SpoilerSeasonFiles"
|
||||
TARGET_BRANCH="files"
|
||||
|
||||
function doCompile {
|
||||
python main.py
|
||||
}
|
||||
|
||||
# Pull requests and commits to other branches shouldn't try to deploy, just build to verify
|
||||
if [ "$TRAVIS_PULL_REQUEST" != "false" ]; then
|
||||
echo "Skipping deploy; just doing a build."
|
||||
doCompile
|
||||
exit 0
|
||||
fi
|
||||
|
||||
# Save some useful information
|
||||
REPO=`git config remote.origin.url`
|
||||
SSH_REPO=${REPO/https:\/\/github.com\//git@github.com:}
|
||||
SHA=`git rev-parse --verify HEAD`
|
||||
|
||||
# Clone the existing gh-pages for this repo into out/
|
||||
# Create a new empty branch if gh-pages doesn't exist yet (should only happen on first deply)
|
||||
git clone $REPO out
|
||||
cd out
|
||||
git checkout $TARGET_BRANCH || git checkout --orphan $TARGET_BRANCH
|
||||
cd ..
|
||||
|
||||
# Clean out existing contents
|
||||
rm -rf out/**/* || exit 0
|
||||
|
||||
# Run our compile script
|
||||
doCompile
|
||||
|
||||
echo TRAVIS_PULL_REQUEST ${TRAVIS_PULL_REQUEST}
|
||||
echo TRAVIS_SECURE_ENV_VARS ${TRAVIS_SECURE_ENV_VARS}
|
||||
echo TRAVIS_EVENT_TYPE ${TRAVIS_EVENT_TYPE}
|
||||
|
||||
# Don't push to our branch for PRs.
|
||||
#if [ "${ghToken:-false}" != "false" ]; then
|
||||
# doCompile
|
||||
#else
|
||||
# doCompile
|
||||
# exit 0
|
||||
#fi
|
||||
|
||||
# Now let's go have some fun with the cloned repo
|
||||
cd out
|
||||
ls
|
||||
git config user.name "Travis CI"
|
||||
git config user.email "$COMMIT_AUTHOR_EMAIL"
|
||||
|
||||
# If there are no changes to the compiled out (e.g. this is a README update) then just bail.
|
||||
#if git diff --quiet; then
|
||||
# echo "No changes to the output on this push; exiting."
|
||||
# exit 0
|
||||
#fi
|
||||
|
||||
# Commit the "changes", i.e. the new version.
|
||||
# The delta will show diffs between new and old versions.
|
||||
git add -A .
|
||||
git commit --allow-empty -m "Deploy to GitHub: ${SHA}"
|
||||
|
||||
# Get the deploy key by using Travis's stored variables to decrypt deploy_key.enc
|
||||
ENCRYPTED_KEY_VAR="encrypted_${ENCRYPTION_LABEL}_key"
|
||||
ENCRYPTED_IV_VAR="encrypted_${ENCRYPTION_LABEL}_iv"
|
||||
ENCRYPTED_KEY=${!ENCRYPTED_KEY_VAR}
|
||||
ENCRYPTED_IV=${!ENCRYPTED_IV_VAR}
|
||||
openssl aes-256-cbc -K $ENCRYPTED_KEY -iv $ENCRYPTED_IV -in ../deploy_key.enc -out ../deploy_key -d
|
||||
chmod 600 ../deploy_key
|
||||
eval `ssh-agent -s`
|
||||
ssh-add ../deploy_key
|
||||
|
||||
# Now that we're all set up, we can push.
|
||||
git push $SSH_REPO $TARGET_BRANCH
|
||||
BIN
deploy_key.enc
BIN
deploy_key.enc
Binary file not shown.
1
magic_spoiler/__init__.py
Normal file
1
magic_spoiler/__init__.py
Normal file
|
|
@ -0,0 +1 @@
|
|||
"""Magic Spoiler Program"""
|
||||
669
magic_spoiler/__main__.py
Normal file
669
magic_spoiler/__main__.py
Normal file
|
|
@ -0,0 +1,669 @@
|
|||
"""
|
||||
Handle Scryfall Spoilers
|
||||
"""
|
||||
import contextvars
|
||||
import datetime
|
||||
import hashlib
|
||||
import json
|
||||
import os
|
||||
import pathlib
|
||||
import shutil
|
||||
import time
|
||||
from enum import Enum
|
||||
from typing import IO, Any, Dict, List, Tuple, Union
|
||||
|
||||
import requests
|
||||
import requests_cache
|
||||
from lxml import etree
|
||||
|
||||
SCRYFALL_SET_URL: str = "https://api.scryfall.com/sets/{}"
|
||||
SESSION: contextvars.ContextVar = contextvars.ContextVar("SESSION_SCRYFALL")
|
||||
SPOILER_SETS: contextvars.ContextVar = contextvars.ContextVar("SPOILER_SETS")
|
||||
SPOILER_MARK = "~"
|
||||
|
||||
OUTPUT_DIR = pathlib.Path("out")
|
||||
OUTPUT_TMP_DIR = OUTPUT_DIR.joinpath("tmp")
|
||||
XML_ESCAPE_TRANSLATE_MAP = str.maketrans(
|
||||
{"&": "&", '"': """, "<": "<", ">": ">"}
|
||||
)
|
||||
# remove any control characters outright
|
||||
XML_ESCAPE_TRANSLATE_MAP.update({i: "" for i in range(ord(" "))})
|
||||
# don't remove whitespace characters in the sub " " range
|
||||
del XML_ESCAPE_TRANSLATE_MAP[ord("\n")]
|
||||
del XML_ESCAPE_TRANSLATE_MAP[ord("\t")]
|
||||
|
||||
# copied from Cockatrice/oracle/src/oracleimporter.h OracleImporter::mainCardTypes
|
||||
MAINTYPES = (
|
||||
"Planeswalker",
|
||||
"Creature",
|
||||
"Land",
|
||||
"Sorcery",
|
||||
"Instant",
|
||||
"Artifact",
|
||||
"Enchantment"
|
||||
)
|
||||
|
||||
class Priority(Enum):
|
||||
FALLBACK = 0
|
||||
PRIMARY = 10
|
||||
SECONDARY = 20
|
||||
REPRINT = 30
|
||||
OTHER = 40
|
||||
|
||||
SET_TYPE_PRIORITY_MAP = {
|
||||
"core": Priority.PRIMARY,
|
||||
"expansion": Priority.PRIMARY,
|
||||
|
||||
"commander": Priority.SECONDARY,
|
||||
"starter": Priority.SECONDARY,
|
||||
"draft_innovation": Priority.SECONDARY,
|
||||
"duel_deck": Priority.SECONDARY,
|
||||
|
||||
"archenemy": Priority.REPRINT,
|
||||
"arsenal": Priority.REPRINT,
|
||||
"box": Priority.REPRINT,
|
||||
"from_the_vault": Priority.REPRINT,
|
||||
"masterpiece": Priority.REPRINT,
|
||||
"masters": Priority.REPRINT,
|
||||
"memorabilia": Priority.REPRINT,
|
||||
"planechase": Priority.REPRINT,
|
||||
"premium_deck": Priority.REPRINT,
|
||||
"promo": Priority.REPRINT,
|
||||
"spellbook": Priority.REPRINT,
|
||||
"token": Priority.REPRINT,
|
||||
"treasure_chest": Priority.REPRINT,
|
||||
|
||||
"alchemy": Priority.OTHER,
|
||||
"funny": Priority.OTHER,
|
||||
"minigame": Priority.OTHER,
|
||||
"vanguard": Priority.OTHER,
|
||||
}
|
||||
|
||||
|
||||
def __get_session() -> Union[requests.Session, Any]:
|
||||
"""
|
||||
Get the session for downloading content
|
||||
:return: Session
|
||||
"""
|
||||
requests_cache.install_cache(
|
||||
cache_name="scryfall_cache", backend="sqlite", expire_after=7200 # 2 hours
|
||||
)
|
||||
|
||||
if not SESSION.get(None):
|
||||
SESSION.set(requests.Session())
|
||||
return SESSION.get()
|
||||
|
||||
|
||||
def json_download(scryfall_url: str) -> Dict[str, Any]:
|
||||
"""
|
||||
Get the data from Scryfall in JSON format using our secret keys
|
||||
:param scryfall_url: URL to json_download JSON data from
|
||||
:return: JSON object of the Scryfall data
|
||||
"""
|
||||
session = __get_session()
|
||||
response: Any = session.get(url=scryfall_url, timeout=10.0)
|
||||
request_api_json: Dict[str, Any] = response.json()
|
||||
print("Downloaded: {} (Cache = {})".format(scryfall_url, response.from_cache))
|
||||
return request_api_json
|
||||
|
||||
|
||||
def download_scryfall_set(set_code: str) -> List[Dict[str, Any]]:
|
||||
"""
|
||||
Download a set from scryfall in entirety
|
||||
:param set_code: Set code
|
||||
:return: Card list
|
||||
"""
|
||||
set_content: Dict[str, Any] = json_download(SCRYFALL_SET_URL.format(set_code))
|
||||
if set_content["object"] == "error":
|
||||
print("API download failed for {}: {}".format(set_code, set_content))
|
||||
return []
|
||||
|
||||
spoiler_cards = []
|
||||
download_url = set_content["search_uri"]
|
||||
|
||||
page_downloaded: int = 1
|
||||
while download_url:
|
||||
page_downloaded += 1
|
||||
|
||||
cards = json_download(download_url)
|
||||
if cards["object"] == "error":
|
||||
print("Set {} has no cards, skipping".format(set_code))
|
||||
break
|
||||
|
||||
for card in cards["data"]:
|
||||
spoiler_cards.append(card)
|
||||
|
||||
if not cards.get("has_more"):
|
||||
break
|
||||
|
||||
download_url = cards["next_page"]
|
||||
|
||||
return sorted(spoiler_cards, key=lambda c: (c["name"], c["collector_number"]))
|
||||
|
||||
|
||||
def build_types(sf_card: Dict[str, Any]) -> Tuple[List[str], str, List[str]]:
|
||||
"""
|
||||
Build the super, type, and sub-types of a given card
|
||||
:param sf_card: Scryfall card
|
||||
:return: Tuple of types
|
||||
"""
|
||||
all_super_types = ["Legendary", "Snow", "Elite", "Basic", "World", "Ongoing"]
|
||||
|
||||
# return values
|
||||
super_types: List[str] = []
|
||||
sub_types: List[str] = []
|
||||
|
||||
# Spoiler cards do not always include a type_line
|
||||
type_line = sf_card.get("type_line", "")
|
||||
if not type_line:
|
||||
type_line = "Unknown"
|
||||
|
||||
if "—" in type_line:
|
||||
card_subs = type_line.split("—")[1].strip()
|
||||
sub_types = card_subs.split(" ") if " " in card_subs else [card_subs]
|
||||
|
||||
for card_type in all_super_types:
|
||||
if card_type in type_line:
|
||||
super_types.append(card_type)
|
||||
|
||||
types: str = type_line.split("—")[0]
|
||||
for card_type in all_super_types:
|
||||
types = types.replace(card_type, "")
|
||||
|
||||
return super_types, types, sub_types
|
||||
|
||||
|
||||
def scryfall2mtgjson(scryfall_cards: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
|
||||
"""
|
||||
Convert SF cards to MTGJSON v4 format for dispatching
|
||||
:param scryfall_cards: List of Scryfall cards
|
||||
:return: MTGJSON card list
|
||||
"""
|
||||
trice_cards = []
|
||||
|
||||
composed_sf_cards = []
|
||||
|
||||
# Handle split/transform cards
|
||||
for sf_card in scryfall_cards:
|
||||
if "layout" in sf_card.keys():
|
||||
if sf_card["layout"] in ["transform", "split"]:
|
||||
# Make a copy for zoning
|
||||
combined_sides = sf_card.copy()
|
||||
del combined_sides["card_faces"]
|
||||
|
||||
# Quick pointers
|
||||
face_0 = sf_card["card_faces"][0]
|
||||
face_1 = sf_card["card_faces"][1]
|
||||
|
||||
# Update data for the combined
|
||||
combined_sides["layout"] = "double-faced"
|
||||
combined_sides["names"] = [face_0["name"], face_1["name"]]
|
||||
|
||||
# Re-structure two cards into singletons
|
||||
front_side = {**combined_sides, **face_0}
|
||||
back_side = {**combined_sides, **face_1}
|
||||
|
||||
# Uniquify them
|
||||
front_side["collector_number"] += "a"
|
||||
back_side["collector_number"] += "b"
|
||||
|
||||
# And continue on our journey
|
||||
composed_sf_cards.extend([front_side, back_side])
|
||||
else:
|
||||
composed_sf_cards.append(sf_card)
|
||||
|
||||
# Build trice cards from SF cards
|
||||
for sf_card in composed_sf_cards:
|
||||
super_types, types, sub_types = build_types(sf_card)
|
||||
|
||||
if "card_faces" in sf_card:
|
||||
image = (
|
||||
sf_card["card_faces"][0]
|
||||
.get("image_uris", {})
|
||||
.get("normal", "")
|
||||
)
|
||||
else:
|
||||
image = sf_card.get("image_uris", {}).get("normal", "")
|
||||
|
||||
try:
|
||||
trice_card = {
|
||||
"cmc": sf_card["cmc"],
|
||||
"names": sf_card.get("names", None),
|
||||
"mana_cost": sf_card.get("mana_cost", ""),
|
||||
"name": sf_card["name"],
|
||||
"number": sf_card["collector_number"],
|
||||
"rarity": sf_card["rarity"].replace("mythic", "mythic rare").title(),
|
||||
"text": sf_card.get("oracle_text", ""),
|
||||
"url": image,
|
||||
"type": sf_card.get("type_line", "Unknown"),
|
||||
"colorIdentity": sf_card.get("color_identity", None),
|
||||
"colors": sf_card.get("colors", []),
|
||||
"power": sf_card.get("power", None),
|
||||
"toughness": sf_card.get("toughness", None),
|
||||
"layout": sf_card["layout"].replace("normal", ""),
|
||||
"loyalty": sf_card.get("loyalty", None),
|
||||
"artist": sf_card.get("artist", ""),
|
||||
"flavor": sf_card.get("flavor_text", None),
|
||||
"multiverseId": sf_card.get("multiverse_id", None),
|
||||
"superTypes": super_types,
|
||||
"types": types,
|
||||
"subTypes": sub_types,
|
||||
}
|
||||
trice_cards.append(trice_card)
|
||||
|
||||
except Exception as e:
|
||||
# If running in GitHub Actions CI, print the message as a warning
|
||||
if 'GITHUB_ACTION' in os.environ:
|
||||
print(f'::warning::Unable to parse "{sf_card.get("name")}" ({sf_card.get("set").upper()}): {str(e)}')
|
||||
else:
|
||||
print(f'Unable to parse "{sf_card.get("name")}" ({sf_card.get("set").upper()}): {str(e)}')
|
||||
|
||||
return trice_cards
|
||||
|
||||
|
||||
def open_header(card_xml_file: IO[Any], filename: str) -> None:
|
||||
"""
|
||||
Add the header data to the XML file
|
||||
:param card_xml_file: Card file path
|
||||
"""
|
||||
card_xml_file.write(
|
||||
"<cockatrice_carddatabase version='4' xmlns:xsi='http://www.w3.org/2001/XMLSchema-instance' xsi:schemaLocation='https://raw.githubusercontent.com/Cockatrice/Cockatrice/master/doc/carddatabase_v4/cards.xsd'>\n"
|
||||
+ " <!--\n"
|
||||
+ " Created At: " + datetime.datetime.now(datetime.timezone.utc).strftime("%a, %b %d %Y, %H:%M:%S") + " (UTC)\n"
|
||||
+ " \n"
|
||||
+ " THIS FILE IS AUTOMATICALLY GENERATED & ALL EDITS WILL BE OVERRIDDEN.\n"
|
||||
+ " -->\n"
|
||||
+ "<info>\n"
|
||||
+ " <author>Cockatrice/Magic-Spoiler</author>\n"
|
||||
+ " <createdAt>" + datetime.datetime.now(datetime.timezone.utc).strftime("%Y-%m-%d %H:%M:%S") + " (UTC)</createdAt>\n"
|
||||
+ " <sourceUrl>https://raw.githubusercontent.com/Cockatrice/Magic-Spoiler/files/" + filename + "</sourceUrl>\n"
|
||||
+ " <sourceVersion></sourceVersion>\n"
|
||||
+ "</info>\n"
|
||||
+ "<sets>\n"
|
||||
)
|
||||
|
||||
|
||||
def fill_header_sets(card_xml_file: IO[Any], set_obj: Dict[str, str]) -> None:
|
||||
"""
|
||||
Add header data for set files
|
||||
:param card_xml_file: Card file path
|
||||
:param set_obj: Set object
|
||||
"""
|
||||
priority = SET_TYPE_PRIORITY_MAP.get(set_obj["set_type"].lower(), Priority.FALLBACK)
|
||||
card_xml_file.write(
|
||||
"<set>\n"
|
||||
"<name>" + set_obj["code"] + "</name>\n"
|
||||
"<longname>" + set_obj["name"] + " (Spoiler)</longname>\n"
|
||||
"<settype>" + set_obj["set_type"].replace("_", " ").title() + "</settype>\n"
|
||||
"<releasedate>" + set_obj["released_at"] + "</releasedate>\n"
|
||||
"<priority>" + str(priority.value) + "</priority>\n"
|
||||
"</set>\n"
|
||||
)
|
||||
|
||||
|
||||
def close_header(card_xml_file: IO[Any]) -> None:
|
||||
"""
|
||||
Add closing data to files
|
||||
:param card_xml_file: Card file path
|
||||
"""
|
||||
card_xml_file.write("</sets>\n<cards>\n")
|
||||
|
||||
|
||||
def close_xml_file(card_xml_file: IO[Any]) -> None:
|
||||
"""
|
||||
Add final touch to files to validate them,
|
||||
then pretty them
|
||||
:param card_xml_file: Card file path
|
||||
"""
|
||||
card_xml_file.write("</cards>\n</cockatrice_carddatabase>\n")
|
||||
card_xml_file.close()
|
||||
|
||||
# Make the files pretty and add xml declaration
|
||||
parser = etree.XMLParser(remove_blank_text=True)
|
||||
root = etree.parse(card_xml_file.name, parser).getroot()
|
||||
with pathlib.Path(card_xml_file.name).open("wb") as f:
|
||||
f.write(etree.tostring(root, encoding="UTF-8", xml_declaration=True, pretty_print=True))
|
||||
|
||||
|
||||
def xml_escape(text):
|
||||
return text.translate(XML_ESCAPE_TRANSLATE_MAP)
|
||||
|
||||
|
||||
def write_cards(
|
||||
card_xml_file: Any, trice_dict: List[Dict[str, Any]], set_code: str
|
||||
) -> None:
|
||||
"""
|
||||
Given a list of cards, write the cards to an output file
|
||||
:param card_xml_file: Output file to write to
|
||||
:param trice_dict: List of cards
|
||||
:param set_code: Set code
|
||||
"""
|
||||
for card in trice_dict:
|
||||
if "names" in card.keys() and card["names"]:
|
||||
if "layout" in card and card["layout"] != "double-faced":
|
||||
if card["name"] == card["names"][1]:
|
||||
continue
|
||||
|
||||
set_name = card["name"]
|
||||
|
||||
if "mana_cost" in card.keys():
|
||||
mana_cost = card["mana_cost"].replace("{", "").replace("}", "")
|
||||
else:
|
||||
mana_cost = ""
|
||||
|
||||
if "power" in card.keys() or "toughness" in card.keys():
|
||||
if card["power"]:
|
||||
pow_tough = str(card["power"]) + "/" + str(card["toughness"])
|
||||
else:
|
||||
pow_tough = ""
|
||||
else:
|
||||
pow_tough = ""
|
||||
|
||||
if "loyalty" in card.keys() and card["loyalty"]:
|
||||
loyalty = str(card["loyalty"])
|
||||
else:
|
||||
loyalty = ""
|
||||
|
||||
if "text" in card.keys():
|
||||
text = card["text"]
|
||||
else:
|
||||
text = ""
|
||||
|
||||
card_cmc = str(card["cmc"])
|
||||
if card_cmc.endswith(".0"):
|
||||
card_cmc = card_cmc[:-2]
|
||||
|
||||
card_type = card["type"]
|
||||
|
||||
table_row = "1"
|
||||
if "Land" in card_type:
|
||||
table_row = "0"
|
||||
elif "Sorcery" in card_type:
|
||||
table_row = "3"
|
||||
elif "Instant" in card_type:
|
||||
table_row = "3"
|
||||
elif "Creature" in card_type:
|
||||
table_row = "2"
|
||||
|
||||
for maintype in MAINTYPES:
|
||||
if maintype in card_type:
|
||||
break
|
||||
else:
|
||||
maintype = None
|
||||
|
||||
if "names" in card.keys():
|
||||
if "layout" in card:
|
||||
if card["layout"] == "split" or card["layout"] == "aftermath":
|
||||
if "names" in card:
|
||||
if card["name"] == card["names"][0]:
|
||||
for json_card in trice_dict:
|
||||
if json_card["name"] == card["names"][1]:
|
||||
card_type += " // " + json_card["type"]
|
||||
new_mc = ""
|
||||
if "mana_cost" in json_card:
|
||||
new_mc = json_card["mana_cost"]
|
||||
mana_cost += " // " + new_mc.replace(
|
||||
"{", ""
|
||||
).replace("}", "")
|
||||
card_cmc += " // " + str(json_card["cmc"])
|
||||
text += "\n---\n" + json_card["text"]
|
||||
set_name += " // " + json_card["name"]
|
||||
elif card["layout"] == "double-faced":
|
||||
if "names" not in card.keys():
|
||||
print(card["name"] + ' is double-faced but no "names" key')
|
||||
else:
|
||||
pass
|
||||
else:
|
||||
print(card["name"] + " has multiple names and no 'layout' key")
|
||||
|
||||
if "number" in card:
|
||||
if "b" in str(card["number"]):
|
||||
if "layout" in card:
|
||||
if card["layout"] == "split" or card["layout"] == "aftermath":
|
||||
continue
|
||||
|
||||
set_name, mana_cost, card_cmc, card_type, pow_tough, table_row, text, loyalty = map(
|
||||
xml_escape,
|
||||
[set_name, mana_cost, card_cmc, card_type, pow_tough, table_row, text, loyalty],
|
||||
)
|
||||
card_xml_file.write("<card>\n")
|
||||
card_xml_file.write("<name>" + set_name + "</name>\n")
|
||||
card_xml_file.write("<text>" + text + "</text>\n")
|
||||
card_xml_file.write("<prop>\n")
|
||||
if "colors" in card.keys() and card["colors"]:
|
||||
card_xml_file.write("<colors>" + "".join(card["colors"]) + "</colors>\n")
|
||||
|
||||
card_xml_file.write("<type>" + card_type + "</type>\n")
|
||||
if maintype:
|
||||
card_xml_file.write("<maintype>" + maintype + "</maintype>\n")
|
||||
|
||||
card_xml_file.write("<cmc>" + card_cmc + "</cmc>\n")
|
||||
if mana_cost:
|
||||
card_xml_file.write("<manacost>" + mana_cost + "</manacost>\n")
|
||||
|
||||
if pow_tough:
|
||||
card_xml_file.write("<pt>" + pow_tough + "</pt>\n")
|
||||
|
||||
if loyalty:
|
||||
card_xml_file.write("<loyalty>" + loyalty + "</loyalty>\n")
|
||||
|
||||
card_xml_file.write("</prop>\n")
|
||||
card_xml_file.write(
|
||||
'<set rarity="'
|
||||
+ str(card["rarity"])
|
||||
+ '" picURL="'
|
||||
+ str(card["url"])
|
||||
+ '">'
|
||||
+ str(set_code)
|
||||
+ "</set>\n"
|
||||
)
|
||||
if set_name + " enters the battlefield tapped" in text:
|
||||
card_xml_file.write("<cipt>1</cipt>\n")
|
||||
|
||||
card_xml_file.write("<tablerow>" + table_row + "</tablerow>\n")
|
||||
card_xml_file.write("</card>\n")
|
||||
|
||||
|
||||
def write_spoilers_xml(trice_dicts: Dict[str, List[Dict[str, Any]]]) -> bool:
|
||||
"""
|
||||
Write the spoiler.xml file
|
||||
:param trice_dicts: Dict of dict entries
|
||||
:return: Written successfully
|
||||
"""
|
||||
output_file_name = "spoiler.xml"
|
||||
|
||||
pathlib.Path("out").mkdir(parents=True, exist_ok=True)
|
||||
card_xml_file = OUTPUT_TMP_DIR.joinpath(output_file_name).open("w", encoding="utf-8")
|
||||
|
||||
# Fill in set headers
|
||||
open_header(card_xml_file, output_file_name)
|
||||
for value in SPOILER_SETS.get():
|
||||
fill_header_sets(card_xml_file, {key: (value_ + SPOILER_MARK if key == "code" else value_) for key, value_ in value.items()})
|
||||
close_header(card_xml_file)
|
||||
|
||||
# Write in all the cards
|
||||
for value in SPOILER_SETS.get():
|
||||
try:
|
||||
write_cards(card_xml_file, trice_dicts[value["code"]], value["code"] + SPOILER_MARK)
|
||||
except KeyError:
|
||||
print("Skipping " + value["code"])
|
||||
|
||||
close_xml_file(card_xml_file)
|
||||
|
||||
old_xml_location = str(OUTPUT_DIR.joinpath(output_file_name))
|
||||
if compare_xml_content(card_xml_file.name, old_xml_location):
|
||||
print("No new data in spoiler.xml, skipping replacement")
|
||||
return False
|
||||
|
||||
# Move new version to old location
|
||||
print("Changes detected, replacing spoiler.xml with updated version")
|
||||
shutil.move(card_xml_file.name, old_xml_location)
|
||||
return True
|
||||
|
||||
|
||||
def compare_xml_content(a: str, b: str) -> bool:
|
||||
"""
|
||||
Compare the contents of two XML files and report
|
||||
if the contents are the same, minus the info part and comments
|
||||
:param a: File a
|
||||
:param b: File b
|
||||
:return: Is file content, minus info and comments, the same?
|
||||
"""
|
||||
files = [pathlib.Path(file_n) for file_n in (a, b)]
|
||||
|
||||
if all([filepath.is_file() for filepath in files]):
|
||||
hashes = []
|
||||
for filepath in files:
|
||||
parser = etree.XMLParser(remove_blank_text=True)
|
||||
root = etree.parse(str(filepath), parser).getroot()
|
||||
etree.strip_elements(root, "info", etree.Comment)
|
||||
digest = hashlib.sha512(etree.tostring(root)).hexdigest()
|
||||
hashes.append(digest)
|
||||
|
||||
return hashes[0] == hashes[1]
|
||||
|
||||
return False
|
||||
|
||||
|
||||
def write_set_xml(trice_dict: List[Dict[str, Any]], set_obj: Dict[str, str]) -> bool:
|
||||
"""
|
||||
Write out a single magic set to XML format
|
||||
:param trice_dict: Cards to print
|
||||
:param set_obj: Set object
|
||||
:return: Written successfully
|
||||
"""
|
||||
if not trice_dict:
|
||||
return False
|
||||
|
||||
OUTPUT_TMP_DIR.mkdir(parents=True, exist_ok=True)
|
||||
set_code = set_obj["code"]
|
||||
file_path = OUTPUT_TMP_DIR.joinpath(f"{set_code}.xml")
|
||||
card_xml_file = file_path.open("w", encoding="utf-8")
|
||||
|
||||
open_header(card_xml_file, file_path.name)
|
||||
fill_header_sets(card_xml_file, set_obj)
|
||||
close_header(card_xml_file)
|
||||
write_cards(card_xml_file, trice_dict, set_obj["code"])
|
||||
close_xml_file(card_xml_file)
|
||||
|
||||
# If content didn't change, discard newest creation
|
||||
old_xml_location = str(OUTPUT_DIR.joinpath("{}.xml".format(set_obj["code"])))
|
||||
if compare_xml_content(card_xml_file.name, old_xml_location):
|
||||
print("No new data in {}.xml, skipping replacement".format(set_obj["code"]))
|
||||
return False
|
||||
|
||||
# Move new version to old location
|
||||
print(
|
||||
"Changes detected, replacing {}.xml with updated version".format(
|
||||
set_obj["code"]
|
||||
)
|
||||
)
|
||||
shutil.move(card_xml_file.name, old_xml_location)
|
||||
return True
|
||||
|
||||
|
||||
def get_spoiler_sets() -> List[Dict[str, str]]:
|
||||
"""
|
||||
Download Sf sets and mark spoiler sets
|
||||
:return: Spoiler sets
|
||||
"""
|
||||
sf_sets = json_download(SCRYFALL_SET_URL.format(""))
|
||||
if sf_sets["object"] == "error":
|
||||
print("Unable to download SF correctly: {}".format(sf_sets))
|
||||
return []
|
||||
|
||||
spoiler_sets = []
|
||||
# Find list of possible Set Types to exclude here: https://scryfall.com/docs/api/sets
|
||||
excluded_set_types = ["alchemy", "masterpiece", "arsenal", "from_the_vault", "spellbook", "premium_deck", "duel_deck",
|
||||
"draft_innovation", "treasure_chest", "planechase", "archenemy", "vanguard", "box", "promo",
|
||||
"token", "memorabilia", "minigame"]
|
||||
|
||||
for sf_set in sf_sets["data"]:
|
||||
if (
|
||||
sf_set["released_at"] >= time.strftime("%Y-%m-%d %H:%M:%S")
|
||||
and sf_set["set_type"] not in excluded_set_types
|
||||
and sf_set["card_count"]
|
||||
):
|
||||
sf_set["code"] = sf_set["code"].upper()
|
||||
spoiler_sets.append(sf_set)
|
||||
|
||||
return spoiler_sets
|
||||
|
||||
|
||||
def delete_old_files() -> bool:
|
||||
"""
|
||||
Delete files that are no longer necessary within the program
|
||||
:return: Files were deleted
|
||||
"""
|
||||
valid_files = [x["code"].upper() for x in SPOILER_SETS.get()] + [
|
||||
"spoiler",
|
||||
"SpoilerSeasonEnabled",
|
||||
"README",
|
||||
]
|
||||
|
||||
deleted = False
|
||||
for output_file in OUTPUT_DIR.glob("*"):
|
||||
if not output_file.is_file():
|
||||
continue
|
||||
|
||||
if output_file.stem not in valid_files:
|
||||
output_file.unlink()
|
||||
deleted = True
|
||||
|
||||
if OUTPUT_TMP_DIR.is_dir():
|
||||
shutil.rmtree(OUTPUT_TMP_DIR)
|
||||
|
||||
enabled_path = OUTPUT_DIR.joinpath("SpoilerSeasonEnabled")
|
||||
if not SPOILER_SETS.get():
|
||||
enabled_path.unlink(missing_ok=True)
|
||||
else:
|
||||
enabled_path.open("w", encoding="utf-8").write(" ")
|
||||
|
||||
return deleted
|
||||
|
||||
|
||||
def main() -> None:
|
||||
"""
|
||||
Main dispatch thread
|
||||
"""
|
||||
|
||||
# Determine what sets have spoiler data
|
||||
SPOILER_SETS.set(get_spoiler_sets())
|
||||
|
||||
spoiler_xml = {}
|
||||
changed = False
|
||||
for set_info in SPOILER_SETS.get():
|
||||
print("Handling {}".format(set_info["code"]))
|
||||
|
||||
cards = download_scryfall_set(set_info["code"])
|
||||
trice_dict = scryfall2mtgjson(cards)
|
||||
|
||||
# Write SET.xml
|
||||
changed |= write_set_xml(trice_dict, set_info)
|
||||
|
||||
# Save for spoiler.xml
|
||||
spoiler_xml[set_info["code"]] = trice_dict
|
||||
|
||||
if spoiler_xml:
|
||||
# Write out the spoiler.xml file
|
||||
changed |= write_spoilers_xml(spoiler_xml)
|
||||
|
||||
# Cleanup outdated stuff that's not necessary
|
||||
changed |= delete_old_files()
|
||||
|
||||
# Enable deployment on changes (used in CI)
|
||||
try:
|
||||
github_output = os.environ["GITHUB_OUTPUT"]
|
||||
except KeyError:
|
||||
print(f"not in ci but deploy={str(changed).lower()}")
|
||||
else:
|
||||
with open(github_output, "a") as fp:
|
||||
print(f"deploy={str(changed).lower()}", file=fp)
|
||||
|
||||
if not changed:
|
||||
print("::notice title=No updates available::"
|
||||
"No new spoiler cards found for deployment")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
101
main.py
101
main.py
|
|
@ -1,101 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
import spoilers
|
||||
import sys
|
||||
import os
|
||||
import shutil
|
||||
#import configparser
|
||||
import json
|
||||
#import urllib
|
||||
|
||||
presets = {
|
||||
"isfullspoil": False, #when full spoil comes around, we only want to use WOTC images
|
||||
"includeMasterpieces": True, #if the set has masterpieces, let's get those too
|
||||
"oldRSS": False #maybe MTGS hasn't updated their spoiler.rss but new cards have leaked
|
||||
}
|
||||
|
||||
with open('set_info.json') as data_file:
|
||||
setinfos = json.load(data_file)
|
||||
|
||||
with open('cards_manual.json') as data_file:
|
||||
manual_cards = json.load(data_file)
|
||||
manual_cards = manual_cards['cards']
|
||||
|
||||
with open('cards_corrections.json') as data_file:
|
||||
card_corrections = json.load(data_file)
|
||||
|
||||
with open('cards_delete.json') as data_file:
|
||||
delete_cards = json.load(data_file)
|
||||
|
||||
errorlog = []
|
||||
|
||||
#TODO insert configparser to add config.ini file
|
||||
|
||||
for argument in sys.argv:
|
||||
#we can modify any of the variables from the set infos file or the presets above at runtime
|
||||
#works only for first-level variables currently (editing masterpieces
|
||||
#syntax is variable="new value"
|
||||
for setinfo in setinfos:
|
||||
if setinfo in argument.split("=")[0]:
|
||||
setinfos[setinfo] = argument.split("=")[1]
|
||||
for preset in presets:
|
||||
if preset in argument.split("=")[0]:
|
||||
presets[preset] = argument.split("=")[1]
|
||||
|
||||
def save_allsets(AllSets):
|
||||
#TODO Create AllSets.json for Oracle
|
||||
print "Saving AllSets"
|
||||
|
||||
def save_masterpieces(masterpieces):
|
||||
with open('out/' + setinfos['masterpieces']['setname'] + '.json', 'w') as outfile:
|
||||
json.dump(masterpieces, outfile, sort_keys=True, indent=2, separators=(',', ': '))
|
||||
|
||||
def save_setjson(mtgs):
|
||||
with open('out/' + setinfos['setname'] + '.json', 'w') as outfile:
|
||||
json.dump(mtgs, outfile, sort_keys=True, indent=2, separators=(',', ': '))
|
||||
|
||||
def save_errorlog(errorlog):
|
||||
fixederrors = []
|
||||
unfixederrors = []
|
||||
for error in errorlog:
|
||||
if 'fixed' in error:
|
||||
fixederrors.append(error)
|
||||
else:
|
||||
unfixederrors.append(error)
|
||||
errorlog = {"unfixed": unfixederrors, "fixed": fixederrors}
|
||||
with open('out/errors.json', 'w') as outfile:
|
||||
json.dump(errorlog, outfile, sort_keys=True, indent=2, separators=(',', ': '))
|
||||
|
||||
def save_xml(xmlstring, outfile):
|
||||
with open(outfile,'w+') as xmlfile:
|
||||
xmlfile.write(xmlstring.encode('utf-8'))
|
||||
|
||||
if __name__ == '__main__':
|
||||
AllSets = spoilers.get_allsets() #get AllSets from mtgjson
|
||||
if presets['oldRSS']:
|
||||
mtgs = {"cards":[]}
|
||||
else:
|
||||
mtgs = spoilers.scrape_mtgs('http://www.mtgsalvation.com/spoilers.rss') #scrape mtgs rss feed
|
||||
mtgs = spoilers.parse_mtgs(mtgs) #parse spoilers into mtgjson format
|
||||
mtgs = spoilers.correct_cards(mtgs, manual_cards, card_corrections, delete_cards) #fix using the fixfiles
|
||||
scryfall = spoilers.get_scryfall('https://api.scryfall.com/cards/search?q=++e:' + setinfos['setname'].lower())
|
||||
mtgs = spoilers.get_image_urls(mtgs, presets['isfullspoil'], setinfos['setname'], setinfos['setlongname'], setinfos['setsize']) #get images
|
||||
mtgjson = spoilers.smash_mtgs_scryfall(mtgs, scryfall)
|
||||
[mtgjson, errors] = spoilers.errorcheck(mtgjson) #check for errors where possible
|
||||
errorlog += errors
|
||||
spoilers.write_xml(mtgjson, setinfos['setname'], setinfos['setlongname'], setinfos['setreleasedate'])
|
||||
save_xml(spoilers.pretty_xml(setinfos['setname']), 'out/spoiler.xml')
|
||||
mtgs = spoilers.add_headers(mtgjson, setinfos)
|
||||
AllSets = spoilers.make_allsets(AllSets, mtgjson, setinfos['setname'])
|
||||
if 'masterpieces' in setinfos: #repeat all of the above for masterpieces
|
||||
#masterpieces aren't in the rss feed, so for the new cards, we'll go to their individual pages on mtgs
|
||||
#old cards will get their infos copied from mtgjson (including fields that may not apply like 'artist')
|
||||
#the images will still come from mtgs
|
||||
masterpieces = spoilers.make_masterpieces(setinfos['masterpieces'], AllSets, mtgjson)
|
||||
[masterpieces, errors] = spoilers.errorcheck(masterpieces)
|
||||
errorlog += errors
|
||||
spoilers.write_xml(masterpieces, setinfos['masterpieces']['setname'], setinfos['masterpieces']['setlongname'], setinfos['masterpieces']['setreleasedate'])
|
||||
AllSets = spoilers.make_allsets(AllSets, masterpieces, setinfos['masterpieces']['setname'])
|
||||
save_masterpieces(masterpieces)
|
||||
save_errorlog(errorlog)
|
||||
save_allsets(AllSets)
|
||||
save_setjson(mtgjson)
|
||||
18
mypy.ini
Normal file
18
mypy.ini
Normal file
|
|
@ -0,0 +1,18 @@
|
|||
[mypy]
|
||||
python_version = 3.7
|
||||
|
||||
check_untyped_defs = True
|
||||
disallow_untyped_calls = True
|
||||
disallow_untyped_defs = True
|
||||
disallow_subclassing_any = True
|
||||
follow_imports = normal
|
||||
incremental = True
|
||||
ignore_missing_imports = True
|
||||
strict_optional = True
|
||||
warn_no_return = True
|
||||
warn_redundant_casts = True
|
||||
warn_return_any = True
|
||||
warn_unused_ignores = True
|
||||
|
||||
[mypy-pkg/generated_code/*]
|
||||
ignore_errors = True
|
||||
|
|
@ -1,5 +1,3 @@
|
|||
requests==2.13.0
|
||||
feedparser
|
||||
lxml
|
||||
Pillow
|
||||
datetime
|
||||
requests
|
||||
requests_cache
|
||||
7
requirements_test.txt
Normal file
7
requirements_test.txt
Normal file
|
|
@ -0,0 +1,7 @@
|
|||
black
|
||||
isort
|
||||
mypy
|
||||
pylint
|
||||
pytest
|
||||
pytest-cov
|
||||
tox
|
||||
|
|
@ -1,18 +0,0 @@
|
|||
{
|
||||
"setname": "HOU",
|
||||
"setlongname": "Hour of Devastation",
|
||||
"blockname": "Amonkhet",
|
||||
"setsize": 199,
|
||||
"setreleasedate": "2017-07-14",
|
||||
"settype": "expansion",
|
||||
"masterpieces": {
|
||||
"setname": "MPS_AKH",
|
||||
"setlongname": "Masterpiece Series: Amonkhet Invocations",
|
||||
"setreleasedate": "2017-04-28",
|
||||
"alternativeNames": ["Amonkhet Invocations"],
|
||||
"galleryURL": "http://magic.wizards.com/en/articles/archive/card-preview/masterpiece-series-amonkhet-invocations-2017-03-29",
|
||||
"additionalCardNames": [],
|
||||
"mtgsurl": "http://www.mtgsalvation.com/spoilers/181-amonkhet-invocations",
|
||||
"mtgscardpath": "http://www.mtgsalvation.com/cards/amonkhet-invocations/"
|
||||
}
|
||||
}
|
||||
24
setup.py
Normal file
24
setup.py
Normal file
|
|
@ -0,0 +1,24 @@
|
|||
"""Installation setup for Magic-Spoiler."""
|
||||
|
||||
import setuptools
|
||||
|
||||
# Necessary for TOX
|
||||
setuptools.setup(
|
||||
name="Magic-Spoiler",
|
||||
version="0.1.0",
|
||||
author="Zach Halpern",
|
||||
author_email="zach@cockatrice.us",
|
||||
url="https://github.com/Cockatrice/Magic-Spoiler/",
|
||||
description="Build XML files for distribution of MTG spoiler cards",
|
||||
long_description=open("README.md", "r").read(),
|
||||
long_description_content_type="text/markdown",
|
||||
license="GPL-3.0",
|
||||
classifiers=[
|
||||
"Programming Language :: Python :: 3 :: Only",
|
||||
"Programming Language :: Python :: 3.6",
|
||||
"Programming Language :: Python :: 3.7",
|
||||
"License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)",
|
||||
],
|
||||
keywords="Magic: The Gathering, MTG, XML, Card Games, Collectible, Trading Cards",
|
||||
packages=setuptools.find_packages(),
|
||||
)
|
||||
991
spoilers.py
991
spoilers.py
|
|
@ -1,991 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
import requests
|
||||
import feedparser
|
||||
import re
|
||||
import sys
|
||||
import os
|
||||
import shutil
|
||||
import time
|
||||
from lxml import html, etree
|
||||
from PIL import Image
|
||||
import datetime
|
||||
import urllib
|
||||
import json
|
||||
import xml.dom.minidom
|
||||
|
||||
def scrape_mtgs(url):
|
||||
return requests.get(url, headers={'Cache-Control':'no-cache', 'Pragma':'no-cache', 'Expires': 'Thu, 01 Jan 1970 00:00:00 GMT'}).text
|
||||
|
||||
def parse_mtgs(mtgs, manual_cards=[], card_corrections=[], delete_cards=[], split_cards=[], related_cards=[]):
|
||||
mtgs = mtgs.replace('utf-16','utf-8')
|
||||
patterns = ['<b>Name:</b> <b>(?P<name>.*?)<',
|
||||
'Cost: (?P<cost>\d{0,2}[WUBRGC]*?)<',
|
||||
'Type: (?P<type>.*?)<',
|
||||
'Pow/Tgh: (?P<pow>.*?)<',
|
||||
'Rules Text: (?P<rules>.*?)<br /',
|
||||
'Rarity: (?P<rarity>.*?)<',
|
||||
'Set Number: #(?P<setnumber>.*?)/'
|
||||
]
|
||||
d = feedparser.parse(mtgs)
|
||||
|
||||
cards = []
|
||||
for entry in d.items()[5][1]:
|
||||
card = dict(cost='',cmc='',img='',pow='',name='',rules='',type='',
|
||||
color='', altname='', colorIdentity='', colorArray=[], colorIdentityArray=[], setnumber='', rarity='')
|
||||
summary = entry['summary']
|
||||
for pattern in patterns:
|
||||
match = re.search(pattern, summary, re.MULTILINE|re.DOTALL)
|
||||
if match:
|
||||
dg = match.groupdict()
|
||||
card[dg.items()[0][0]] = dg.items()[0][1]
|
||||
cards.append(card)
|
||||
|
||||
#if we didn't find any cards, let's bail out to prevent overwriting good data
|
||||
count = 0
|
||||
for card in cards:
|
||||
count = count + 1
|
||||
if count < 1:
|
||||
sys.exit("No cards found, exiting to prevent file overwrite")
|
||||
|
||||
#for manual_card in manual_cards:
|
||||
#initialize some keys
|
||||
#manual_card['colorArray'] = []
|
||||
#manual_card['colorIdentityArray'] = []
|
||||
#manual_card['color'] = ''
|
||||
#manual_card['colorIdentity'] = ''
|
||||
#if not manual_card.has_key('rules'):
|
||||
# manual_card['rules'] = ''
|
||||
#if not manual_card.has_key('pow'):
|
||||
# manual_card['pow'] = ''
|
||||
#if not manual_card.has_key('setnumber'):
|
||||
# manual_card['setnumber'] = '0'
|
||||
#if not manual_card.has_key('type'):
|
||||
# manual_card['type'] = ''
|
||||
#see if this is a dupe
|
||||
#and remove the spoiler version
|
||||
#i trust my manual cards over their data
|
||||
#for card in cards:
|
||||
# if card['name'] == manual_card['name']:
|
||||
# cards.remove(card)
|
||||
#cards.append(manual_card)
|
||||
|
||||
for card in cards:
|
||||
card['name'] = card['name'].replace(''', '\'')
|
||||
card['rules'] = card['rules'].replace(''', '\'') \
|
||||
.replace('<i>', '') \
|
||||
.replace('</i>', '') \
|
||||
.replace('"', '"') \
|
||||
.replace('blkocking', 'blocking')\
|
||||
.replace('&bull;','*')\
|
||||
.replace('comes into the','enters the')\
|
||||
.replace('threeor', 'three or')\
|
||||
.replace('[i]','')\
|
||||
.replace('[/i]','')\
|
||||
.replace('Lawlwss','Lawless')\
|
||||
.replace('Costner',"Counter")
|
||||
card['type'] = card['type'].replace(' ',' ')\
|
||||
.replace('Crature', 'Creature')
|
||||
if card['type'][-1] == ' ':
|
||||
card['type'] = card['type'][:-1]
|
||||
#if card['name'] in card_corrections:
|
||||
# for correction in card_corrections[card['name']]:
|
||||
# if correction != 'name':
|
||||
# card[correction] = card_corrections[card['name']][correction]
|
||||
# for correction in card_corrections[card['name']]:
|
||||
# if correction == 'name':
|
||||
# oldname = card['name']
|
||||
# card['name'] = card_corrections[oldname]['name']
|
||||
# card['rules'] = card['rules'].replace(oldname, card_corrections[oldname][correction])
|
||||
if 'cost' in card and len(card['cost']) > 0:
|
||||
workingCMC = 0
|
||||
stripCost = card['cost'].replace('{','').replace('}','')
|
||||
for manaSymbol in stripCost:
|
||||
if manaSymbol.isdigit():
|
||||
workingCMC += int(manaSymbol)
|
||||
elif not manaSymbol == 'X':
|
||||
workingCMC += 1
|
||||
card['cmc'] = workingCMC
|
||||
# figure out color
|
||||
for c in 'WUBRG':
|
||||
if c not in card['colorIdentity']:
|
||||
if c in card['cost']:
|
||||
card['color'] += c
|
||||
card['colorIdentity'] += c
|
||||
if (c + '}') in card['rules'] or (str.lower(c) + '}') in card['rules']:
|
||||
if not (c in card['colorIdentity']):
|
||||
card['colorIdentity'] += c
|
||||
|
||||
cleanedcards = []
|
||||
|
||||
#let's remove any cards that are named in delete_cards array
|
||||
for card in cards:
|
||||
if not card['name'] in delete_cards:
|
||||
cleanedcards.append(card)
|
||||
cards = cleanedcards
|
||||
|
||||
cardlist = []
|
||||
cardarray = []
|
||||
for card in cards:
|
||||
dupe = False
|
||||
for dupecheck in cardarray:
|
||||
if dupecheck['name'] == card['name']:
|
||||
dupe = True
|
||||
if dupe == True:
|
||||
continue
|
||||
#if 'draft' in card['rules']:
|
||||
# continue
|
||||
for cid in card['colorIdentity']:
|
||||
card['colorIdentityArray'].append(cid)
|
||||
if 'W' in card['color']:
|
||||
card['colorArray'].append('White')
|
||||
if 'U' in card['color']:
|
||||
card['colorArray'].append('Blue')
|
||||
if 'B' in card['color']:
|
||||
card['colorArray'].append('Black')
|
||||
if 'R' in card['color']:
|
||||
card['colorArray'].append('Red')
|
||||
if 'G' in card['color']:
|
||||
card['colorArray'].append('Green')
|
||||
cardpower = ''
|
||||
cardtoughness = ''
|
||||
if len(card['pow'].split('/')) > 1:
|
||||
cardpower = card['pow'].split('/')[0]
|
||||
cardtoughness = card['pow'].split('/')[1]
|
||||
cardnames = []
|
||||
cardnumber = card['setnumber'].lstrip('0')
|
||||
if card['name'] in related_cards:
|
||||
cardnames.append(card['name'])
|
||||
cardnames.append(related_cards[card['name']])
|
||||
cardnumber += 'a'
|
||||
card['layout'] = 'double-faced'
|
||||
for namematch in related_cards:
|
||||
if card['name'] == related_cards[namematch]:
|
||||
card['layout'] = 'double-faced'
|
||||
cardnames.append(namematch)
|
||||
if not card['name'] in cardnames:
|
||||
cardnames.append(card['name'])
|
||||
cardnumber += 'b'
|
||||
cardnames = []
|
||||
if card['name'] in split_cards:
|
||||
cardnames.append(card['name'])
|
||||
cardnames.append(split_cards[card['name']])
|
||||
cardnumber = cardnumber.replace('b','').replace('a','') + 'a'
|
||||
card['layout'] = 'split'
|
||||
for namematch in split_cards:
|
||||
if card['name'] == split_cards[namematch]:
|
||||
card['layout'] = 'split'
|
||||
cardnames.append(namematch)
|
||||
if not card['name'] in cardnames:
|
||||
cardnames.append(card['name'])
|
||||
cardnumber = cardnumber.replace('b','').replace('a','') + 'b'
|
||||
if 'number' in card:
|
||||
if 'b' in card['number'] or 'a' in card['number']:
|
||||
if not 'layout' in card:
|
||||
print card['name'] + " has a a/b number but no 'layout'"
|
||||
|
||||
cardtypes = []
|
||||
if not '-' in card['type']:
|
||||
card['type'] = card['type'].replace('instant','Instant').replace('sorcery','Sorcery').replace('creature','Creature')
|
||||
cardtypes.append(card['type'].replace('instant','Instant'))
|
||||
else:
|
||||
cardtypes = card['type'].replace('Legendary ','').split(' - ')[0].split(' ')[:-1]
|
||||
if '-' in card['type']:
|
||||
subtype = card['type'].split(' - ')[1].strip()
|
||||
#if u"—" in card['type']:
|
||||
# subtype = card['type'].split(' — ')[1].strip()
|
||||
if subtype:
|
||||
subtypes = subtype.split(' ')
|
||||
if card['cmc'] == '':
|
||||
card['cmc'] = 0
|
||||
cardjson = {}
|
||||
#cardjson["id"] = hashlib.sha1(setname + card['name'] + str(card['name']).lower()).hexdigest()
|
||||
cardjson["cmc"] = card['cmc']
|
||||
cardjson["manaCost"] = card['cost']
|
||||
cardjson["name"] = card['name']
|
||||
cardjson["number"] = cardnumber
|
||||
#not sure if mtgjson has a list of acceptable rarities, but my application does
|
||||
#so we'll warn me but continue to write a non-standard rarity (timeshifted?)
|
||||
#may force 'special' in the future
|
||||
if card['rarity'] not in ['Mythic Rare','Rare','Uncommon','Common','Special']:
|
||||
#errors.append({"name": card['name'], "key": "rarity", "value": card['rarity']})
|
||||
print card['name'] + ' has rarity = ' + card['rarity']
|
||||
if subtypes:
|
||||
cardjson['subtypes'] = subtypes
|
||||
cardjson["rarity"] = card['rarity']
|
||||
cardjson["text"] = card['rules']
|
||||
cardjson["type"] = card['type']
|
||||
cardjson["url"] = card['img']
|
||||
cardjson["types"] = cardtypes
|
||||
#optional fields
|
||||
if len(card['colorIdentityArray']) > 0:
|
||||
cardjson["colorIdentity"] = card['colorIdentityArray']
|
||||
if len(card['colorArray']) > 0:
|
||||
cardjson["colors"] = card['colorArray']
|
||||
if len(cardnames) > 1:
|
||||
cardjson["names"] = cardnames
|
||||
if cardpower or cardpower == '0':
|
||||
cardjson["power"] = cardpower
|
||||
cardjson["toughness"] = cardtoughness
|
||||
if card.has_key('loyalty'):
|
||||
cardjson["loyalty"] = card['loyalty']
|
||||
if card.has_key('layout'):
|
||||
cardjson["layout"] = card['layout']
|
||||
|
||||
cardarray.append(cardjson)
|
||||
|
||||
return {"cards": cardarray}
|
||||
|
||||
def correct_cards(mtgjson, manual_cards=[], card_corrections=[], delete_cards=[]):
|
||||
mtgjson2 = []
|
||||
for card in manual_cards:
|
||||
if 'cmc' not in card:
|
||||
workingCMC = 0
|
||||
stripCost = card['manaCost'].replace('{','').replace('}','')
|
||||
for manaSymbol in stripCost:
|
||||
if manaSymbol.isdigit():
|
||||
workingCMC += int(manaSymbol)
|
||||
elif not manaSymbol == 'X':
|
||||
workingCMC += 1
|
||||
if 'types' not in card:
|
||||
card['types'] = []
|
||||
# if '—' in card['type']:
|
||||
# workingTypes = card['type'].split('—')[0].strip()
|
||||
# else:
|
||||
workingTypes = card['type'].split('-')[0].strip()
|
||||
workingTypes.replace('Legendary ','').replace('Snow ','')\
|
||||
.replace('Elite ','').replace('Basic ','').replace('World ','').replace('Ongoing ','')
|
||||
card['types'] += workingTypes.split(' ')
|
||||
if 'subtypes' not in card:
|
||||
# if '—' in card['type']:
|
||||
# workingSubtypes = card['type'].split('—')[1].strip()
|
||||
if '-' in card['type']:
|
||||
workingSubtypes = card['type'].split('-')[1].strip()
|
||||
if workingSubtypes:
|
||||
card['subtypes'] = workingSubtypes.split(' ')
|
||||
colorMap = {
|
||||
"W": "White",
|
||||
"U": "Blue",
|
||||
"B": "Black",
|
||||
"R": "Red",
|
||||
"G": "Green"
|
||||
}
|
||||
if 'manaCost' in card:
|
||||
if 'text' in card and not 'Devoid' in card['text']:
|
||||
for letter in card['manaCost']:
|
||||
if not letter.isdigit() and not letter == 'X':
|
||||
if 'colorIdentity' in card:
|
||||
if not letter in card['colorIdentity']:
|
||||
card['colorIdentity'] += letter
|
||||
else:
|
||||
card['colorIdentity'] = [letter]
|
||||
if 'colors' in card:
|
||||
if not colorMap[letter] in card['colors']:
|
||||
card['colors'].append(colorMap[letter])
|
||||
else:
|
||||
card['colors'] = [colorMap[letter]]
|
||||
if 'text' in card:
|
||||
for CID in colorMap:
|
||||
if '{' + CID + '}' in card['text']:
|
||||
if 'colorIdentity' in card:
|
||||
if not CID in card['colorIdentity']:
|
||||
card['colorIdentity'] += CID
|
||||
else:
|
||||
card['colorIdentity'] = [CID]
|
||||
|
||||
for card in mtgjson['cards']:
|
||||
isManual = False
|
||||
for manualCard in manual_cards:
|
||||
if card['name'] == manualCard['name']:
|
||||
mtgjson2.append(manualCard)
|
||||
print 'overwriting card ' + card['name']
|
||||
isManual = True
|
||||
if not isManual and not card['name'] in delete_cards:
|
||||
mtgjson2.append(card)
|
||||
|
||||
for manualCard in manual_cards:
|
||||
addManual = True
|
||||
for card in mtgjson['cards']:
|
||||
if manualCard['name'] == card['name']:
|
||||
addManual = False
|
||||
if addManual:
|
||||
mtgjson2.append(manualCard)
|
||||
print 'inserting manual card ' + manualCard['name']
|
||||
|
||||
mtgjson = {"cards": mtgjson2}
|
||||
|
||||
for card in mtgjson['cards']:
|
||||
for cardCorrection in card_corrections:
|
||||
if card['name'] == cardCorrection:
|
||||
for correctionType in card_corrections[cardCorrection]:
|
||||
if not correctionType == 'name':
|
||||
card[correctionType] = card_corrections[cardCorrection][correctionType]
|
||||
if 'name' in card_corrections[cardCorrection]:
|
||||
card['name'] = card_corrections[cardCorrection]['name']
|
||||
return mtgjson
|
||||
|
||||
def errorcheck(mtgjson):
|
||||
errors = []
|
||||
for card in mtgjson['cards']:
|
||||
for key in card:
|
||||
if key == "":
|
||||
errors.append({"name": card['name'], "key": key, "value": ""})
|
||||
requiredKeys = ['name','type']
|
||||
for requiredKey in requiredKeys:
|
||||
if not requiredKey in card:
|
||||
errors.append({"name": card['name'], "key": key, "missing": True})
|
||||
if 'text' in card:
|
||||
#foo = 1
|
||||
card['text'] = card['text'].replace('<i>','').replace('</i>','').replace('<em>','').replace('</em','').replace('(','')
|
||||
if 'type' in card:
|
||||
if 'Planeswalker' in card['type']:
|
||||
if not 'loyalty' in card:
|
||||
errors.append({"name": card['name'], "key": "loyalty", "value": ""})
|
||||
if not card['rarity'] == 'Mythic Rare':
|
||||
errors.append({"name": card['name'], "key": "rarity", "value": card['rarity']})
|
||||
if not 'subtypes' in card:
|
||||
errors.append({"name": card['name'], "key": "subtypes", "oldvalue": "", "newvalue": card['name'].split(" ")[0], "fixed": True})
|
||||
if not card['name'].split(' ')[0] == 'Ob' and not card['name'].split(' ') == 'Nicol':
|
||||
card["subtypes"] = card['name'].split(" ")[0]
|
||||
else:
|
||||
card["subtypes"] = card['name'].split(" ")[1]
|
||||
if not 'types' in card:
|
||||
#errors.append({"name": card['name'], "key": "types", "fixed": True, "oldvalue": "", "newvalue": ["Planeswalker"]})
|
||||
card['types'] = ["Planeswalker"]
|
||||
elif not "Planeswalker" in card['types']:
|
||||
#errors.append({"name": card['name'], "key": "types", "fixed": True, "oldvalue": card['types'], "newvalue": card['types'] + ["Planeswalker"]})
|
||||
card['types'].append("Planeswalker")
|
||||
if 'Creature' in card['type']:
|
||||
if not 'power' in card:
|
||||
errors.append({"name": card['name'], "key": "power", "value": ""})
|
||||
if not 'toughness' in card:
|
||||
errors.append({"name": card['name'], "key": "toughness", "value": ""})
|
||||
if not 'subtypes' in card:
|
||||
errors.append({"name": card['name'], "key": "subtypes", "value": ""})
|
||||
if 'manaCost' in card:
|
||||
workingCMC = 0
|
||||
stripCost = card['manaCost'].replace('{','').replace('}','')
|
||||
for manaSymbol in stripCost:
|
||||
if manaSymbol.isdigit():
|
||||
workingCMC += int(manaSymbol)
|
||||
elif not manaSymbol == 'X':
|
||||
workingCMC += 1
|
||||
if not 'cmc' in card:
|
||||
errors.append({"name": card['name'], "key": "cmc", "value": ""})
|
||||
elif not card['cmc'] == workingCMC:
|
||||
errors.append({"name": card['name'], "key": "cmc", "oldvalue": card['cmc'], "newvalue": workingCMC, "fixed": True, "match": card['manaCost']})
|
||||
card['cmc'] = workingCMC
|
||||
if not 'cmc' in card:
|
||||
errors.append({"name": card['name'], "key": "cmc", "value": ""})
|
||||
else:
|
||||
if not isinstance(card['cmc'], int):
|
||||
errors.append({"name": card['name'], "key": "cmc", "oldvalue": card['cmc'], "newvalue": int(card['cmc']), "fixed": True})
|
||||
card['cmc'] = int(card['cmc'])
|
||||
else:
|
||||
if card['cmc'] > 0:
|
||||
if not 'manaCost' in card:
|
||||
errors.append({"name": card['name'], "key": "manaCost", "value": "", "match": card['cmc']})
|
||||
else:
|
||||
if 'manaCost' in card:
|
||||
errors.append({"name": card['name'], "key": "manaCost", "oldvalue": card['manaCost'], "fixed": True})
|
||||
del card["manaCost"]
|
||||
if 'colors' in card:
|
||||
if not 'colorIdentity' in card:
|
||||
if 'text' in card:
|
||||
if not 'devoid' in card['text'].lower():
|
||||
errors.append({"name": card['name'], "key": "colorIdentity", "value": ""})
|
||||
else:
|
||||
errors.append({"name": card['name'], "key": "colorIdentity", "value": ""})
|
||||
if 'colorIdentity' in card:
|
||||
if not 'colors' in card:
|
||||
#this one will false positive on emerge cards
|
||||
if not 'Land' in card['type'] and not 'Artifact' in card['type'] and not 'Eldrazi' in card['type']:
|
||||
if 'text' in card:
|
||||
if not 'emerge' in card['text'].lower() and not 'devoid' in card['text'].lower():
|
||||
errors.append({"name": card['name'], "key": "colors", "value": ""})
|
||||
else:
|
||||
errors.append({"name": card['name'], "key": "colors", "value": ""})
|
||||
#if not 'Land' in card['type'] and not 'Artifact' in card['type'] and not 'Eldrazi' in card['type']:
|
||||
# errors.append({"name": card['name'], "key": "colors", "value": ""})
|
||||
if not 'url' in card:
|
||||
errors.append({"name": card['name'], "key": "url", "value": ""})
|
||||
elif len(card['url']) < 10:
|
||||
errors.append({"name": card['name'], "key": "url", "value": ""})
|
||||
if 'layout' in card:
|
||||
if card['layout'] == 'split' or card['layout'] == 'meld' or card['layout'] == 'aftermath':
|
||||
if not 'names' in card:
|
||||
errors.append({"name": card['name'], "key": "names", "value": ""})
|
||||
if 'number' in card:
|
||||
if not 'a' in card['number'] and not 'b' in card['number'] and not 'c' in card['number']:
|
||||
errors.append({"name": card['name'], "key": "number", "value": card['number']})
|
||||
if not 'number' in card:
|
||||
errors.append({"name": card['name'], "key": "number", "value": ""})
|
||||
if not 'types' in card:
|
||||
errors.append({"name": card['name'], "key": "types", "value": ""})
|
||||
#print errors
|
||||
return [mtgjson, errors]
|
||||
|
||||
def get_scryfall(setUrl):
|
||||
#getUrl = 'https://api.scryfall.com/cards/search?q=++e:'
|
||||
#setUrl = getUrl + setname.lower()
|
||||
setDone = False
|
||||
scryfall = []
|
||||
|
||||
#firstPass = True
|
||||
while setDone == False:
|
||||
setcards = requests.get(setUrl)
|
||||
setcards = setcards.json()
|
||||
if setcards.has_key('data'):
|
||||
#if firstPass:
|
||||
# cards[set]["cards"] = []
|
||||
# firstPass = False
|
||||
scryfall.append(setcards['data'])
|
||||
#for setkey in mtgjson[set]:
|
||||
# if 'card' not in setkey:
|
||||
# if set != 'NMS':
|
||||
# cards[set][setkey] = mtgjson[set][setkey]
|
||||
else:
|
||||
setDone = True
|
||||
print setUrl
|
||||
print setcards
|
||||
print 'No data - ' + set
|
||||
#noset.append(set)
|
||||
time.sleep(.1)
|
||||
if setcards.has_key('has_more'):
|
||||
if setcards['has_more'] == True:
|
||||
#print 'Going to extra page of ' + set
|
||||
setUrl = setcards['next_page']
|
||||
else:
|
||||
setDone = True
|
||||
else:
|
||||
setDone = True
|
||||
|
||||
scryfall = convert_scryfall(scryfall[0])
|
||||
return {'cards': scryfall}
|
||||
print
|
||||
|
||||
def convert_scryfall(scryfall):
|
||||
cards2 = []
|
||||
for card in scryfall:
|
||||
card2 = {}
|
||||
card2['cmc'] = int((card['cmc']).split('.')[0])
|
||||
if card.has_key('mana_cost'):
|
||||
card2['manaCost'] = card['mana_cost'].replace('{','').replace('}','')
|
||||
else:
|
||||
card2['manaCost'] = ''
|
||||
card2['name'] = card['name']
|
||||
card2['number'] = card['collector_number']
|
||||
card2['rarity'] = card['rarity'].replace('mythic','mythic rare').title()
|
||||
if card.has_key('oracle_text'):
|
||||
card2['text'] = card['oracle_text'].replace(u"\u2022 ", u'*').replace(u"\u2014",'-').replace(u"\u2212","-")
|
||||
else:
|
||||
card2['text'] = ''
|
||||
card2['url'] = card['image_uri']
|
||||
card2['type'] = card['type_line'].replace(u'—','-')
|
||||
cardtypes = card['type_line'].split(u' — ')[0].replace('Legendary ','').replace('Snow ','')\
|
||||
.replace('Elite ','').replace('Basic ','').replace('World ','').replace('Ongoing ','')
|
||||
cardtypes = cardtypes.split(' ')
|
||||
if u' — ' in card['type_line']:
|
||||
cardsubtypes = card['type_line'].split(u' — ')[1]
|
||||
if ' ' in cardsubtypes:
|
||||
card2['subtypes'] = cardsubtypes.split(' ')
|
||||
else:
|
||||
card2['subtypes'] = [cardsubtypes]
|
||||
if 'Legendary' in card['type_line']:
|
||||
if card2.has_key('supertypes'):
|
||||
card2['supertypes'].append('Legendary')
|
||||
else:
|
||||
card2['supertypes'] = ['Legendary']
|
||||
if 'Snow' in card['type_line']:
|
||||
if card2.has_key('supertypes'):
|
||||
card2['supertypes'].append('Snow')
|
||||
else:
|
||||
card2['supertypes'] = ['Snow']
|
||||
if 'Elite' in card['type_line']:
|
||||
if card2.has_key('supertypes'):
|
||||
card2['supertypes'].append('Elite')
|
||||
else:
|
||||
card2['supertypes'] = ['Elite']
|
||||
if 'Basic' in card['type_line']:
|
||||
if card2.has_key('supertypes'):
|
||||
card2['supertypes'].append('Basic')
|
||||
else:
|
||||
card2['supertypes'] = ['Basic']
|
||||
if 'World' in card['type_line']:
|
||||
if card2.has_key('supertypes'):
|
||||
card2['supertypes'].append('World')
|
||||
else:
|
||||
card2['supertypes'] = ['World']
|
||||
if 'Ongoing' in card['type_line']:
|
||||
if card2.has_key('supertypes'):
|
||||
card2['supertypes'].append('Ongoing')
|
||||
else:
|
||||
card2['supertypes'] = ['Ongoing']
|
||||
card2['types'] = cardtypes
|
||||
if card.has_key('color_identity'):
|
||||
card2['colorIdentity'] = card['color_identity']
|
||||
if card.has_key('colors'):
|
||||
if not card['colors'] == []:
|
||||
card2['colors'] = []
|
||||
if 'W' in card['colors']:
|
||||
card2['colors'].append("White")
|
||||
if 'U' in card['colors']:
|
||||
card2['colors'].append("Blue")
|
||||
if 'B' in card['colors']:
|
||||
card2['colors'].append("Black")
|
||||
if 'R' in card['colors']:
|
||||
card2['colors'].append("Red")
|
||||
if 'G' in card['colors']:
|
||||
card2['colors'].append("Green")
|
||||
#card2['colors'] = card['colors']
|
||||
if card.has_key('all_parts'):
|
||||
card2['names'] = []
|
||||
for partname in card['all_parts']:
|
||||
card2['names'].append(partname['name'])
|
||||
if card.has_key('power'):
|
||||
card2['power'] = card['power']
|
||||
if card.has_key('toughness'):
|
||||
card2['toughness'] = card['toughness']
|
||||
if card.has_key('layout'):
|
||||
if card['layout'] != 'normal':
|
||||
card2['layout'] = card['layout']
|
||||
if card.has_key('loyalty'):
|
||||
card2['loyalty'] = card['loyalty']
|
||||
if card.has_key('artist'):
|
||||
card2['artist'] = card['artist']
|
||||
#if card.has_key('source'):
|
||||
# card2['source'] = card['source']
|
||||
#if card.has_key('rulings'):
|
||||
# card2['rulings'] = card['rulings']
|
||||
if card.has_key('flavor_text'):
|
||||
card2['flavor'] = card['flavor_text']
|
||||
if card.has_key('multiverse_id'):
|
||||
card2['multiverseid'] = card['multiverse_id']
|
||||
|
||||
cards2.append(card2)
|
||||
|
||||
return cards2
|
||||
print
|
||||
|
||||
def smash_mtgs_scryfall(mtgs, scryfall):
|
||||
for mtgscard in mtgs['cards']:
|
||||
cardFound = False
|
||||
for scryfallcard in scryfall['cards']:
|
||||
if scryfallcard['name'] == mtgscard['name']:
|
||||
for key in scryfallcard:
|
||||
if key in mtgscard:
|
||||
if not mtgscard[key] == scryfallcard[key]:
|
||||
print "%s's key %s\nMTGS : %s\nScryfall: %s" % (mtgscard['name'], key, mtgscard[key], scryfallcard[key])
|
||||
cardFound = True
|
||||
if not cardFound:
|
||||
print "MTGS has card %s and Scryfall does not." % mtgscard['name']
|
||||
for scryfallcard in scryfall['cards']:
|
||||
cardFound = False
|
||||
for mtgscard in mtgs['cards']:
|
||||
if scryfallcard['name'] == mtgscard['name']:
|
||||
cardFound = True
|
||||
if not cardFound:
|
||||
print "Scryfall has card %s and MTGS does not." % scryfallcard['name']
|
||||
|
||||
return mtgs
|
||||
|
||||
def scrape_fullspoil(url, showRarityColors=False, showFrameColors=False, manual_cards=[], delete_cards=[], split_cards=[]):
|
||||
page = requests.get(url)
|
||||
tree = html.fromstring(page.content)
|
||||
cards = []
|
||||
cardtree = tree.xpath('//*[@id="content-detail-page-of-an-article"]')
|
||||
for child in cardtree:
|
||||
cardElements = child.xpath('//*/p/img')
|
||||
cardcount = 0
|
||||
for cardElement in cardElements:
|
||||
card = {
|
||||
"name": cardElement.attrib['alt'].replace(u"\u2019",'\'').split(' /// ')[0],
|
||||
"img": cardElement.attrib['src']
|
||||
}
|
||||
card["url"] = card["img"]
|
||||
card["cmc"] = 0
|
||||
card["manaCost"] = ""
|
||||
card["type"] = "Land"
|
||||
card["types"] = ["Land"]
|
||||
card["text"] = ""
|
||||
#card["colorIdentity"] = [""]
|
||||
|
||||
if card['name'] in split_cards:
|
||||
card["names"] = [card['name'], split_cards[card['name']]]
|
||||
card["layout"] = "split"
|
||||
notSplit = True
|
||||
for backsplit in split_cards:
|
||||
if card['name'] == split_cards[backsplit]:
|
||||
notSplit = False
|
||||
if notSplit and not card['name'] in delete_cards:
|
||||
cards.append(card)
|
||||
cardcount += 1
|
||||
print "Spoil Gallery has " + str(cardcount) + " cards."
|
||||
#print mtgjson
|
||||
#print cards
|
||||
#return cards
|
||||
get_rarities_by_symbol(fullspoil, showRarityColors)
|
||||
get_colors_by_frame(fullspoil, showFrameColors)
|
||||
return cards
|
||||
|
||||
def get_rarities_by_symbol(fullspoil, split_cards=[]):
|
||||
symbolPixels = (234, 215, 236, 218)
|
||||
highVariance = 15
|
||||
colorAverages = {
|
||||
"Common": [225, 224, 225],
|
||||
"Uncommon": [194, 228, 240],
|
||||
"Rare": [225, 201, 134],
|
||||
"Mythic Rare": [249, 163, 15]
|
||||
}
|
||||
#symbolCount = 0
|
||||
for card in fullspoil:
|
||||
cardImage = Image.open('images/' + card['name'] + '.png')
|
||||
if card['name'] in split_cards:
|
||||
setSymbol = cardImage.crop((234, 134, 236, 137))
|
||||
else:
|
||||
setSymbol = cardImage.crop(symbolPixels)
|
||||
cardHistogram = setSymbol.histogram()
|
||||
reds = cardHistogram[0:256]
|
||||
greens = cardHistogram[256:256 * 2]
|
||||
blues = cardHistogram[256 * 2: 256 * 3]
|
||||
reds = sum(i * w for i, w in enumerate(reds)) / sum(reds)
|
||||
greens = sum(i * w for i, w in enumerate(greens)) / sum(greens)
|
||||
blues = sum(i * w for i, w in enumerate(blues)) / sum(blues)
|
||||
variance = 768
|
||||
for color in colorAverages:
|
||||
colorVariance = 0
|
||||
colorVariance = colorVariance + abs(colorAverages[color][0] - reds)
|
||||
colorVariance = colorVariance + abs(colorAverages[color][1] - greens)
|
||||
colorVariance = colorVariance + abs(colorAverages[color][2] - blues)
|
||||
if colorVariance < variance:
|
||||
variance = colorVariance
|
||||
card['rarity'] = color
|
||||
if variance > highVariance:
|
||||
# if a card isn't close to any of the colors, it's probably a planeswalker? make it mythic.
|
||||
print card['name'], 'has high variance of', variance, ', closest rarity is', card['rarity']
|
||||
card['rarity'] = "Mythic Rare"
|
||||
print card['name'], '$', reds, greens, blues
|
||||
#if symbolCount < 10:
|
||||
#setSymbol.save('images/' + card['name'] + '.symbol.jpg')
|
||||
# symbolCount += 1
|
||||
return fullspoil
|
||||
print
|
||||
|
||||
def get_colors_by_frame(fullspoil, split_cards=[]):
|
||||
framePixels = (20, 11, 76, 16)
|
||||
highVariance = 10
|
||||
colorAverages = {
|
||||
"White": [231,225,200],
|
||||
"Blue": [103,193,230],
|
||||
"Black": [58, 61, 54],
|
||||
"Red": [221, 122, 101],
|
||||
"Green": [118, 165, 131],
|
||||
"Multicolor": [219, 200, 138],
|
||||
"Artifact": [141, 165, 173],
|
||||
"Colorless": [216, 197, 176],
|
||||
}
|
||||
#symbolCount = 0
|
||||
for card in fullspoil:
|
||||
cardImage = Image.open('images/' + card['name'] + '.png')
|
||||
if card['name'] in split_cards:
|
||||
continue
|
||||
#setSymbol = cardImage.crop((234, 134, 236, 137))
|
||||
#else:
|
||||
cardColor = cardImage.crop(framePixels)
|
||||
|
||||
cardHistogram = cardColor.histogram()
|
||||
reds = cardHistogram[0:256]
|
||||
greens = cardHistogram[256:256 * 2]
|
||||
blues = cardHistogram[256 * 2: 256 * 3]
|
||||
reds = sum(i * w for i, w in enumerate(reds)) / sum(reds)
|
||||
greens = sum(i * w for i, w in enumerate(greens)) / sum(greens)
|
||||
blues = sum(i * w for i, w in enumerate(blues)) / sum(blues)
|
||||
variance = 768
|
||||
for color in colorAverages:
|
||||
colorVariance = 0
|
||||
colorVariance = colorVariance + abs(colorAverages[color][0] - reds)
|
||||
colorVariance = colorVariance + abs(colorAverages[color][1] - greens)
|
||||
colorVariance = colorVariance + abs(colorAverages[color][2] - blues)
|
||||
if colorVariance < variance:
|
||||
variance = colorVariance
|
||||
card['colors'] = [color]
|
||||
if variance > highVariance:
|
||||
# if a card isn't close to any of the colors, it's probably a planeswalker? make it mythic.
|
||||
#print card['name'], 'has high variance of', variance, ', closest rarity is', card['color']
|
||||
print card['name'], '$ colors $', reds, greens, blues
|
||||
#if 'Multicolor' in card['colors'] or 'Colorless' in card['colors'] or 'Artifact' in card['colors']:
|
||||
# card['colors'] = []
|
||||
#if symbolCount < 10:
|
||||
#cardColor.save('images/' + card['name'] + '.symbol.jpg')
|
||||
# symbolCount += 1
|
||||
return fullspoil
|
||||
|
||||
def get_image_urls(mtgjson, isfullspoil, setname, setlongname, setSize=269):
|
||||
IMAGES = 'http://magic.wizards.com/en/content/' + setlongname.lower().replace(' ', '-') + '-cards'
|
||||
IMAGES2 = 'http://mythicspoiler.com/newspoilers.html'
|
||||
IMAGES3 = 'http://magic.wizards.com/en/articles/archive/card-image-gallery/' + setlongname.lower().replace(' ', '-')
|
||||
|
||||
text = requests.get(IMAGES).text
|
||||
text2 = requests.get(IMAGES2).text
|
||||
text3 = requests.get(IMAGES3).text
|
||||
wotcpattern = r'<img alt="{}.*?" src="(?P<img>.*?\.png)"'
|
||||
mythicspoilerpattern = r' src="' + setname.lower() + '/cards/{}.*?.jpg">'
|
||||
for c in mtgjson['cards']:
|
||||
match = re.search(wotcpattern.format(c['name'].replace('\'','’')), text, re.DOTALL)
|
||||
if match:
|
||||
c['url'] = match.groupdict()['img']
|
||||
else:
|
||||
match3 = re.search(wotcpattern.format(c['name'].replace('\'','’')), text3, re.DOTALL)
|
||||
if match3:
|
||||
c['url'] = match3.groupdict()['img']
|
||||
else:
|
||||
match2 = re.search(mythicspoilerpattern.format((c['name']).lower().replace(' ', '').replace(''', '').replace('-', '').replace('\'','').replace(',', '')), text2, re.DOTALL)
|
||||
if match2 and not isfullspoil:
|
||||
c['url'] = match2.group(0).replace(' src="', 'http://mythicspoiler.com/').replace('">', '')
|
||||
pass
|
||||
#if ('Creature' in c['type'] and not c.has_key('power')) or ('Vehicle' in c['type'] and not c.has_key('power')):
|
||||
# print(c['name'] + ' is a creature w/o p/t img: ' + c['url'])
|
||||
if len(str(c['url'])) < 10:
|
||||
print(c['name'] + ' has no image.')
|
||||
return mtgjson
|
||||
|
||||
def write_xml(mtgjson, setname, setlongname, setreleasedate, split_cards=[]):
|
||||
if not os.path.isdir('out/'):
|
||||
os.makedirs('out/')
|
||||
cardsxml = open('out/' + setname + '.xml', 'w+')
|
||||
cardsxml.truncate()
|
||||
count = 0
|
||||
dfccount = 0
|
||||
newest = ''
|
||||
related = 0
|
||||
cardsxml.write("<?xml version='1.0' encoding='UTF-8'?>\n"
|
||||
"<cockatrice_carddatabase version='3'>\n"
|
||||
"<sets>\n<set>\n<name>"
|
||||
+ setname +
|
||||
"</name>\n"
|
||||
"<longname>"
|
||||
+ setlongname +
|
||||
"</longname>\n"
|
||||
"<settype>Expansion</settype>\n"
|
||||
"<releasedate>"
|
||||
+ setreleasedate +
|
||||
"</releasedate>\n"
|
||||
"</set>\n"
|
||||
"</sets>\n"
|
||||
"<cards>\n")
|
||||
#print mtgjson
|
||||
for card in mtgjson["cards"]:
|
||||
for carda in split_cards:
|
||||
if card["name"] == split_cards[carda]:
|
||||
continue
|
||||
if count == 0:
|
||||
newest = card["name"]
|
||||
count += 1
|
||||
name = card["name"]
|
||||
if card.has_key("manaCost"):
|
||||
manacost = card["manaCost"].replace('{', '').replace('}', '')
|
||||
else:
|
||||
manacost = ""
|
||||
if card.has_key("power") or card.has_key("toughness"):
|
||||
if card["power"]:
|
||||
pt = str(card["power"]) + "/" + str(card["toughness"])
|
||||
else:
|
||||
pt = 0
|
||||
else:
|
||||
pt = 0
|
||||
if card.has_key("text"):
|
||||
text = card["text"]
|
||||
else:
|
||||
text = ""
|
||||
cardcmc = str(card['cmc'])
|
||||
cardtype = card["type"]
|
||||
if card.has_key("names"):
|
||||
if "layout" in card:
|
||||
if card["layout"] != 'split':
|
||||
if len(card["names"]) > 1:
|
||||
if card["names"][0] == card["name"]:
|
||||
related = card["names"][1]
|
||||
text += '\n\n(Related: ' + card["names"][1] + ')'
|
||||
dfccount += 1
|
||||
elif card['names'][1] == card['name']:
|
||||
related = card["names"][0]
|
||||
text += '\n\n(Related: ' + card["names"][0] + ')'
|
||||
else:
|
||||
for carda in split_cards:
|
||||
if card["name"] == carda:
|
||||
cardb = split_cards[carda]
|
||||
for jsoncard in mtgjson["cards"]:
|
||||
if cardb == jsoncard["name"]:
|
||||
cardtype += " // " + jsoncard["type"]
|
||||
manacost += " // " + (jsoncard["manaCost"]).replace('{', '').replace('}', '')
|
||||
cardcmc += " // " + str(jsoncard["cmc"])
|
||||
text += "\n---\n" + jsoncard["text"]
|
||||
name += " // " + cardb
|
||||
else:
|
||||
print card["name"] + " has multiple names and no 'layout' key"
|
||||
|
||||
|
||||
tablerow = "1"
|
||||
if "Land" in cardtype:
|
||||
tablerow = "0"
|
||||
elif "Sorcery" in cardtype:
|
||||
tablerow = "3"
|
||||
elif "Instant" in cardtype:
|
||||
tablerow = "3"
|
||||
elif "Creature" in cardtype:
|
||||
tablerow = "2"
|
||||
|
||||
if 'number' in card:
|
||||
if 'b' in card['number']:
|
||||
if 'layout' in card:
|
||||
if card['layout'] == 'split':
|
||||
#print "We're skipping " + card['name'] + " because it's the right side of a split card"
|
||||
continue
|
||||
|
||||
cardsxml.write("<card>\n")
|
||||
cardsxml.write("<name>" + name.encode('utf-8') + "</name>\n")
|
||||
cardsxml.write('<set rarity="' + card['rarity'] + '" picURL="' + card["url"] + '">' + setname + '</set>\n')
|
||||
cardsxml.write("<manacost>" + manacost.encode('utf-8') + "</manacost>\n")
|
||||
cardsxml.write("<cmc>" + cardcmc + "</cmc>\n")
|
||||
if card.has_key('colors'):
|
||||
colorTranslate = {
|
||||
"White": "W",
|
||||
"Blue": "U",
|
||||
"Black": "B",
|
||||
"Red": "R",
|
||||
"Green": "G"
|
||||
}
|
||||
for color in card['colors']:
|
||||
cardsxml.write('<color>' + colorTranslate[color] + '</color>\n')
|
||||
if name + ' enters the battlefield tapped' in text:
|
||||
cardsxml.write("<cipt>1</cipt>\n")
|
||||
cardsxml.write("<type>" + cardtype.encode('utf-8') + "</type>\n")
|
||||
if pt:
|
||||
cardsxml.write("<pt>" + pt + "</pt>\n")
|
||||
if card.has_key('loyalty'):
|
||||
cardsxml.write("<loyalty>" + str(card['loyalty']) + "</loyalty>\n")
|
||||
cardsxml.write("<tablerow>" + tablerow + "</tablerow>\n")
|
||||
cardsxml.write("<text>" + text.encode('utf-8') + "</text>\n")
|
||||
if related:
|
||||
# for relatedname in related:
|
||||
cardsxml.write("<related>" + related.encode('utf-8') + "</related>\n")
|
||||
related = ''
|
||||
|
||||
cardsxml.write("</card>\n")
|
||||
|
||||
cardsxml.write("</cards>\n</cockatrice_carddatabase>")
|
||||
|
||||
print 'XML STATS'
|
||||
print 'Total cards: ' + str(count)
|
||||
if dfccount > 0:
|
||||
print 'DFC: ' + str(dfccount)
|
||||
print 'Newest: ' + str(newest)
|
||||
print 'Runtime: ' + str(datetime.datetime.today().strftime('%H:%M')) + ' on ' + str(datetime.date.today())
|
||||
|
||||
def pretty_xml(setcode):
    """Return a pretty-printed string of the generated out/<setcode>.xml file.

    Parses the XML previously written for this set code and re-serializes it
    with indentation (no extra newlines between nodes).
    """
    dom = xml.dom.minidom.parse('out/' + setcode + '.xml')
    return dom.toprettyxml(newl='')
|
||||
|
||||
def make_allsets(AllSets, mtgjson, setname):
    """Store a set's JSON under its set code and return the updated mapping.

    Mutates `AllSets` in place and returns the same object for chaining.
    """
    AllSets.update({setname: mtgjson})
    return AllSets
|
||||
|
||||
def scrape_masterpieces(url='http://www.mtgsalvation.com/spoilers/181-amonkhet-invocations', cardurl='http://www.mtgsalvation.com/cards/amonkhet-invocations/'):
    """Scrape masterpiece card names and image URLs from MTG Salvation.

    Fetches the spoiler index page, then each card's detail page, and
    returns a list of {"name": ..., "url": ...} dicts (url is the image src).
    """
    index_tree = html.fromstring(requests.get(url).content)
    results = []
    for node in index_tree.xpath('//*[contains(@class, "log-card")]'):
        # Detail page path is "<card id>-<name with spaces dashed>".
        detail_page = requests.get(cardurl + node.attrib['data-card-id'] + '-' + node.text.replace(' ', '-'))
        detail_tree = html.fromstring(detail_page.content)
        images = detail_tree.xpath('//img[contains(@class, "card-spoiler-image")]')
        results.append({
            "name": node.text,
            "url": images[0].attrib['src']
        })
    return results
|
||||
|
||||
def make_masterpieces(headers, AllSets, spoil):
    # Build a pseudo-set JSON for masterpiece cards.
    # For each scraped masterpiece (name + image url), try to find a full card
    # object by name: first in the historical AllSets data, then in the current
    # spoiler set; fall back to the bare scraped dict if no match is found.
    # NOTE: Python 2 code (print statement below).
    masterpieces = scrape_masterpieces(headers['mtgsurl'], headers['mtgscardpath'])
    masterpieces2 = []
    for masterpiece in masterpieces:
        matched = False
        # Pass 1: look through every previously-known set for the card name.
        for set in AllSets:
            if not matched:
                for oldcard in AllSets[set]['cards']:
                    if oldcard['name'] == masterpiece['name'] and not matched:
                        # NOTE(review): mixcard aliases oldcard, so the card
                        # inside AllSets is mutated in place (url/rarity
                        # overwritten) — confirm this side effect is intended.
                        mixcard = oldcard
                        mixcard['url'] = masterpiece['url']
                        mixcard['rarity'] = 'Mythic Rare'
                        masterpieces2.append(mixcard)
                        matched = True
                        break
        # Pass 2: fall back to the current spoiler's cards.
        for spoilcard in spoil['cards']:
            if not matched:
                if spoilcard['name'] == masterpiece['name']:
                    # Same aliasing as above: spoilcard is mutated in place.
                    mixcard = spoilcard
                    mixcard['rarity'] = 'Mythic Rare'
                    mixcard['url'] = masterpiece['url']
                    masterpieces2.append(mixcard)
                    matched = True
                    break
        if not matched:
            # Keep the bare scraped dict so the masterpiece is not lost.
            print "We couldn't find a card object to assign the data to for masterpiece " + masterpiece['name']
            masterpieces2.append(masterpiece)
    # Assemble the set-level wrapper.
    # NOTE(review): "code" is hard-coded to MPS_AKH even though the other
    # headers come from `headers` — presumably stale; verify against callers.
    mpsjson = {
        "name": headers['setlongname'],
        "alternativeNames": headers['alternativeNames'],
        "code": "MPS_AKH",
        "releaseDate": headers['setreleasedate'],
        "border": "black",
        "type": "masterpiece",
        "cards": masterpieces2
    }
    return mpsjson
|
||||
|
||||
def get_allsets():
    # Download mtgjson's AllSets.json to a local file and return it parsed.
    # NOTE: Python 2 only — urllib.FancyURLopener does not exist under this
    # name in Python 3 (and was removed entirely in 3.12).
    class MyOpener(urllib.FancyURLopener):
        # Spoof a browser User-Agent; presumably mtgjson.com rejects the
        # default urllib agent — TODO confirm this is still needed.
        version = 'Mozilla/5.0 (Windows; U; Windows NT 5.1; it; rv:1.8.1.11) Gecko / 20071127 Firefox / 2.0.0.11'

    opener = MyOpener()
    # Side effect: leaves AllSets.pre.json in the working directory.
    opener.retrieve('http://mtgjson.com/json/AllSets.json', 'AllSets.pre.json')
    with open('AllSets.pre.json') as data_file:
        AllSets = json.load(data_file)
    return AllSets
|
||||
|
||||
def add_headers(mtgjson, setinfos):
    """Wrap a spoiler card list in the standard mtgjson set-level metadata.

    Returns a new dict carrying block/code/name/date/type from `setinfos`,
    a standard 15-card booster layout, and the cards from `mtgjson`.
    """
    # Standard booster: one rare-or-mythic slot, 3 uncommons, 10 commons,
    # a land, and a marketing card.
    booster = [["rare", "mythic rare"]]
    booster += ["uncommon"] * 3
    booster += ["common"] * 10
    booster += ["land", "marketing"]
    return {
        "block": setinfos['blockname'],
        "border": "black",
        "code": setinfos['setname'],
        "magicCardsInfoCode": setinfos['setname'].lower(),
        "name": setinfos['setlongname'],
        "releaseDate": setinfos['setreleasedate'],
        "type": setinfos['settype'],
        "booster": booster,
        "cards": mtgjson['cards']
    }
|
||||
56
tox.ini
Normal file
56
tox.ini
Normal file
|
|
@ -0,0 +1,56 @@
|
|||
# tox.ini — lint/format/type-check environments for magic_spoiler, run via `tox`
# (configparser dialect: '#' comments, indented continuation lines)

[tox]
envlist = isort-inplace, black-inplace, mypy, lint

[testenv]
basepython = python3.7
deps =
    -r{toxinidir}/requirements.txt
    -r{toxinidir}/requirements_test.txt
setenv = PYTHONPATH = {toxinidir}
# FIX: passenv takes environment-variable NAMES only; the previous
# "passenv = PYTHONPATH = {toxinidir}" assignment form was invalid.
passenv = PYTHONPATH

[testenv:black-inplace]
description = Run black and edit all files in place
skip_install = True
deps = black
commands = black magic_spoiler/

# Active Tests
[testenv:yapf-inplace]
description = Run yapf and edit all files in place
skip_install = True
deps = yapf
commands = yapf --in-place --recursive --parallel magic_spoiler/

[testenv:mypy]
description = mypy static type checking only
deps = mypy
commands = mypy {posargs:magic_spoiler/}

[testenv:lint]
description = Run linting tools
deps = pylint
commands = pylint magic_spoiler/ --rcfile=.pylintrc

# Inactive Tests
[testenv:yapf-check]
description = Dry-run yapf to see if reformatting is needed
skip_install = True
deps = yapf
# TODO make it error exit if there's a diff
commands = yapf --diff --recursive --parallel magic_spoiler/

[testenv:isort-check]
description = dry-run isort to see if imports need resorting
deps = isort
# FIX: give isort the same target as isort-inplace; with no path the
# check previously inspected nothing.
commands = isort --check-only -rc magic_spoiler/

[testenv:isort-inplace]
description = Sort imports
deps = isort
commands = isort -rc magic_spoiler/

[testenv:unit]
description = Run unit tests with coverage and mypy type checking
extras = dev
# FIX: pytest-cov provides the --cov option used below; without it the
# environment fails with "unrecognized arguments: --cov".
deps =
    pytest
    pytest-cov
commands = pytest --cov=magic_spoiler {posargs:tests/}
|
||||
Loading…
Reference in New Issue
Block a user