initial commit

crflynn
2018-04-05 01:07:14 -04:00
commit 741e385e1e
28 changed files with 1627 additions and 0 deletions

13
.gitignore vendored Normal file

@@ -0,0 +1,13 @@
# credentials
secret.*
# mac osx
.DS_Store
# python bytecode
*.py[cod]
__pycache__
# ignore
ignore/

21
Pipfile Normal file

@@ -0,0 +1,21 @@
[[source]]
url = "https://pypi.python.org/simple"
verify_ssl = true
name = "pypi"

[packages]
google-cloud-bigquery = "*"
pandas = "*"
"psycopg2" = "*"
flask = "*"
github-flask = "*"
flask-sqlalchemy = "*"
flask-migrate = "*"
flask-login = "*"

[dev-packages]

379
Pipfile.lock generated Normal file

@@ -0,0 +1,379 @@
{
"_meta": {
"hash": {
"sha256": "36b3e674443d732f498a3aad93d143a2036297b5a24fbb282a7d48cd4dd83ad2"
},
"host-environment-markers": {
"implementation_name": "cpython",
"implementation_version": "3.6.3",
"os_name": "posix",
"platform_machine": "x86_64",
"platform_python_implementation": "CPython",
"platform_release": "17.3.0",
"platform_system": "Darwin",
"platform_version": "Darwin Kernel Version 17.3.0: Thu Nov 9 18:09:22 PST 2017; root:xnu-4570.31.3~1/RELEASE_X86_64",
"python_full_version": "3.6.3",
"python_version": "3.6",
"sys_platform": "darwin"
},
"pipfile-spec": 6,
"requires": {},
"sources": [
{
"name": "pypi",
"url": "https://pypi.python.org/simple",
"verify_ssl": true
}
]
},
"default": {
"alembic": {
"hashes": [
"sha256:85bd3ea7633024e4930900bc64fb58f9742dedbc6ebb6ecf25be2ea9a3c1b32e"
],
"version": "==0.9.9"
},
"cachetools": {
"hashes": [
"sha256:4319bbb78172e7bcf99423e1ecd6914b32336ccfe97d2058ffe62e641a7f3abe",
"sha256:ede01f2d3cbd6ddc9e35e16c2b0ce011d8bb70ce0dbaf282f5b4df24b213bc5d"
],
"version": "==2.0.1"
},
"certifi": {
"hashes": [
"sha256:14131608ad2fd56836d33a71ee60fa1c82bc9d2c8d98b7bdbc631fe1b3cd1296",
"sha256:edbc3f203427eef571f79a7692bb160a2b0f7ccaa31953e99bd17e307cf63f7d"
],
"version": "==2018.1.18"
},
"chardet": {
"hashes": [
"sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691",
"sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae"
],
"version": "==3.0.4"
},
"click": {
"hashes": [
"sha256:29f99fc6125fbc931b758dc053b3114e55c77a6e4c6c3a2674a2dc986016381d",
"sha256:f15516df478d5a56180fbf80e68f206010e6d160fc39fa508b65e035fd75130b"
],
"version": "==6.7"
},
"flask": {
"hashes": [
"sha256:0749df235e3ff61ac108f69ac178c9770caeaccad2509cb762ce1f65570a8856",
"sha256:49f44461237b69ecd901cc7ce66feea0319b9158743dd27a2899962ab214dac1"
],
"version": "==0.12.2"
},
"flask-login": {
"hashes": [
"sha256:c815c1ac7b3e35e2081685e389a665f2c74d7e077cb93cecabaea352da4752ec"
],
"version": "==0.4.1"
},
"flask-migrate": {
"hashes": [
"sha256:493f9b3795985b9b4915bf3b7d16946697f027b73545384e7d9e3a79f989d2fe",
"sha256:b709ca8642559c3c5a81a33ab10839fa052177accd5ba821047a99db635255ed"
],
"version": "==2.1.1"
},
"flask-sqlalchemy": {
"hashes": [
"sha256:3bc0fac969dd8c0ace01b32060f0c729565293302f0c4269beed154b46bec50b",
"sha256:5971b9852b5888655f11db634e87725a9031e170f37c0ce7851cf83497f56e53"
],
"version": "==2.3.2"
},
"github-flask": {
"hashes": [
"sha256:24600b720f698bac10667b76b136995ba7821d884e58b27e2a18ca0e4760c786"
],
"version": "==3.2.0"
},
"google-api-core": {
"hashes": [
"sha256:7618a9c2ee84c0d99f9c7823675c15024b25094bb920f49d204a72107d856aac",
"sha256:b4f103de6bd38ab346f7d17236f6098a51ebdff733ff69956a0f1e29cb35f10b"
],
"version": "==1.1.0"
},
"google-auth": {
"hashes": [
"sha256:34088434cb2a2409360b8f3cbc04195a465df1fb2aafad71ebbded77cbf08803",
"sha256:9051802d3dae256036cca9e34633a32c0ed1427730d4ebc513dff91ec8b6dd45"
],
"version": "==1.4.1"
},
"google-cloud-bigquery": {
"hashes": [
"sha256:dfb9b2819d5731a42e7e5e003938be7ceda66b40c8ffb67a44073d45aca94b7a",
"sha256:6374a68ef232ae93b6bc364e62c37c9e2bc1fffdd017ea10ffe6a65393f40acb"
],
"version": "==0.31.0"
},
"google-cloud-core": {
"hashes": [
"sha256:0090df83dbc5cb2405fa90844366d13176d1c0b48181c1807ab15f53be403f73",
"sha256:89e8140a288acec20c5e56159461d3afa4073570c9758c05d4e6cb7f2f8cc440"
],
"version": "==0.28.1"
},
"google-resumable-media": {
"hashes": [
"sha256:116de90b9cd483b17c53618ee6a5a20f33e741c648140c8cc9c2141e07616ff1",
"sha256:97de518f8166d442cc0b61fab308bcd319dbb970981e667ec8ded44f5ce49836"
],
"version": "==0.3.1"
},
"googleapis-common-protos": {
"hashes": [
"sha256:c075eddaa2628ab519e01b7d75b76e66c40eaa50fc52758d8225f84708950ef2"
],
"version": "==1.5.3"
},
"idna": {
"hashes": [
"sha256:8c7309c718f94b3a625cb648ace320157ad16ff131ae0af362c9f21b80ef6ec4",
"sha256:2c6a5de3089009e3da7c5dde64a141dbc8551d5b7f6cf4ed7c2568d0cc520a8f"
],
"version": "==2.6"
},
"itsdangerous": {
"hashes": [
"sha256:cbb3fcf8d3e33df861709ecaf89d9e6629cff0a217bc2848f1b41cd30d360519"
],
"version": "==0.24"
},
"jinja2": {
"hashes": [
"sha256:74c935a1b8bb9a3947c50a54766a969d4846290e1e788ea44c1392163723c3bd",
"sha256:f84be1bb0040caca4cea721fcbbbbd61f9be9464ca236387158b0feea01914a4"
],
"version": "==2.10"
},
"mako": {
"hashes": [
"sha256:4e02fde57bd4abb5ec400181e4c314f56ac3e49ba4fb8b0d50bba18cb27d25ae"
],
"version": "==1.0.7"
},
"markupsafe": {
"hashes": [
"sha256:a6be69091dac236ea9c6bc7d012beab42010fa914c459791d627dad4910eb665"
],
"version": "==1.0"
},
"numpy": {
"hashes": [
"sha256:719d914f564f35cce4dc103808f8297c807c9f0297ac183ed81ae8b5650e698e",
"sha256:0f6a5ed0cd7ab1da11f5c07a8ecada73fc55a70ef7bb6311a4109891341d7277",
"sha256:d0928076d9bd8a98de44e79b1abe50c1456e7abbb40af7ef58092086f1a6c729",
"sha256:d858423f5ed444d494b15c4cc90a206e1b8c31354c781ac7584da0d21c09c1c3",
"sha256:20cac3123d791e4bf8482a580d98d6b5969ba348b9d5364df791ba3a666b660d",
"sha256:528ce59ded2008f9e8543e0146acb3a98a9890da00adf8904b1e18c82099418b",
"sha256:56e392b7c738bd70e6f46cf48c8194d3d1dd4c5a59fae4b30c58bb6ef86e5233",
"sha256:99051e03b445117b26028623f1a487112ddf61a09a27e2d25e6bc07d37d94f25",
"sha256:768e777cc1ffdbf97c507f65975c8686ebafe0f3dc8925d02ac117acc4669ce9",
"sha256:675e0f23967ce71067d12b6944add505d5f0a251f819cfb44bdf8ee7072c090d",
"sha256:a958bf9d4834c72dee4f91a0476e7837b8a2966dc6fcfc42c421405f98d0da51",
"sha256:bb370120de6d26004358611441e07acda26840e41dfedc259d7f8cc613f96495",
"sha256:f2b1378b63bdb581d5d7af2ec0373c8d40d651941d283a2afd7fc71184b3f570",
"sha256:a1413d06abfa942ca0553bf3bccaff5fdb36d55b84f2248e36228db871147dab",
"sha256:7f76d406c6b998d6410198dcb82688dcdaec7d846aa87e263ccf52efdcfeba30",
"sha256:a7157c9ac6bddd2908c35ef099e4b643bc0e0ebb4d653deb54891d29258dd329",
"sha256:0fd65cbbfdbf76bbf80c445d923b3accefea0fe2c2082049e0ce947c81fe1d3f",
"sha256:8c18ee4dddd5c6a811930c0a7c7947bf16387da3b394725f6063f1366311187d",
"sha256:0739146eaf4985962f07c62f7133aca89f3a600faac891ce6c7f3a1e2afe5272",
"sha256:07e21f14490324cc1160db101e9b6c1233c33985af4cb1d301dd02650fea1d7f",
"sha256:e6120d63b50e2248219f53302af7ec6fa2a42ed1f37e9cda2c76dbaca65036a7",
"sha256:6be6b0ca705321c178c9858e5ad5611af664bbdfae1df1541f938a840a103888",
"sha256:facc6f925c3099ac01a1f03758100772560a0b020fb9d70f210404be08006bcb"
],
"version": "==1.14.2"
},
"pandas": {
"hashes": [
"sha256:68ac484e857dcbbd07ea7c6f516cc67f7f143f5313d9bc661470e7f473528882",
"sha256:12f2a19d0b0adf31170d98d0e8bcbc59add0965a9b0c65d39e0665400491c0c5",
"sha256:68b121d13177f5128a4c118bb4f73ba40df28292c038389961aa55ea5a996427",
"sha256:06efae5c00b9f4c6e6d3fe1eb52e590ff0ea8e5cb58032c724e04d31c540de53",
"sha256:02541a4fdd31315f213a5c8e18708abad719ee03eda05f603c4fe973e9b9d770",
"sha256:2907f3fe91ca2119ac3c38de6891bbbc83333bfe0d98309768fee28de563ee7a",
"sha256:052a66f58783a59ea38fdfee25de083b107baa81fdbe38fabd169d0f9efce2bf",
"sha256:244ae0b9e998cfa88452a49b20e29bf582cc7c0e69093876d505aec4f8e1c7fe",
"sha256:66403162c8b45325a995493bdd78ad4d8be085e527d721dbfa773d56fbba9c88",
"sha256:af0dbac881f6f87acd325415adea0ce8cccf28f5d4ad7a54b6a1e176e2f7bf70",
"sha256:c2cd884794924687edbaad40d18ac984054d247bb877890932c4d41e3c3aba31",
"sha256:c372db80a5bcb143c9cb254d50f902772c3b093a4f965275197ec2d2184b1e61",
"sha256:97c8223d42d43d86ca359a57b4702ca0529c6553e83d736e93a5699951f0f8db",
"sha256:587a9816cc663c958fcff7907c553b73fe196604f990bc98e1b71ebf07e45b44",
"sha256:44a94091dd71f05922eec661638ec1a35f26d573c119aa2fad964f10a2880e6c"
],
"version": "==0.22.0"
},
"protobuf": {
"hashes": [
"sha256:ac0067e3c60737865ed72bb7416e02297d229d960902802d874c0e167128c809",
"sha256:5c1c8f6a0a68a874e3beff89255959dd80fad45870e96c88944a1b81a22dd5f5",
"sha256:7c193e6964e752bd056735594826c5b03274ceb8f07349d3ae47d9766250ba96",
"sha256:bcfa99f5a82f5eaaf6e5cee5bfdca5a1670f5740aec1d93dae170645ed1a16b0",
"sha256:e269ab7a50bf0fa6fe6a88ea7dcc7a1079ae9450d9ab9b7730ac32916d55508b",
"sha256:01ccd6d03449ae75b779fb5bf4ed62177d61afe3c5e6465ccf3f8b2e1a84afbe",
"sha256:628a3bf0794a8b3cabb18db11eb67cc10e0cc6e5525d557ae7b682bb73fa2018",
"sha256:242e4c7ae565267a8bc8b92d707177f915607ea4bd73244bec6cbf4a49b96661",
"sha256:e7fd33a3474cbe18fd5b5620784a0fa21fcae3e402b1806e29c6b450c7f61706",
"sha256:cc94079ae6cbcea5ae194464a30f3223f075e06a0446f52bca9ddbeb6e9f412a",
"sha256:7222d6616108b33ad6cbeff8117062a73c43cdc8fa8f64f6a322ebeb663e710e",
"sha256:3f655e1f99c3e14d56ca900af1b9a4715b691319a295cc38939d7f77eabd5e7c",
"sha256:76ef6ca3c50e4cfd044861586d5f1b352e0fe7f17f883df6c165bad5b4d0e10a",
"sha256:560a38e692a69957a70ba0e5839aa67430efd63072bf91b0539dac19055694cd",
"sha256:d5d9edfdc5a3a01d06062d677b121081629782edf0e05ca1be14f15bb947eeee",
"sha256:869e12bcfb5759e683f53ec1dd6155b7be034065431da289f0cb4510040a0799",
"sha256:905414e5ea6cdb78d8730f66335755152b46685fcb9fc2f2134024e3ea9e8dcc",
"sha256:adf716a89c9cc1891ead79a861c427071ef59172f0e11967b00565a9547b3bd0",
"sha256:1d92cc30b0b46cced33adde5853d920179eb5ea8eecdee9552502a7f29cc3f21",
"sha256:3b60685732bd0cbdc802dfcb6071efbcf5d927ce3127c13c33ea1a8efae3aa76"
],
"version": "==3.5.2.post1"
},
"psycopg2": {
"hashes": [
"sha256:aeaba399254ca79c299d9fe6aa811d3c3eac61458dee10270de7f4e71c624998",
"sha256:1d90379d01d0dc50ae9b40c863933d87ff82d51dd7d52cea5d1cb7019afd72cd",
"sha256:36030ca7f4b4519ee4f52a74edc4ec73c75abfb6ea1d80ac7480953d1c0aa3c3",
"sha256:7cbc3b21ce2f681ca9ad2d8c0901090b23a30c955e980ebf1006d41f37068a95",
"sha256:b178e0923c93393e16646155794521e063ec17b7cc9f943f15b7d4b39776ea2c",
"sha256:fe6a7f87356116f5ea840c65b032af17deef0e1a5c34013a2962dd6f99b860dd",
"sha256:6f302c486132f8dd11f143e919e236ea4467d53bf18c451cac577e6988ecbd05",
"sha256:888bba7841116e529f407f15c6d28fe3ef0760df8c45257442ec2f14f161c871",
"sha256:932a4c101af007cb3132b1f8a9ffef23386acc53dad46536dc5ba43a3235ae02",
"sha256:179c52eb870110a8c1b460c86d4f696d58510ea025602cd3f81453746fccb94f",
"sha256:33f9e1032095e1436fa9ec424abcbd4c170da934fb70e391c5d78275d0307c75",
"sha256:092a80da1b052a181b6e6c765849c9b32d46c5dac3b81bf8c9b83e697f3cdbe8",
"sha256:f3d3a88128f0c219bdc5b2d9ccd496517199660cea021c560a3252116df91cbd",
"sha256:19983b77ec1fc2a210092aa0333ee48811fd9fb5f194c6cd5b927ed409aea5f8",
"sha256:027ae518d0e3b8fff41990e598bc7774c3d08a3a20e9ecc0b59fb2aaaf152f7f",
"sha256:363fbbf4189722fc46779be1fad2597e2c40b3f577dc618f353a46391cf5d235",
"sha256:d74cf9234ba76426add5e123449be08993a9b13ff434c6efa3a07caa305a619f",
"sha256:32702e3bd8bfe12b36226ba9846ed9e22336fc4bd710039d594b36bd432ae255",
"sha256:8eb94c0625c529215b53c08fb4e461546e2f3fc96a49c13d5474b5ad7aeab6cf",
"sha256:8ebba5314c609a05c6955e5773c7e0e57b8dd817e4f751f30de729be58fa5e78",
"sha256:27467fd5af1dcc0a82d72927113b8f92da8f44b2efbdb8906bd76face95b596d",
"sha256:b68e89bb086a9476fa85298caab43f92d0a6af135a5f433d1f6b6d82cafa7b55",
"sha256:0b9851e798bae024ed1a2a6377a8dab4b8a128a56ed406f572f9f06194e4b275",
"sha256:733166464598c239323142c071fa4c9b91c14359176e5ae7e202db6bcc1d2eb5",
"sha256:ad75fe10bea19ad2188c5cb5fc4cdf53ee808d9b44578c94a3cd1e9fc2beb656",
"sha256:8966829cb0d21a08a3c5ac971a2eb67c3927ae27c247300a8476554cc0ce2ae8",
"sha256:8bf51191d60f6987482ef0cfe8511bbf4877a5aa7f313d7b488b53189cf26209"
],
"version": "==2.7.4"
},
"pyasn1": {
"hashes": [
"sha256:f81c96761fca60d64b1c9b79ec2e40cf9495a745cf570613079ef324aeb9672b",
"sha256:7d626683e3d792cccc608da02498aff37ab4f3dafd8905d6bf755d11f9b26b43",
"sha256:e85895087905c65b5b594eb91f7522664c85545b147d5f4d4e7b1b07da8dcbdc",
"sha256:5a0db897b311d265cde49615cf783f1c78613138605cdd0f907ecfa5b2aba3ee",
"sha256:d5cd6ed995dba16fad0c521cfe31cd2d68400b53fcc2bce93326829be73ab6d1",
"sha256:a7efe807c4b83a859e2735c692b92ed7b567cfddc4163763412920041d876c2b",
"sha256:b5a9ca48055b9a20f6d1b3d68e38692e5431c86a0f99ea602e61294e891fee5b",
"sha256:c07d6e587b2f928366b1f67c09bda026a3e6fcc99e80a744dc67f8fca3895626",
"sha256:d84c2aea3cf43780e9e6a19f4e4dddee9f6976519020e64e47c57e5c7a8c3dd2",
"sha256:758cb50abddc03e4563fd9e7f03db56e3e87b58c0bd01247360326e5c0c7ffa5",
"sha256:0d7f6e959fe53f3960a23d73f35e1fce61348b30915b6664309ca756de7c1f89",
"sha256:d258b0a71994f7770599835249cece1caef3c70def868c4915e6e5ca49b67d15"
],
"version": "==0.4.2"
},
"pyasn1-modules": {
"hashes": [
"sha256:b1f395cae2d669e0830cb023aa86f9f283b7a9aa32317d7f80d8e78aa2745812",
"sha256:854700bbdd01394e2ada9c1bfbd0ed9f5d0c551350dbbd023e88b11d2771ae06",
"sha256:598a6004ec26a8ab40a39ea955068cf2a3949ad9c0030da970f2e1ca4c9f1cc9",
"sha256:f53fe5bcebdf318f51399b250fe8325ef3a26d927f012cc0c8e0f9e9af7f9deb",
"sha256:47fb6757ab78fe966e7c58b2030b546854f78416d653163f0ce9290cf2278e8b",
"sha256:041e9fbafac548d095f5b6c3b328b80792f006196e15a232b731a83c93d59493",
"sha256:0cea139045c38f84abaa803bcb4b5e8775ea12a42af10019d942f227acc426c3",
"sha256:0cdca76a68dcb701fff58c397de0ef9922b472b1cb3ea9695ca19d03f1869787",
"sha256:72fd8b0c11191da088147c6e4678ec53e573923ecf60b57eeac9e97433e09fc2",
"sha256:c6747146e95d2b14cc2a8399b2b0bde3f93778f8f9ec704690d2b589c376c137",
"sha256:0f2e50d20bc670be170966638fa0ae603f0bc9ed6ebe8e97a6d1d4cef30cc889",
"sha256:af00ea8f2022b6287dc375b2c70f31ab5af83989fc6fe9eacd4976ce26cd7ccc"
],
"version": "==0.2.1"
},
"python-dateutil": {
"hashes": [
"sha256:3220490fb9741e2342e1cf29a503394fdac874bc39568288717ee67047ff29df",
"sha256:9d8074be4c993fbe4947878ce593052f71dac82932a677d49194d8ce9778002e"
],
"version": "==2.7.2"
},
"python-editor": {
"hashes": [
"sha256:a3c066acee22a1c94f63938341d4fb374e3fdd69366ed6603d7b24bed1efc565"
],
"version": "==1.0.3"
},
"pytz": {
"hashes": [
"sha256:ed6509d9af298b7995d69a440e2822288f2eca1681b8cce37673dbb10091e5fe",
"sha256:f93ddcdd6342f94cea379c73cddb5724e0d6d0a1c91c9bdef364dc0368ba4fda",
"sha256:61242a9abc626379574a166dc0e96a66cd7c3b27fc10868003fa210be4bff1c9",
"sha256:ba18e6a243b3625513d85239b3e49055a2f0318466e0b8a92b8fb8ca7ccdf55f",
"sha256:07edfc3d4d2705a20a6e99d97f0c4b61c800b8232dc1c04d87e8554f130148dd",
"sha256:3a47ff71597f821cd84a162e71593004286e5be07a340fd462f0d33a760782b5",
"sha256:5bd55c744e6feaa4d599a6cbd8228b4f8f9ba96de2c38d56f08e534b3c9edf0d",
"sha256:887ab5e5b32e4d0c86efddd3d055c1f363cbaa583beb8da5e22d2fa2f64d51ef",
"sha256:410bcd1d6409026fbaa65d9ed33bf6dd8b1e94a499e32168acfc7b332e4095c0"
],
"version": "==2018.3"
},
"requests": {
"hashes": [
"sha256:6a1b267aa90cac58ac3a765d067950e7dbbf75b1da07e895d1f594193a40a38b",
"sha256:9c443e7324ba5b85070c4a818ade28bfabedf16ea10206da1132edaa6dda237e"
],
"version": "==2.18.4"
},
"rsa": {
"hashes": [
"sha256:43f682fea81c452c98d09fc316aae12de6d30c4b5c84226642cf8f8fd1c93abd",
"sha256:25df4e10c263fb88b5ace923dd84bf9aa7f5019687b5e55382ffcdb8bede9db5"
],
"version": "==3.4.2"
},
"six": {
"hashes": [
"sha256:832dc0e10feb1aa2c68dcc57dbb658f1c7e65b9b61af69048abc87a2db00a0eb",
"sha256:70e8a77beed4562e7f14fe23a786b54f6296e34344c23bc42f07b15018ff98e9"
],
"version": "==1.11.0"
},
"sqlalchemy": {
"hashes": [
"sha256:7cb00cc9b9f92ef8b4391c8a2051f81eeafefe32d63c6b395fd51401e9a39edb"
],
"version": "==1.2.6"
},
"urllib3": {
"hashes": [
"sha256:06330f386d6e4b195fbfc736b297f58c5a892e4440e54d294d7004e3a9bbea1b",
"sha256:cc44da8e1145637334317feebd728bd869a35285b93cbb4cca2577da7e62db4f"
],
"version": "==1.22"
},
"werkzeug": {
"hashes": [
"sha256:d5da73735293558eb1651ee2fddc4d0dedcfa06538b8813a2e20011583c9e49b",
"sha256:c3fd7a7d41976d9f44db327260e263132466836cef6f91512889ed60ad26557c"
],
"version": "==0.14.1"
}
},
"develop": {}
}

1
migrations/README Executable file

@@ -0,0 +1 @@
Generic single-database configuration.

45
migrations/alembic.ini Normal file

@@ -0,0 +1,45 @@
# A generic, single database configuration.

[alembic]
# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false

# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S

88
migrations/env.py Executable file

@@ -0,0 +1,88 @@
from __future__ import with_statement
from alembic import context
from sqlalchemy import engine_from_config, pool
from logging.config import fileConfig
import logging

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)
logger = logging.getLogger('alembic.env')

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
from flask import current_app
config.set_main_option('sqlalchemy.url',
                       current_app.config.get('SQLALCHEMY_DATABASE_URI'))
target_metadata = current_app.extensions['migrate'].db.metadata

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def run_migrations_offline():
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.
    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(url=url)

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.
    """

    # this callback is used to prevent an auto-migration from being generated
    # when there are no changes to the schema
    # reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html
    def process_revision_directives(context, revision, directives):
        if getattr(config.cmd_opts, 'autogenerate', False):
            script = directives[0]
            if script.upgrade_ops.is_empty():
                directives[:] = []
                logger.info('No changes in schema detected.')

    engine = engine_from_config(config.get_section(config.config_ini_section),
                                prefix='sqlalchemy.',
                                poolclass=pool.NullPool)

    connection = engine.connect()
    context.configure(connection=connection,
                      target_metadata=target_metadata,
                      compare_type=True,
                      process_revision_directives=process_revision_directives,
                      **current_app.extensions['migrate'].configure_args)

    try:
        with context.begin_transaction():
            context.run_migrations()
    finally:
        connection.close()


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()

25
migrations/script.py.mako Executable file

@@ -0,0 +1,25 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
# flake8: noqa
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}
def upgrade():
${upgrades if upgrades else "pass"}
def downgrade():
${downgrades if downgrades else "pass"}

78
migrations/versions/9116cea0e0d7_initial_models.py Normal file

@@ -0,0 +1,78 @@
"""initial models
Revision ID: 9116cea0e0d7
Revises:
Create Date: 2018-04-04 23:48:49.351410
"""
# flake8: noqa
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = '9116cea0e0d7'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('overall',
sa.Column('date', sa.Date(), nullable=False),
sa.Column('package', sa.String(length=128), nullable=False),
sa.Column('category', sa.String(length=16), nullable=False),
sa.Column('downloads', sa.Integer(), nullable=False),
sa.PrimaryKeyConstraint('date', 'package', 'category')
)
op.create_table('python_major',
sa.Column('date', sa.Date(), nullable=False),
sa.Column('package', sa.String(length=128), nullable=False),
sa.Column('category', sa.String(length=4), nullable=True),
sa.Column('downloads', sa.Integer(), nullable=False),
sa.PrimaryKeyConstraint('date', 'package', 'category')
)
op.create_table('python_minor',
sa.Column('date', sa.Date(), nullable=False),
sa.Column('package', sa.String(length=128), nullable=False),
sa.Column('category', sa.String(length=4), nullable=True),
sa.Column('downloads', sa.Integer(), nullable=False),
sa.PrimaryKeyConstraint('date', 'package', 'category')
)
op.create_table('recent',
sa.Column('package', sa.String(length=128), nullable=False),
sa.Column('category', sa.String(length=8), nullable=False),
sa.Column('downloads', sa.Integer(), nullable=False),
sa.PrimaryKeyConstraint('package', 'category')
)
op.create_table('system',
sa.Column('date', sa.Date(), nullable=False),
sa.Column('package', sa.String(length=128), nullable=False),
sa.Column('category', sa.String(length=8), nullable=True),
sa.Column('downloads', sa.Integer(), nullable=False),
sa.PrimaryKeyConstraint('date', 'package', 'category')
)
op.create_table('users',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('username', sa.String(length=39), nullable=False),
sa.Column('token', sa.String(length=256), nullable=True),
sa.Column('created_at', sa.DateTime(), nullable=False),
sa.Column('active', sa.Boolean(), nullable=True),
sa.Column('is_admin', sa.Boolean(), nullable=True),
sa.Column('favorites', postgresql.ARRAY(sa.String(length=128), dimensions=1), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('username')
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('users')
op.drop_table('system')
op.drop_table('recent')
op.drop_table('python_minor')
op.drop_table('python_major')
op.drop_table('overall')
# ### end Alembic commands ###

35
migrations/versions/c81b3715b9e5_change_int_to_bigint_for_downloads_in.py Normal file

@@ -0,0 +1,35 @@
"""change int to bigint for downloads in recent table
Revision ID: c81b3715b9e5
Revises: 9116cea0e0d7
Create Date: 2018-04-05 00:56:02.276823
"""
# flake8: noqa
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'c81b3715b9e5'
down_revision = '9116cea0e0d7'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.alter_column('recent', 'downloads',
existing_type=sa.INTEGER(),
type_=sa.BigInteger(),
existing_nullable=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.alter_column('recent', 'downloads',
existing_type=sa.BigInteger(),
type_=sa.INTEGER(),
existing_nullable=False)
# ### end Alembic commands ###
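
With both revisions in place, the chain can be applied programmatically as well as via the CLI. A minimal sketch, assuming pypistats.secret is present so create_app() can build a configured app, and that it runs from the project root where migrations/ lives:

# Sketch: apply 9116cea0e0d7 then c81b3715b9e5, the programmatic
# equivalent of `flask db upgrade`.
from flask_migrate import upgrade

from pypistats.application import create_app

app = create_app()
with app.app_context():
    upgrade()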

0
pypistats/__init__.py Normal file

37
pypistats/application.py Normal file

@@ -0,0 +1,37 @@
"""PyPIStats application."""
from flask import Flask
from pypistats import views
from pypistats.extensions import db
from pypistats.extensions import github
from pypistats.extensions import migrate
from pypistats.settings import DevConfig
from pypistats.settings import ProdConfig
from pypistats.settings import TestConfig
def create_app(config_object=DevConfig):
"""Create the application.
:param config_object: The configuration object to use.
"""
app = Flask(__name__.split('.')[0])
app.config.from_object(config_object)
register_extensions(app)
register_blueprints(app)
return app
def register_extensions(app):
"""Register Flask extensions."""
db.init_app(app)
github.init_app(app)
migrate.init_app(app, db)
def register_blueprints(app):
"""Register Flask blueprints."""
app.register_blueprint(views.api.blueprint)
app.register_blueprint(views.error.blueprint)
app.register_blueprint(views.general.blueprint)
app.register_blueprint(views.user.blueprint)
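
The factory pattern makes the app easy to exercise in isolation. A minimal sketch, assuming pypistats.secret exists so the config can build a database URI; the /status route used here is defined in views/general.py below:

# Sketch: build an app from the factory and hit the /status health route.
from pypistats.application import create_app
from pypistats.settings import TestConfig

app = create_app(TestConfig)
with app.test_client() as client:
    assert client.get("/status").data == b"OK"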

62
pypistats/database.py Normal file

@@ -0,0 +1,62 @@
"""Database classes and models."""
from pypistats.extensions import db
Column = db.Column
basestring = (str, bytes)
class CRUDMixin(object):
"""Mixin that adds convenience methods for CRUD operations."""
@classmethod
def create(cls, **kwargs):
"""Create a new record and save it the database."""
instance = cls(**kwargs)
return instance.save()
def update(self, commit=True, **kwargs):
"""Update specific fields of a record."""
for attr, value in kwargs.items():
setattr(self, attr, value)
return commit and self.save() or self
def save(self, commit=True):
"""Save the record."""
db.session.add(self)
if commit:
db.session.commit()
return self
def delete(self, commit=True):
"""Remove the record from the database."""
db.session.delete(self)
return commit and db.session.commit()
class Model(CRUDMixin, db.Model):
"""Base model class that includes CRUD convenience methods."""
__abstract__ = True
class SurrogatePK(object):
"""A mixin that adds a surrogate integer 'primary key' column.
Adds a surrogate integer 'primary key' columnnamed ``id`` to any
declarative-mapped class.
"""
__table_args__ = {'extend_existing': True}
id = Column(db.Integer, primary_key=True)
@classmethod
def get_by_id(cls, record_id):
"""Get record by ID."""
if any(
(isinstance(record_id, basestring) and record_id.isdigit(),
isinstance(record_id, (int, float))),
):
return cls.query.get(int(record_id))
return None
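
A sketch of how the mixins compose in practice; Widget is a hypothetical model, not part of this commit, and an application context is assumed:

# Sketch: a hypothetical model built on the CRUD conveniences above.
from pypistats.database import Column, Model, SurrogatePK
from pypistats.extensions import db

class Widget(SurrogatePK, Model):
    __tablename__ = "widgets"
    name = Column(db.String(64), nullable=False)

# Inside an application context:
w = Widget.create(name="spam")  # INSERT + commit
w.update(name="eggs")           # set attributes, then save
Widget.get_by_id("1")           # digit strings are coerced to int
w.delete()                      # DELETE + commit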

9
pypistats/extensions.py Normal file

@@ -0,0 +1,9 @@
"""Flask extensions."""
from flask_github import GitHub
from flask_migrate import Migrate
from flask_sqlalchemy import SQLAlchemy
db = SQLAlchemy()
github = GitHub()
migrate = Migrate()
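
Each extension is instantiated unbound here and attached to an app later via init_app (see register_extensions in application.py). A sketch of what that buys:

# Sketch: the same unbound `db` object serves any app the factory produces;
# it only resolves its engine inside that app's context.
from pypistats.application import create_app
from pypistats.extensions import db

app = create_app()
with app.app_context():
    print(db.engine)  # engine built from SQLALCHEMY_DATABASE_URI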

0
pypistats/models/__init__.py Normal file

88
pypistats/models/download.py Normal file

@@ -0,0 +1,88 @@
"""Package stats tables."""
from pypistats.database import Column
from pypistats.database import Model
from pypistats.extensions import db
class OverallDownloadCount(Model):
"""Overall download counts."""
__tablename__ = "overall"
date = Column(db.Date, primary_key=True, nullable=False)
package = Column(db.String(128), primary_key=True, nullable=False)
# with_mirrors or without_mirrors
category = Column(db.String(16), primary_key=True, nullable=False)
downloads = Column(db.Integer(), nullable=False)
def __repr__(self):
return "<OverallDownloadCount {}".format(
f"{str(self.date)} - {str(self.package)} - {str(self.category)}"
)
class PythonMajorDownloadCount(Model):
"""Download counts by python major version."""
__tablename__ = "python_major"
date = Column(db.Date, primary_key=True, nullable=False)
package = Column(db.String(128), primary_key=True, nullable=False)
# python_major version, 2 or 3 (or null)
category = Column(db.String(4), primary_key=True, nullable=True)
downloads = Column(db.Integer(), nullable=False)
def __repr__(self):
return "<PythonMajorDownloadCount {}".format(
f"{str(self.date)} - {str(self.package)} - {str(self.category)}"
)
class PythonMinorDownloadCount(Model):
"""Download counts by python minor version."""
__tablename__ = "python_minor"
date = Column(db.Date, primary_key=True)
package = Column(db.String(128), primary_key=True, nullable=False)
# python_minor version, e.g. 2.7 or 3.6 (or null)
category = Column(db.String(4), primary_key=True, nullable=True)
downloads = Column(db.Integer(), nullable=False)
def __repr__(self):
return "<PythonMinorDownloadCount {}".format(
f"{str(self.date)} - {str(self.package)} - {str(self.category)}"
)
class RecentDownloadCount(Model):
"""Recent day/week/month download counts."""
__tablename__ = "recent"
package = Column(db.String(128), primary_key=True, nullable=False)
# recency, e.g. day, week, month
category = Column(db.String(8), primary_key=True, nullable=False)
downloads = Column(db.BigInteger(), nullable=False)
def __repr__(self):
return "<RecentDownloadCount {}".format(
f"{str(self.package)} - {str(self.category)}"
)
class SystemDownloadCount(Model):
"""Download counts by system."""
__tablename__ = "system"
date = Column(db.Date, primary_key=True)
package = Column(db.String(128), primary_key=True, nullable=False)
# system, e.g. Windows or Linux or Darwin (or null)
category = Column(db.String(8), primary_key=True, nullable=True)
downloads = Column(db.Integer(), nullable=False)
def __repr__(self):
return "<SystemDownloadCount {}".format(
f"{str(self.date)} - {str(self.package)} - {str(self.category)}"
)
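
A sketch of reading these models back inside an application context; the package name is illustrative:

# Sketch: recent day/week/month counts for one package.
from pypistats.application import create_app
from pypistats.models.download import RecentDownloadCount

app = create_app()
with app.app_context():
    rows = RecentDownloadCount.query.filter_by(package="requests").all()
    print({r.category: r.downloads for r in rows})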

32
pypistats/models/user.py Normal file

@@ -0,0 +1,32 @@
"""User tables."""
import datetime
from flask_login import UserMixin
from sqlalchemy.dialects.postgresql import ARRAY
from pypistats.database import Column
from pypistats.database import Model
from pypistats.database import SurrogatePK
from pypistats.extensions import db
class User(UserMixin, SurrogatePK, Model):
"""A user of the app."""
__tablename__ = 'users'
username = Column(db.String(39), unique=True, nullable=False)
# icon
token = Column(db.String(256))
created_at = Column(db.DateTime, nullable=False, default=datetime.datetime.utcnow)
active = Column(db.Boolean(), default=False)
is_admin = Column(db.Boolean(), default=False)
favorites = Column(ARRAY(db.String(128), dimensions=1))
def __init__(self, token, **kwargs):
"""Create instance."""
db.Model.__init__(self, token=token, **kwargs)
def __repr__(self):
"""Represent instance as a unique string."""
return f"<User({self.username})>"

8
pypistats/run.py Normal file

@@ -0,0 +1,8 @@
"""Run the application."""
from pypistats.application import create_app
from pypistats.settings import DevConfig
from pypistats.settings import ProdConfig
from pypistats.settings import TestConfig
app = create_app(DevConfig)

1
pypistats/secret/__init__.py Normal file

@@ -0,0 +1 @@
from .secret import *

53
pypistats/settings.py Normal file

@@ -0,0 +1,53 @@
"""Application configuration."""
import os
from pypistats.secret import postgresql
def get_db_uri(env):
"""Get the database URI."""
return \
"postgresql://{username}:{password}@{host}:{port}/{dbname}".format(
username=postgresql[env]["username"],
password=postgresql[env]["password"],
host=postgresql[env]["host"],
port=postgresql[env]["port"],
dbname=postgresql[env]["dbname"],
)
class Config(object):
"""Base configuration."""
SECRET_KEY = os.environ.get("PYPISTATS_SECRET", "secret-key")
APP_DIR = os.path.abspath(os.path.dirname(__file__))
PROJECT_ROOT = os.path.abspath(os.path.join(APP_DIR, os.pardir))
GITHUB_CLIENT_ID = "test"
GITHUB_CLIENT_SECRET = "test"
SQLALCHEMY_TRACK_MODIFICATIONS = False
class ProdConfig(Config):
"""Production configuration."""
ENV = "prod"
DEBUG = False
SQLALCHEMY_DATABASE_URI = get_db_uri(ENV)
class DevConfig(Config):
"""Development configuration."""
ENV = "dev"
DEBUG = True
SQLALCHEMY_DATABASE_URI = get_db_uri(ENV)
class TestConfig(Config):
"""Test configuration."""
ENV = "dev"
TESTING = True
DEBUG = True
SQLALCHEMY_DATABASE_URI = get_db_uri(ENV)
WTF_CSRF_ENABLED = False # Allows form testing
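
get_db_uri only formats pieces of the gitignored pypistats.secret mapping into a URI. A sketch of the expected shape, with placeholder credentials:

# Sketch: the secret mapping consumed by get_db_uri (values are placeholders).
postgresql = {
    "dev": {
        "username": "pypistats",
        "password": "hunter2",
        "host": "localhost",
        "port": 5432,
        "dbname": "pypistats",
    },
}
# get_db_uri("dev") then yields:
# postgresql://pypistats:hunter2@localhost:5432/pypistats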

0
pypistats/tasks/__init__.py Normal file

53
pypistats/tasks/db.py Normal file

@@ -0,0 +1,53 @@
"""Database tasks."""
import psycopg2
from sqlalchemy import create_engine
from sqlalchemy.exc import ProgrammingError
# from pypistats.extensions import db
from pypistats.secret import postgresql
DBNAME = "pypistats"
def create_databases():
"""Create the databases for each environment."""
env = "prod"
url = \
"postgresql://{username}:{password}@{host}:{port}/{dbname}".format(
username=postgresql[env]["username"],
password=postgresql[env]["password"],
host=postgresql[env]["host"],
port=postgresql[env]["port"],
dbname=DBNAME,
)
engine = create_engine(url)
connection = engine.connect()
for env, config in postgresql.items():
query = f"""CREATE DATABASE {config["dbname"]}"""
try:
connection.execute("commit")
connection.execute(query)
connection.execute("commit")
print(f"Created db: {config['dbname']}.")
except ProgrammingError:
print(f"Database {config['dbname']} already exists.")
def get_db_connection(env="dev"):
"""Get a db connection cursor."""
connection = psycopg2.connect(
dbname=postgresql[env]['dbname'],
user=postgresql[env]['username'],
password=postgresql[env]['password'],
host=postgresql[env]['host'],
port=postgresql[env]['port'],
# sslmode='require',
)
cursor = connection.cursor()
return cursor
if __name__ == "__main__":
create_databases()
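
The cursor helper is what the stats tasks below build on. A sketch of ad-hoc use, assuming the dev database from create_databases and the migrated schema:

# Sketch: run a one-off query through the helper.
from pypistats.tasks.db import get_db_connection

cursor = get_db_connection("dev")
cursor.execute("SELECT count(*) FROM recent")
print(cursor.fetchone())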

323
pypistats/tasks/pypi.py Normal file

@@ -0,0 +1,323 @@
"""Get the download stats for a specific day."""
import datetime
import os
# import sys
# from google.api_core.exceptions import Conflict
from google.cloud import bigquery
import pandas as pd
import psycopg2
from psycopg2.extras import execute_values
from pypistats.secret import postgresql
os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = \
os.path.join(
os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
"secret",
"secret.json",
)
# Mirrors to disregard when considering downloads
MIRRORS = ("bandersnatch", "z3c.pypimirror", "Artifactory", "devpi")
# PyPI systems
SYSTEMS = ("Windows", "Linux", "Darwin")
# BigQuery definitions
DATASET_ID = "pypistats"
TABLE_ID = "pypistats"
SCHEMA = [
bigquery.SchemaField("package", "STRING", mode="REQUIRED"),
bigquery.SchemaField("category_label", "STRING", mode="REQUIRED"),
bigquery.SchemaField("category", "STRING", mode="NULLABLE"),
bigquery.SchemaField("downloads", "INTEGER", mode="NULLABLE"),
]
def get_daily_download_stats(date, env="dev"):
"""Get daily download stats for pypi packages from BigQuery."""
job_config = bigquery.QueryJobConfig()
bq_client = bigquery.Client()
# # Prepare a reference to the new dataset
# dataset_ref = bq_client.dataset(DATASET_ID)
# dataset = bigquery.Dataset(dataset_ref)
#
# # Create the dataset
# try:
# dataset = bq_client.create_dataset(dataset)
# except Conflict:
# pass
#
# # Prepare a reference to the table
# table_ref = dataset_ref.table(TABLE_ID)
# table = bigquery.Table(table_ref, schema=SCHEMA)
#
# # Create the table
# try:
# table = bq_client.create_table(table)
# except Conflict:
# pass
local = False
if env == "dev":
try:
print("Loading from csv...")
df = pd.read_csv("ignore/sample_data.csv", index_col=0)
print("Done.")
# print(set(df["category_label"].values))
# sys.exit()
local = True
except Exception:
print("Loading failed.")
if not local:
print("Querying BigQuery...")
# Get and perform the query, writing to destination table
query = get_query(date)
print("Done.")
# job_config.destination = table_ref
# job_config.write_disposition = "WRITE_TRUNCATE"
query_job = bq_client.query(query, job_config=job_config)
iterator = query_job.result()
rows = list(iterator)
data = []
for row in rows:
data.append((
date,
row['package'],
row['category_label'],
row['category'],
row['downloads']
))
df = pd.DataFrame(data, columns=[
"date",
"package",
"category_label",
"category",
"downloads",
])
df.to_csv("ignore/sample_data.csv")
return update_db(df, env)
def update_db(df, env="dev"):
"""Update the db for the table."""
connection = psycopg2.connect(
dbname=postgresql[env]['dbname'],
user=postgresql[env]['username'],
password=postgresql[env]['password'],
host=postgresql[env]['host'],
port=postgresql[env]['port'],
# sslmode='require',
)
cursor = connection.cursor()
df_groups = df.groupby("category_label")
success = {}
for category_label, df_category in df_groups:
table = category_label
df_category = df_category[[
"date",
"package",
"category",
"downloads",
]]
# success[table] = update_table(cursor, table, df_category, date)
update_all_package_stats(cursor, table, date)
update_recent_stats(cursor, date)
return success
def update_table(cursor, table, df, date):
"""Update a table."""
print(table)
df = df.fillna("null")
delete_query = \
f"""DELETE FROM {table}
WHERE date = '{date}'"""
insert_query = \
f"""INSERT INTO {table} (date, package, category, downloads)
VALUES %s"""
values = list(df.itertuples(index=False, name=None))
try:
cursor.execute(delete_query)
execute_values(cursor, insert_query, values)
cursor.execute("commit")
return True
except psycopg2.IntegrityError as e:
cursor.execute("rollback")
return False
def update_all_package_stats(cursor, table, date):
"""Update stats for __all__ packages."""
print("__all__")
aggregate_query = \
f"""SELECT date, '__all__' AS package, category, sum(downloads) AS downloads
FROM {table} GROUP BY date, category"""
cursor.execute(aggregate_query, (table,))
values = cursor.fetchall()
delete_query = \
f"""DELETE FROM {table}
WHERE date = '{date}' and package = '__all__'"""
insert_query = \
f"""INSERT INTO {table} (date, package, category, downloads)
VALUES %s"""
try:
cursor.execute(delete_query)
execute_values(cursor, insert_query, values)
cursor.execute("commit")
return True
except psycopg2.IntegrityError as e:
cursor.execute("rollback")
return False
def update_recent_stats(cursor, date):
"""Update daily, weekly, monthly stats for all packages."""
print("recent")
downloads_table = "overall"
recent_table = "recent"
date = datetime.datetime.strptime(date, "%Y-%m-%d").date()
date_week = date - datetime.timedelta(days=7)
date_month = date - datetime.timedelta(days=30)
where = {
"day": f"date = '{str(date)}'",
"week": f"date > '{str(date_week)}'",
"month": f"date > '{str(date_month)}'",
}
success = {}
for time, clause in where.items():
select_query = \
f"""SELECT package, '{time}' as category, sum(downloads) AS downloads
FROM {downloads_table}
WHERE category = 'without_mirrors' and {clause}
GROUP BY package"""
cursor.execute(select_query)
values = cursor.fetchall()
delete_query = \
f"""DELETE FROM {recent_table}
WHERE category = '{time}'"""
insert_query = \
f"""INSERT INTO {recent_table}
(package, category, downloads) VALUES %s"""
try:
cursor.execute(delete_query)
execute_values(cursor, insert_query, values)
cursor.execute("commit")
success[time] = True
except psycopg2.IntegrityError as e:
cursor.execute("rollback")
success[time] = False
def get_query(date):
"""Get the query to execute against pypistats on bigquery."""
return f"""
WITH
dls AS (
SELECT
country_code,
file.project AS package,
file.version AS package_version,
file.type AS file_type,
details.installer.name AS installer,
details.python AS python_version,
details.implementation.name AS python_implementation,
details.distro.name AS distro,
details.system.name AS system
FROM
`the-psf.pypi.downloads{date.replace("-", "")}` )
SELECT
package,
'python_major' AS category_label,
SPLIT(python_version, '.')[
OFFSET
(0)] AS category,
COUNT(*) AS downloads
FROM
dls
WHERE
installer NOT IN {str(MIRRORS)}
GROUP BY
package,
category
UNION ALL
SELECT
package,
'python_minor' AS category_label,
CONCAT(SPLIT(python_version, '.')[
OFFSET
(0)],'.',SPLIT(python_version, '.')[
OFFSET
(1)]) AS category,
COUNT(*) AS downloads
FROM
dls
WHERE
installer NOT IN {str(MIRRORS)}
GROUP BY
package,
category
UNION ALL
SELECT
package,
'overall' AS category_label,
'with_mirrors' AS category,
COUNT(*) AS downloads
FROM
dls
GROUP BY
package,
category
UNION ALL
SELECT
package,
'overall' AS category_label,
'without_mirrors' AS category,
COUNT(*) AS downloads
FROM
dls
WHERE
installer NOT IN {str(MIRRORS)}
GROUP BY
package,
category
UNION ALL
SELECT
package,
'system' AS category_label,
CASE
WHEN system NOT IN {str(SYSTEMS)} THEN 'other'
ELSE system
END AS category,
COUNT(*) AS downloads
FROM
dls
WHERE
installer NOT IN {str(MIRRORS)}
GROUP BY
package,
category
"""
if __name__ == "__main__":
date = "2018-02-08"
print(get_daily_download_stats(date))
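
One piece of date handling worth calling out: the BigQuery table name embeds the date with the dashes stripped. A small sketch of that construction:

# Sketch: "2018-02-08" targets the day table `the-psf.pypi.downloads20180208`.
date = "2018-02-08"
table = f"`the-psf.pypi.downloads{date.replace('-', '')}`"
assert table == "`the-psf.pypi.downloads20180208`"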

6
pypistats/views/__init__.py Normal file

@@ -0,0 +1,6 @@
"""The view blueprint modules."""
# flake8: noqa
from pypistats.views import api
from pypistats.views import error
from pypistats.views import general
from pypistats.views import user

133
pypistats/views/api.py Normal file

@@ -0,0 +1,133 @@
"""JSON API routes."""
from flask import abort
from flask import Blueprint
from flask import jsonify
from flask import request
from pypistats.models.download import OverallDownloadCount
from pypistats.models.download import PythonMajorDownloadCount
from pypistats.models.download import PythonMinorDownloadCount
from pypistats.models.download import RecentDownloadCount
from pypistats.models.download import SystemDownloadCount
blueprint = Blueprint('api', __name__, url_prefix='/api')
@blueprint.route("/<package>/recent")
def api_downloads_recent(package):
"""Get the recent downloads of a package."""
category = request.args.get('period')
if category is None:
downloads = RecentDownloadCount.query.filter_by(package=package).all()
elif category in ("day", "week", "month"):
downloads = RecentDownloadCount.query.filter_by(package=package, category=category).first()
else:
abort(404)
response = {"package": package, "type": "recent_downloads"}
if len(downloads) > 0:
response["data"] = {
r.category: r.downloads for r in downloads
}
else:
abort(404)
return jsonify(response)
@blueprint.route("/<package>/overall")
def api_downloads_overall(package):
"""Get the overall download time series of a package."""
mirrors = request.args.get('mirrors')
if mirrors == 'true':
downloads = OverallDownloadCount.query.\
filter_by(package=package, category="with_mirrors").\
order_by(OverallDownloadCount.date).all()
elif mirrors == 'false':
downloads = OverallDownloadCount.query.\
filter_by(package=package, category="without_mirrors").\
order_by(OverallDownloadCount.date).all()
else:
downloads = OverallDownloadCount.query.\
filter_by(package=package).\
order_by(OverallDownloadCount.category, OverallDownloadCount.date).all()
response = {"package": package, "type": "overall_downloads"}
if len(downloads) > 0:
response["data"] = [{
"date": str(r.date),
"category": r.category,
"downloads": r.downloads,
} for r in downloads]
else:
abort(404)
return jsonify(response)
@blueprint.route("/<package>/python_major")
def api_downloads_python_major(package):
"""Get the python major download time series of a package."""
return generic_downloads(PythonMajorDownloadCount, package, "version", "python_major")
@blueprint.route("/<package>/python_minor")
def api_downloads_python_minor(package):
"""Get the python minor download time series of a package."""
return generic_downloads(PythonMinorDownloadCount, package, "version", "python_minor")
@blueprint.route("/<package>/system")
def api_downloads_system(package):
"""Get the system download time series of a package."""
return generic_downloads(SystemDownloadCount, package, "os", "system")
def generic_downloads(model, package, arg, name):
"""Generate a generic response."""
category = request.args.get(f"{arg}")
if category is not None:
downloads = model.query.\
filter_by(package=package, category=category).\
order_by(model.date).all()
else:
downloads = model.query.\
filter_by(package=package).\
order_by(model.category, model.date).all()
response = {"package": package, "type": f"{name}_downloads"}
if downloads is not None:
response["data"] = [{
"date": str(r.date),
"category": r.category,
"downloads": r.downloads,
} for r in downloads]
else:
abort(404)
return jsonify(response)
@blueprint.route("/top/overall")
def api_top_packages():
"""Get the most downloaded packages by recency."""
return "top overall"
@blueprint.route("/top/python_major")
def api_top_python_major():
"""Get the most downloaded packages by python major version."""
return "top python_major"
@blueprint.route("/top/python_minor")
def api_top_python_minor():
"""Get the most downloaded packages by python minor version."""
return "top python_minor"
@blueprint.route("/top/system")
def api_top_system():
"""Get the most downloaded packages by system."""
return "top python_minor"

28
pypistats/views/error.py Normal file

@@ -0,0 +1,28 @@
"""Error page handlers."""
from flask import Blueprint
blueprint = Blueprint('error', __name__, template_folder='templates')
@blueprint.app_errorhandler(400)
def handle_400(err):
"""Return 400."""
return "400"
@blueprint.app_errorhandler(401)
def handle_401(err):
"""Return 401."""
return "401"
@blueprint.app_errorhandler(404)
def handle_404(err):
"""Return 404."""
return "404"
@blueprint.app_errorhandler(500)
def handle_500(err):
"""Return 500."""
return "500"

40
pypistats/views/general.py Normal file

@@ -0,0 +1,40 @@
"""General pages."""
from flask import Blueprint
from pypistats.models.download import OverallDownloadCount
from pypistats.models.download import PythonMajorDownloadCount
from pypistats.models.download import PythonMinorDownloadCount
from pypistats.models.download import SystemDownloadCount
blueprint = Blueprint('general', __name__, template_folder='templates')
@blueprint.route("/")
def index():
"""Render the home page."""
return "PYPISTATS!"
@blueprint.route("/about")
def about():
"""Render the about page."""
return "About this website."
@blueprint.route("/<package>")
def package(package):
"""Render the package page."""
return package + ' main page'
@blueprint.route("/top")
def top():
"""Render the top packages page."""
return 'top stats'
@blueprint.route("/status")
def status():
"""Return OK."""
return "OK"

66
pypistats/views/user.py Normal file

@@ -0,0 +1,66 @@
"""User page for tracking packages."""
from flask import Blueprint
from flask import flash
from flask import g
from flask import redirect
from flask import request
from flask import session
from flask import url_for
from pypistats.extensions import db
from pypistats.extensions import github
from pypistats.models.user import User
blueprint = Blueprint('user', __name__, template_folder='templates')
@blueprint.route("/user/<user>")
def user(user):
"""Render the user's personal page."""
return user + "'s page"
@blueprint.route("/user/<user>/package/<package>", methods=['POST', 'DELETE'])
def user_package(user):
"""Handle adding and deleting packages to user's list."""
return "SOMETHING"
@blueprint.route('/login')
def login():
"""Login."""
return github.authorize()
@blueprint.route('/logout')
def logout():
"""Logout."""
session.pop('user_id', None)
return redirect(url_for('index'))
@blueprint.route('/github-callback')
@github.authorized_handler
def authorized(oauth_token):
"""Github authorization callback."""
next_url = request.args.get('next') or url_for('index')
if oauth_token is None:
flash("Authorization failed.")
return redirect(next_url)
user = User.query.filter_by(token=oauth_token).first()
if user is None:
user = User(oauth_token)
db.add(user)
user.github_access_token = oauth_token
db.commit()
return redirect(next_url)
@github.access_token_getter
def token_getter():
"""Get the token for a user."""
user = g.user
if user is not None:
return user.github_access_token

3
run.sh Normal file

@@ -0,0 +1,3 @@
export FLASK_APP=pypistats/run.py
export FLASK_DEBUG=1
flask run