diff --git a/iso/empanadas/poetry.lock b/iso/empanadas/poetry.lock index 5b446f7..2e0834c 100644 --- a/iso/empanadas/poetry.lock +++ b/iso/empanadas/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.7.0 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. [[package]] name = "atomicwrites" @@ -31,17 +31,17 @@ tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "p [[package]] name = "boto3" -version = "1.34.110" +version = "1.34.159" description = "The AWS SDK for Python" optional = false python-versions = ">=3.8" files = [ - {file = "boto3-1.34.110-py3-none-any.whl", hash = "sha256:2fc871b4a5090716c7a71af52c462e539529227f4d4888fd04896d5028f9cedc"}, - {file = "boto3-1.34.110.tar.gz", hash = "sha256:83ffe2273da7bdfdb480d85b0705f04e95bd110e9741f23328b7c76c03e6d53c"}, + {file = "boto3-1.34.159-py3-none-any.whl", hash = "sha256:21120d23cc37c0e80dc4f64434bc5664d2a5645dcd9bf8a8fa97ed5c82164ca0"}, + {file = "boto3-1.34.159.tar.gz", hash = "sha256:ffe7bbb88ba81b5d54bc8fa0cfb2f3b7fe63a6cffa0f9207df2ef5c22a1c0587"}, ] [package.dependencies] -botocore = ">=1.34.110,<1.35.0" +botocore = ">=1.34.159,<1.35.0" jmespath = ">=0.7.1,<2.0.0" s3transfer = ">=0.10.0,<0.11.0" @@ -50,13 +50,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.34.110" +version = "1.34.159" description = "Low-level, data-driven core of boto 3." 
optional = false python-versions = ">=3.8" files = [ - {file = "botocore-1.34.110-py3-none-any.whl", hash = "sha256:1edf3a825ec0a5edf238b2d42ad23305de11d5a71bb27d6f9a58b7e8862df1b6"}, - {file = "botocore-1.34.110.tar.gz", hash = "sha256:b2c98c40ecf0b1facb9e61ceb7dfa28e61ae2456490554a16c8dbf99f20d6a18"}, + {file = "botocore-1.34.159-py3-none-any.whl", hash = "sha256:7633062491457419a49f5860c014251ae85689f78266a3ce020c2c8688a76b97"}, + {file = "botocore-1.34.159.tar.gz", hash = "sha256:dc28806eb21e3c8d690c422530dff8b4b242ac033cbe98f160a9d37796c09cb1"}, ] [package.dependencies] @@ -68,17 +68,17 @@ urllib3 = [ ] [package.extras] -crt = ["awscrt (==0.20.9)"] +crt = ["awscrt (==0.21.2)"] [[package]] name = "certifi" -version = "2024.2.2" +version = "2024.7.4" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, - {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, + {file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"}, + {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"}, ] [[package]] @@ -191,6 +191,38 @@ files = [ {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] +[[package]] +name = "gitdb" +version = "4.0.11" +description = "Git Object Database" +optional = false +python-versions = ">=3.7" +files = [ + {file = "gitdb-4.0.11-py3-none-any.whl", hash = "sha256:81a3407ddd2ee8df444cbacea00e2d038e40150acfa3001696fe0dcf1d3adfa4"}, + {file = "gitdb-4.0.11.tar.gz", hash = "sha256:bf5421126136d6d0af55bc1e7c1af1c397a34f5b7bd79e776cd3e89785c2b04b"}, +] + +[package.dependencies] +smmap = ">=3.0.1,<6" + +[[package]] 
+name = "gitpython" +version = "3.1.43" +description = "GitPython is a Python library used to interact with Git repositories" +optional = false +python-versions = ">=3.7" +files = [ + {file = "GitPython-3.1.43-py3-none-any.whl", hash = "sha256:eec7ec56b92aad751f9912a73404bc02ba212a23adb2c7098ee668417051a1ff"}, + {file = "GitPython-3.1.43.tar.gz", hash = "sha256:35f314a9f878467f5453cc1fee295c3e18e52f1b99f10f6cf5b1682e968a9e7c"}, +] + +[package.dependencies] +gitdb = ">=4.0.1,<5" + +[package.extras] +doc = ["sphinx (==4.3.2)", "sphinx-autodoc-typehints", "sphinx-rtd-theme", "sphinxcontrib-applehelp (>=1.0.2,<=1.0.4)", "sphinxcontrib-devhelp (==1.0.2)", "sphinxcontrib-htmlhelp (>=2.0.0,<=2.0.1)", "sphinxcontrib-qthelp (==1.0.3)", "sphinxcontrib-serializinghtml (==1.1.5)"] +test = ["coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock", "mypy", "pre-commit", "pytest (>=7.3.1)", "pytest-cov", "pytest-instafail", "pytest-mock", "pytest-sugar", "typing-extensions"] + [[package]] name = "idna" version = "3.7" @@ -333,24 +365,24 @@ files = [ [[package]] name = "more-itertools" -version = "10.2.0" +version = "10.4.0" description = "More routines for operating on iterables, beyond itertools" optional = false python-versions = ">=3.8" files = [ - {file = "more-itertools-10.2.0.tar.gz", hash = "sha256:8fccb480c43d3e99a00087634c06dd02b0d50fbf088b380de5a41a015ec239e1"}, - {file = "more_itertools-10.2.0-py3-none-any.whl", hash = "sha256:686b06abe565edfab151cb8fd385a05651e1fdf8f0a14191e4439283421f8684"}, + {file = "more-itertools-10.4.0.tar.gz", hash = "sha256:fe0e63c4ab068eac62410ab05cccca2dc71ec44ba8ef29916a0090df061cf923"}, + {file = "more_itertools-10.4.0-py3-none-any.whl", hash = "sha256:0f7d9f83a0a8dcfa8a2694a770590d98a67ea943e3d9f5298309a484758c4e27"}, ] [[package]] name = "packaging" -version = "24.0" +version = "24.1" description = "Core utilities for Python packages" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = 
"packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, - {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, + {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, + {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, ] [[package]] @@ -433,73 +465,75 @@ six = ">=1.5" [[package]] name = "pyyaml" -version = "6.0.1" +version = "6.0.2" description = "YAML parser and emitter for Python" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, - {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, - {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, - {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash 
= "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, - {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, - {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = 
"sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, - {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, - {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = 
"sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, - {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, - {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, - {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, - {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, - {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, - {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, + {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, + {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, + {file = 
"PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, + {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, + {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, + {file = 
"PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, + {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, + {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, + {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, + {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, + {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, + {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, + {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, + {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, + {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, + {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, + {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, ] [[package]] name = "requests" -version = "2.32.2" +version = "2.32.3" description = "Python HTTP for Humans." optional = false python-versions = ">=3.8" files = [ - {file = "requests-2.32.2-py3-none-any.whl", hash = "sha256:fc06670dd0ed212426dfeb94fc1b983d917c4f9847c863f313c9dfaaffb7c23c"}, - {file = "requests-2.32.2.tar.gz", hash = "sha256:dd951ff5ecf3e3b3aa26b40703ba77495dab41da839ae72ef3c8e5d8e2433289"}, + {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, + {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, ] [package.dependencies] @@ -514,13 +548,13 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "rpm" -version = "0.1.0" +version = "0.2.0" description = "Shim RPM module for use in virtualenvs." 
optional = false python-versions = ">=3.6" files = [ - {file = "rpm-0.1.0-py3-none-any.whl", hash = "sha256:3664381f6f3cf050e97f49619cf57abacf3e77d68ddd3bfea3e7459c7c5f03f9"}, - {file = "rpm-0.1.0.tar.gz", hash = "sha256:0e320a806fb61c3980c0cd0c5f5faec97c73c347432902ba2955a08a7b1a034f"}, + {file = "rpm-0.2.0-py3-none-any.whl", hash = "sha256:4050b6033f7403be0a34f42a742c49ba74f2b0c6129f0247115b6078b24ddd71"}, + {file = "rpm-0.2.0.tar.gz", hash = "sha256:b92285f65c9ddf77678cb3e51aa67827426408fac34cdd8d537d8c14e3eaffbf"}, ] [package.extras] @@ -528,13 +562,13 @@ testing = ["tox"] [[package]] name = "s3transfer" -version = "0.10.1" +version = "0.10.2" description = "An Amazon S3 Transfer Manager" optional = false -python-versions = ">= 3.8" +python-versions = ">=3.8" files = [ - {file = "s3transfer-0.10.1-py3-none-any.whl", hash = "sha256:ceb252b11bcf87080fb7850a224fb6e05c8a776bab8f2b64b7f25b969464839d"}, - {file = "s3transfer-0.10.1.tar.gz", hash = "sha256:5683916b4c724f799e600f41dd9e10a9ff19871bf87623cc8f491cb4f5fa0a19"}, + {file = "s3transfer-0.10.2-py3-none-any.whl", hash = "sha256:eca1c20de70a39daee580aef4986996620f365c4e0fda6a86100231d62f1bf69"}, + {file = "s3transfer-0.10.2.tar.gz", hash = "sha256:0711534e9356d3cc692fdde846b4a1e4b0cb6519971860796e6bc4c7aea00ef6"}, ] [package.dependencies] @@ -554,15 +588,26 @@ files = [ {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] +[[package]] +name = "smmap" +version = "5.0.1" +description = "A pure Python implementation of a sliding window memory map manager" +optional = false +python-versions = ">=3.7" +files = [ + {file = "smmap-5.0.1-py3-none-any.whl", hash = "sha256:e6d8668fa5f93e706934a62d7b4db19c8d9eb8cf2adbb75ef1b675aa332b69da"}, + {file = "smmap-5.0.1.tar.gz", hash = "sha256:dceeb6c0028fdb6734471eb07c0cd2aae706ccaecab45965ee83f11c8d3b1f62"}, +] + [[package]] name = "urllib3" -version = "1.26.18" +version = "1.26.19" description = "HTTP library with 
thread-safe connection pooling, file post, and more." optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ - {file = "urllib3-1.26.18-py2.py3-none-any.whl", hash = "sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07"}, - {file = "urllib3-1.26.18.tar.gz", hash = "sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0"}, + {file = "urllib3-1.26.19-py2.py3-none-any.whl", hash = "sha256:37a0344459b199fce0e80b0d3569837ec6b6937435c5244e7fd73fa6006830f3"}, + {file = "urllib3-1.26.19.tar.gz", hash = "sha256:3e3d753a8618b86d7de333b4223005f68720bcd6a7d2bcb9fbd2229ec7c1e429"}, ] [package.extras] @@ -572,13 +617,13 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] name = "urllib3" -version = "2.2.1" +version = "2.2.2" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, - {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, + {file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"}, + {file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"}, ] [package.extras] @@ -611,20 +656,20 @@ files = [ [[package]] name = "zipp" -version = "3.18.2" +version = "3.20.0" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false python-versions = ">=3.8" files = [ - {file = "zipp-3.18.2-py3-none-any.whl", hash = "sha256:dce197b859eb796242b0622af1b8beb0a722d52aa2f57133ead08edd5bf5374e"}, - {file = "zipp-3.18.2.tar.gz", hash = 
"sha256:6278d9ddbcfb1f1089a88fde84481528b07b0e10474e09dcfe53dad4069fa059"}, + {file = "zipp-3.20.0-py3-none-any.whl", hash = "sha256:58da6168be89f0be59beb194da1250516fdaa062ccebd30127ac65d30045e10d"}, + {file = "zipp-3.20.0.tar.gz", hash = "sha256:0145e43d89664cfe1a2e533adc75adafed82fe2da404b4bbb6b026c0157bdb31"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["big-O", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] [metadata] lock-version = "2.0" python-versions = ">=3.9,<4" -content-hash = "ca397a25d73c157fb24520fb687fe526da59e2bc9e0747521d8bfbaac00bf7d6" +content-hash = "4ef7a9006f0f5485e840d95417574a6dc2f25981fceeaf938690eccd9ce0c73e" diff --git a/iso/empanadas/pyproject.toml b/iso/empanadas/pyproject.toml index af4a853..4381d0e 100644 --- a/iso/empanadas/pyproject.toml +++ b/iso/empanadas/pyproject.toml @@ -17,6 +17,7 @@ xmltodict = "^0.13.0" requests = "^2.32.2" kobo = "^0.36.0" attrs = "^23.2.0" +GitPython = ">=3.1.30" [tool.poetry.dev-dependencies] pytest = "~5" diff --git a/mangle/repoview/repoview-old.py b/mangle/repoview/repoview-old.py new file mode 100644 index 0000000..4b874d0 --- /dev/null +++ b/mangle/repoview/repoview-old.py @@ -0,0 +1,958 @@ +#!/usr/bin/python3 +# -*- mode: Python; indent-tabs-mode: nil; -*- +""" +Repoview is a small utility to generate static HTML pages for 
a repodata +directory, to make it easily browseable. + +@author: Konstantin Ryabitsev & contributors +@copyright: 2005 by Duke University, 2006-2007 by Konstantin Ryabitsev & co +@license: GPL +""" +## +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Library General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. +# +# Copyright (C) 2005 by Duke University, http://www.duke.edu/ +# Copyright (C) 2006 by McGill University, http://www.mcgill.ca/ +# Copyright (C) 2007 by Konstantin Ryabitsev and contributors +# Author: Konstantin Ryabitsev +# +#pylint: disable-msg=F0401,W0704 + +import os +import shutil +import sys +import time +from functools import cmp_to_key +import hashlib as md5 +from argparse import ArgumentParser +import jinja2 +from rpm import labelCompare +from xml.etree.ElementTree import fromstring, ElementTree, TreeBuilder +import sqlite3 as sqlite + +## +# Some hardcoded constants +# +PKGKID = 'package.j2' +PKGFILE = '%s.html' +GRPKID = 'group.j2' +GRPFILE = '%s.group.html' +IDXKID = 'index.j2' +IDXFILE = 'index.html' +RSSKID = 'rss.j2' +RSSFILE = 'latest-feed.xml' +ISOFORMAT = '%a, %d %b %Y %H:%M:%S %z' + +VERSION = '0.7.1' +SUPPORTED_DB_VERSION = 10 +DEFAULT_TEMPLATEDIR = '/usr/share/repoview/templates' + +def _mkid(text): + """ + Make a web-friendly filename out of group names and package names. 
+ + @param text: the text to clean up + @type text: str + + @return: a web-friendly filename + @rtype: str + """ + text = text.replace('/', '.') + text = text.replace(' ', '_') + return text + +def _humansize(bytez): + """ + This will return the size in sane units (KiB or MiB). + + @param bytes: number of bytes + @type bytes: int + + @return: human-readable string + @rtype: str + """ + if bytez < 1024: + return '%d Bytes' % bytez + bytez = int(bytez) + kbytes = bytez/1024 + if kbytes/1024 < 1: + return '%d KiB' % kbytes + return '%0.1f MiB' % (float(kbytes)/1024) + + +class Repoview: + """ + The working horse class. + """ + + def __del__(self): + for entry in self.cleanup: + if os.access(entry, os.W_OK): + os.unlink(entry) + + def __init__(self, opts): + """ + @param opts: ArgumentParser's opts + @type opts: ArgumentParser + """ + # list of files to remove at the end of processing + self.cleanup = [] + self.opts = opts + if opts.outdir: + self.outdir = opts.outdir + else: + self.outdir = os.path.join(opts.repodir, 'repoview') + + self.exclude = '1=1' + self.state_data = {} #? + self.written = {} #? + + self.groups = [] + self.letter_groups = [] + + self.pconn = None # primary.sqlite + self.oconn = None # other.sqlite + self.sconn = None # state db + self.fconn = None # filelists.sqlite + + self.setup_repo() + self.setup_outdir() + self.setup_state_db() + self.setup_excludes() + + if not self.groups: + self.setup_rpm_groups() + + letters = self.setup_letter_groups() + + repo_data = { + 'title': opts.title, + 'letters': letters, + 'my_version': VERSION, + 'env': {} + } + + try: + assert opts.env is not None + repo_data['env'] = { + e.split('=', 1)[0]: e.split('=', 1)[1] for e in opts.env + } + except AssertionError: + pass + except IndexError: + sys.stderr.write('invalid environment arguments. 
Exiting.\n') + sys.exit(1) + + def ymd(stamp): + return time.strftime('%Y-%m-%d', time.localtime(int(stamp))) + + self.j2loader = jinja2.FileSystemLoader(opts.templatedir) + self.j2env = jinja2.Environment(autoescape=True, trim_blocks=True, loader=self.j2loader) + self.j2env.filters['ymd'] = ymd + + self.group_kid = self.j2env.get_template(GRPKID) + self.pkg_kid = self.j2env.get_template(PKGKID) + + count = 0 + for group_data in self.groups + self.letter_groups: + (grp_name, grp_filename, grp_description, pkgnames) = group_data + + group_data = { + 'name': grp_name, + 'description': grp_description, + 'filename': grp_filename, + } + + packages = self.do_packages(repo_data, group_data, sorted(pkgnames)) + + if not packages: + # Empty groups are ignored + del self.groups[count] + continue + + count += 1 + + group_data['packages'] = packages + + checksum = self.mk_checksum(repo_data, group_data) + if self.has_changed(grp_filename, checksum): + # write group file + self.say('Writing group %s\n' % grp_filename) + outfile = os.path.join(self.outdir, grp_filename) + with open(outfile, "w") as fh: + fh.write(self.group_kid.render( + repo_data=repo_data, group_data=group_data + )) + + latest = self.get_latest_packages() + repo_data['latest'] = latest + repo_data['groups'] = self.groups + + checksum = self.mk_checksum(repo_data) + if self.has_changed('index.html', checksum): + # Write index.html and rss feed (if asked) + self.say('Writing index.html...') + idx_kid = self.j2env.get_template(IDXKID) + outfile = os.path.join(self.outdir, 'index.html') + with open(outfile, "w") as fh: + fh.write(idx_kid.render( + repo_data=repo_data, url=self.opts.url, latest=latest, + groups=self.groups, + time=time.strftime('%Y-%m-%d') + )) + self.say('done\n') + + # rss feed + if self.opts.url: + self.do_rss(repo_data, latest) + + self.remove_stale() + self.sconn.commit() + + def setup_state_db(self): + """ + Sets up the state-tracking database. 
+
+        @rtype: void
+        """
+        self.say('Examining state db...')
+        if self.opts.statedir:
+            # we'll use the md5sum of the repo location to make it unique
+            unique = '%s.state.sqlite' % md5.md5(self.outdir.encode()).hexdigest()
+            statedb = os.path.join(self.opts.statedir, unique)
+        else:
+            statedb = os.path.join(self.outdir, 'state.sqlite')
+
+        if os.access(statedb, os.W_OK):
+            if self.opts.force:
+                # clean slate -- remove state db and start over
+                os.unlink(statedb)
+        else:
+            # state_db not found, go into force mode
+            self.opts.force = True
+
+        self.sconn = sqlite.connect(statedb)
+        scursor = self.sconn.cursor()
+
+        query = """CREATE TABLE IF NOT EXISTS state (
+                       filename TEXT UNIQUE,
+                       checksum TEXT)"""
+        scursor.execute(query)
+
+        # read all state data into memory to track orphaned files
+        query = """SELECT filename, checksum FROM state"""
+        scursor.execute(query)
+        while True:
+            row = scursor.fetchone()
+            if row is None:
+                break
+            self.state_data[row[0]] = row[1]
+        self.say('done\n')
+
+    def setup_repo(self):
+        """
+        Examines the repository, makes sure that it's valid and supported,
+        and then opens the necessary databases.
+
+        @rtype: void
+        """
+        self.say('Examining repository...')
+        repomd = os.path.join(self.opts.repodir, 'repodata', 'repomd.xml')
+
+        if not os.access(repomd, os.R_OK):
+            sys.stderr.write('Not found: %s\n' % repomd)
+            sys.stderr.write('Does not look like a repository. 
Exiting.\n') + sys.exit(1) + + repoxml = open(repomd).read() + + xml = fromstring(repoxml) #IGNORE:E1101 + # look for primary_db, other_db, and optionally group + + primary = other = comps = filelists = dbversion = None + + xmlns = 'http://linux.duke.edu/metadata/repo' + for datanode in xml.findall('{%s}data' % xmlns): + href = datanode.find('{%s}location' % xmlns).attrib['href'] + if datanode.attrib['type'] == 'primary_db': + primary = os.path.join(self.opts.repodir, href) + dbversion = datanode.find('{%s}database_version' % xmlns).text + elif datanode.attrib['type'] == 'other_db': + other = os.path.join(self.opts.repodir, href) + elif datanode.attrib['type'] == 'group': + comps = os.path.join(self.opts.repodir, href) + elif datanode.attrib['type'] == 'filelists_db': + filelists = os.path.join(self.opts.repodir, href) + + if primary is None or dbversion is None: + self.say('Sorry, sqlite files not found in the repository.\n' + 'Please rerun createrepo with a -d flag and try again.\n') + sys.exit(1) + + if int(dbversion) > SUPPORTED_DB_VERSION: + self.say('Sorry, the db_version in the repository is %s, but ' + 'repoview only supports versions up to %s. Please check ' + 'for a newer repoview version.\n' % (dbversion, + SUPPORTED_DB_VERSION)) + sys.exit(1) + + self.say('done\n') + + self.say('Opening primary database...') + primary = self.z_handler(primary) + self.pconn = sqlite.connect(primary) + self.say('done\n') + + self.say('Opening changelogs database...') + other = self.z_handler(other) + self.oconn = sqlite.connect(other) + self.say('done\n') + + self.say('Opening filelists database...') + filelists = self.z_handler(filelists) + self.fconn = sqlite.connect(filelists) + self.say('done\n') + + if self.opts.comps: + comps = self.opts.comps + + if comps: + self.setup_comps_groups(comps) + + def say(self, text): + """ + Unless in quiet mode, output the text passed. 
+ + @param text: something to say + @type text: str + + @rtype: void + """ + if not self.opts.quiet: + sys.stdout.write(text) + + def setup_excludes(self): + """ + Formulates an SQL exclusion rule that we use throughout in order + to respect the ignores passed on the command line. + + @rtype: void + """ + # Formulate exclusion rule + xarches = [] + for xarch in self.opts.xarch: + xarch = xarch.replace("'", "''") + xarches.append("arch != '%s'" % xarch) + if xarches: + self.exclude += ' AND ' + ' AND '.join(xarches) + + pkgs = [] + for pkg in self.opts.ignore: + pkg = pkg.replace("'", "''") + pkg = pkg.replace("*", "%") + pkgs.append("name NOT LIKE '%s'" % pkg) + if pkgs: + self.exclude += ' AND ' + ' AND '.join(pkgs) + + def setup_outdir(self): + """ + Sets up the output directory. + + @rtype: void + """ + if self.opts.force and os.access(self.outdir, os.R_OK): + # clean slate -- remove everything + shutil.rmtree(self.outdir) + if not os.access(self.outdir, os.R_OK): + os.mkdir(self.outdir, 0o755) + + layoutsrc = os.path.join(self.opts.templatedir, 'layout') + layoutdst = os.path.join(self.outdir, 'layout') + if os.path.isdir(layoutsrc) and not os.access(layoutdst, os.R_OK): + self.say('Copying layout...') + shutil.copytree(layoutsrc, layoutdst) + self.say('done\n') + + def get_package_data(self, pkgname): + """ + Queries the packages and changelog databases and returns package data + in a dict: + + pkg_data = { + 'name': str, + 'filename': str, + 'summary': str, + 'description': str, + 'url': str, + 'rpm_license': str, + 'rpm_sourcerpm': str, + 'vendor': str, + 'rpms': [] + } + + the "rpms" key is a list of tuples with the following members: + (epoch, version, release, arch, time_build, size, location_href, + author, changelog, time_added) + + + @param pkgname: the name of the package to look up + @type pkgname: str + + @return: a REALLY hairy dict of values + @rtype: list + """ + # fetch versions + query = """SELECT pkgKey, + epoch, + version, + release, + arch, 
+ summary, + description, + url, + time_build, + rpm_license, + rpm_sourcerpm, + size_package, + location_href, + rpm_vendor + FROM packages + WHERE name='%s' AND %s + ORDER BY arch ASC""" % (pkgname, self.exclude) + pcursor = self.pconn.cursor() + pcursor.execute(query) + + rows = pcursor.fetchall() + + if not rows: + # Sorry, nothing found + return None + + if len(rows) == 1: + # only one package matching this name + versions = [rows[0]] + else: + # we will use the latest package as the "master" to + # obtain things like summary, description, etc. + # go through all available packages and create a dict + # keyed by (e,v,r) + temp = {} + for row in rows: + temp[(row[1], row[2], row[3], row[4])] = row + + keys = list(temp.keys()) + keys.sort( + key=cmp_to_key(lambda a, b: labelCompare(a[:3], b[:3])), + reverse=True + ) + versions = [] + for key in keys: + versions.append(temp[key]) + + pkg_filename = _mkid(PKGFILE % pkgname) + + pkg_data = { + 'name': pkgname, + 'filename': pkg_filename, + 'summary': None, + 'description': None, + 'url': None, + 'rpm_license': None, + 'rpm_sourcerpm': None, + 'vendor': None, + 'rpms': [] + } + + for row in versions: + (pkg_key, epoch, version, release, arch, summary, + description, url, time_build, rpm_license, rpm_sourcerpm, + size_package, location_href, vendor) = row + if pkg_data['summary'] is None: + pkg_data['summary'] = summary + pkg_data['description'] = description + pkg_data['url'] = url + pkg_data['rpm_license'] = rpm_license + pkg_data['rpm_sourcerpm'] = rpm_sourcerpm + pkg_data['vendor'] = vendor + + size = _humansize(size_package) + + # Get latest changelog entry for each version + query = '''SELECT author, date, changelog + FROM changelog WHERE pkgKey=%d + ORDER BY date DESC LIMIT 1''' % pkg_key + ocursor = self.oconn.cursor() + ocursor.execute(query) + orow = ocursor.fetchone() + if not orow: + author = time_added = changelog = None + else: + (author, time_added, changelog) = orow + # strip email and everything that 
follows from author + try: + author = author[:author.index('<')].strip() + except ValueError: + pass + + filelist = [] + + query = ''' select dirname, filenames, filetypes from filelist + where pkgKey=%d order by dirname desc''' % pkg_key + + fcursor = self.fconn.cursor() + fcursor.execute(query) + frows = fcursor.fetchall() + + for frow in frows: + fidx = 0 + (dirname, filenames, filetypes) = frow + for fname in filenames.split('/'): + filelist.append((filetypes[fidx], (dirname + '/' + fname))) + fidx += 1 + + pkg_data['rpms'].append((epoch, version, release, arch, + time_build, size, location_href, + author, changelog, time_added, filelist)) + return pkg_data + + + def do_packages(self, repo_data, group_data, pkgnames): + """ + Iterate through package names and write the ones that changed. + + @param repo_data: the dict with repository data + @type repo_data: dict + @param group_data: the dict with group data + @type group_data: dict + @param pkgnames: a list of package names (strings) + @type pkgnames: list + + @return: a list of tuples related to packages, which we later use + to create the group page. 
The members are as such: + (pkg_name, pkg_filename, pkg_summary) + @rtype: list + """ + # this is what we return for the group object + pkg_tuples = [] + + for pkgname in pkgnames: + pkg_filename = _mkid(PKGFILE % pkgname) + + if pkgname in self.written.keys(): + pkg_tuples.append(self.written[pkgname]) + continue + + pkg_data = self.get_package_data(pkgname) + + if pkg_data is None: + # sometimes comps does not reflect reality + continue + + pkg_tuple = (pkgname, pkg_filename, pkg_data['summary']) + pkg_tuples.append(pkg_tuple) + + checksum = self.mk_checksum(repo_data, group_data, pkg_data) + if self.has_changed(pkg_filename, checksum): + self.say('Writing package %s\n' % pkg_filename) + self.pkg_kid.group_data = group_data + self.pkg_kid.pkg_data = pkg_data + outfile = os.path.join(self.outdir, pkg_filename) + with open(outfile, "w") as fh: + fh.write(self.pkg_kid.render( + repo_data=repo_data, + group_data=group_data, + pkg_data=pkg_data + )) + self.written[pkgname] = pkg_tuple + else: + self.written[pkgname] = pkg_tuple + + return pkg_tuples + + def mk_checksum(self, *args): + """ + A fairly dirty function used for state tracking. This is how we know + if the contents of the page have changed or not. + + @param *args: dicts + @rtype *args: dicts + + @return: an md5 checksum of the dicts passed + @rtype: str + """ + mangle = [] + for data in args: + # since dicts are non-deterministic, we get keys, then sort them, + # and then create a list of values, which we then pickle. + keys = data.keys() + + for key in sorted(keys): + mangle.append(data[key]) + return md5.md5(str(mangle).encode()).hexdigest() + + def has_changed(self, filename, checksum): + """ + Figure out if the contents of the filename have changed, and do the + necessary state database tracking bits. 
+ + @param filename: the filename to check if it's changed + @type filename: str + @param checksum: the checksum from the current contents + @type checksum: str + + @return: true or false depending on whether the contents are different + @rtype: bool + """ + # calculate checksum + scursor = self.sconn.cursor() + if filename not in self.state_data.keys(): + # totally new entry + query = '''INSERT INTO state (filename, checksum) + VALUES ('%s', '%s')''' % (filename, checksum) + scursor.execute(query) + return True + if self.state_data[filename] != checksum: + # old entry, but changed + query = """UPDATE state + SET checksum='%s' + WHERE filename='%s'""" % (checksum, filename) + scursor.execute(query) + + # remove it from state_data tracking, so we know we've seen it + del self.state_data[filename] + return True + # old entry, unchanged + del self.state_data[filename] + return False + + def remove_stale(self): + """ + Remove errant stale files from the output directory, left from previous + repoview runs. + + @rtype void + """ + scursor = self.sconn.cursor() + for filename in self.state_data.keys(): + self.say('Removing stale file %s\n' % filename) + fullpath = os.path.join(self.outdir, filename) + if os.access(fullpath, os.W_OK): + os.unlink(fullpath) + query = """DELETE FROM state WHERE filename='%s'""" % filename + scursor.execute(query) + + def z_handler(self, dbfile): + """ + If the database file is compressed, uncompresses it and returns the + filename of the uncompressed file. 
+ + @param dbfile: the name of the file + @type dbfile: str + + @return: the name of the uncompressed file + @rtype: str + """ + (_, ext) = os.path.splitext(dbfile) + + if ext == '.bz2': + from bz2 import BZ2File + zfd = BZ2File(dbfile) + elif ext == '.gz': + from gzip import GzipFile + zfd = GzipFile(dbfile) + elif ext == '.xz': + from lzma import LZMAFile + zfd = LZMAFile(dbfile) + else: + # not compressed (or something odd) + return dbfile + + import tempfile + (unzfd, unzname) = tempfile.mkstemp('.repoview') + self.cleanup.append(unzname) + + unzfd = open(unzname, 'wb') + + while True: + data = zfd.read(16384) + if not data: + break + unzfd.write(data) + zfd.close() + unzfd.close() + + return unzname + + def setup_comps_groups(self, compsxml): + """ + Utility method for parsing comps.xml. + + @param compsxml: the location of comps.xml + @type compsxml: str + + @rtype: void + """ + from yum.comps import Comps + + self.say('Parsing comps.xml...') + comps = Comps() + comps.add(compsxml) + + for group in comps.groups: + if not group.user_visible or not group.packages: + continue + group_filename = _mkid(GRPFILE % group.groupid) + self.groups.append([group.name, group_filename, group.description, + group.packages]) + self.say('done\n') + + def setup_rpm_groups(self): + """ + When comps is not around, we use the (useless) RPM groups. 
+ + @rtype: void + """ + self.say('Collecting group information...') + query = """SELECT DISTINCT lower(rpm_group) AS rpm_group + FROM packages + ORDER BY rpm_group ASC""" + pcursor = self.pconn.cursor() + pcursor.execute(query) + + for (rpmgroup,) in pcursor.fetchall(): + qgroup = rpmgroup.replace("'", "''") + query = """SELECT DISTINCT name + FROM packages + WHERE lower(rpm_group)='%s' + AND %s + ORDER BY name""" % (qgroup, self.exclude) + pcursor.execute(query) + pkgnames = [] + for (pkgname,) in pcursor.fetchall(): + pkgnames.append(pkgname) + + group_filename = _mkid(GRPFILE % rpmgroup) + self.groups.append([rpmgroup, group_filename, None, pkgnames]) + self.say('done\n') + + def get_latest_packages(self, limit=30): + """ + Return necessary data for the latest NN packages. + + @param limit: how many do you want? + @type limit: int + + @return: a list of tuples containting the following data: + (pkgname, filename, version, release, built) + @rtype: list + """ + self.say('Collecting latest packages...') + query = """SELECT name + FROM packages + WHERE %s + GROUP BY name + ORDER BY MAX(time_build) DESC LIMIT %s""" % (self.exclude, limit) + pcursor = self.pconn.cursor() + pcursor.execute(query) + + latest = [] + query = """SELECT version, release, time_build + FROM packages + WHERE name = '%s' + ORDER BY time_build DESC LIMIT 1""" + for (pkgname,) in pcursor.fetchall(): + filename = _mkid(PKGFILE % pkgname.replace("'", "''")) + + pcursor.execute(query % pkgname) + (version, release, built) = pcursor.fetchone() + + latest.append((pkgname, filename, version, release, built)) + + self.say('done\n') + return latest + + def setup_letter_groups(self): + """ + Figure out which letters we have and set up the necessary groups. 
+ + @return: a string containing all first letters of all packages + @rtype: str + """ + self.say('Collecting letters...') + query = """SELECT DISTINCT substr(upper(name), 1, 1) AS letter + FROM packages + WHERE %s + ORDER BY letter""" % self.exclude + pcursor = self.pconn.cursor() + pcursor.execute(query) + + letters = '' + for (letter,) in pcursor.fetchall(): + letters += letter + rpmgroup = 'Letter %s' % letter + description = 'Packages beginning with letter "%s".' % letter + + pkgnames = [] + query = """SELECT DISTINCT name + FROM packages + WHERE name LIKE '%s%%' + AND %s""" % (letter, self.exclude) + pcursor.execute(query) + for (pkgname,) in pcursor.fetchall(): + pkgnames.append(pkgname) + + group_filename = _mkid(GRPFILE % rpmgroup).lower() + letter_group = (rpmgroup, group_filename, description, pkgnames) + self.letter_groups.append(letter_group) + self.say('done\n') + return letters + + def do_rss(self, repo_data, latest): + """ + Write the RSS feed. + + @param repo_data: the dict containing repository data + @type repo_data: dict + @param latest: the list of tuples returned by get_latest_packages + @type latest: list + + @rtype: void + """ + self.say('Generating rss feed...') + etb = TreeBuilder() + out = os.path.join(self.outdir, RSSFILE) + etb.start('rss', {'version': '2.0'}) + etb.start('channel', {}) + etb.start('title', {}) + etb.data(repo_data['title']) + etb.end('title') + etb.start('link', {}) + etb.data('%s/repoview/%s' % (self.opts.url, RSSFILE)) + etb.end('link') + etb.start('description', {}) + etb.data('Latest packages for %s' % repo_data['title']) + etb.end('description') + etb.start('lastBuildDate', {}) + etb.data(time.strftime(ISOFORMAT)) + etb.end('lastBuildDate') + etb.start('generator', {}) + etb.data('Repoview-%s' % repo_data['my_version']) + etb.end('generator') + + rss_kid = self.j2env.get_template(RSSKID) + + for row in latest: + pkg_data = self.get_package_data(row[0]) + + rpm = pkg_data['rpms'][0] + (epoch, version, release, 
arch, built) = rpm[:5] + etb.start('item', {}) + etb.start('guid', {}) + etb.data('%s/repoview/%s+%s:%s-%s.%s' % (self.opts.url, + pkg_data['filename'], + epoch, version, release, + arch)) + etb.end('guid') + etb.start('link', {}) + etb.data('%s/repoview/%s' % (self.opts.url, pkg_data['filename'])) + etb.end('link') + etb.start('pubDate', {}) + etb.data(time.strftime(ISOFORMAT, time.gmtime(int(built)))) + etb.end('pubDate') + etb.start('title', {}) + etb.data('Update: %s-%s-%s' % (pkg_data['name'], version, release)) + etb.end('title') + etb.start('description', {}) + etb.data(rss_kid.render( + repo_data=repo_data, + url=self.opts.url, + pkg_data=pkg_data, + )) + etb.end('description') + etb.end('item') + + etb.end('channel') + etb.end('rss') + rss = etb.close() + + etree = ElementTree(rss) + out = os.path.join(self.outdir, RSSFILE) + etree.write(out, 'utf-8') + self.say('done\n') + + +def main(): + """ + Parse the options and invoke the repoview class. + + @rtype: void + """ + parser = ArgumentParser() + + parser.add_argument("args", nargs=1, help="path to the repository") + parser.add_argument('--version', action='version', version='%(prog)s '+VERSION) + parser.add_argument('-q', '--quiet', dest='quiet', action='store_true', + help='Do not output anything except fatal errors.') + parser.add_argument('-f', '--force', dest='force', action='store_true', + help='Regenerate the pages even if the repomd checksum has not changed') + parser.add_argument('-s', '--state-dir', dest='statedir', + help='Create the state-tracking db in this directory ' + '(default: store in output directory)') + + repo_opts = parser.add_argument_group("repository specific options") + repo_opts.add_argument('-i', '--ignore-package', dest='ignore', action='append', + default=[], + help='Optionally ignore these packages -- can be a shell-style glob. ' + 'This is useful for excluding debuginfo packages, e.g.: ' + '"-i *debuginfo* -i *doc*". 
'
+                 'The globbing will be done against name-epoch-version-release, '
+                 'e.g.: "foo-0-1.0-1"')
+    repo_opts.add_argument('-x', '--exclude-arch', dest='xarch', action='append',
+                 default=[],
+                 help='Optionally exclude this arch. E.g.: "-x src -x ia64"')
+    repo_opts.add_argument('-c', '--comps', dest='comps',
+                 help='Use an alternative comps.xml file (default: off)')
+
+    tpl_opts = parser.add_argument_group("template specific options")
+    tpl_opts.add_argument('-k', '--template-dir', dest='templatedir',
+                 default=DEFAULT_TEMPLATEDIR,
+                 help='Use an alternative directory with jinja2 templates instead of '
+                 'the default: %(default)s. The template directory must contain four '
+                 'required template files: index.j2, group.j2, package.j2, rss.j2 '
+                 'and the "layout" dir which will be copied into the repoview directory')
+    tpl_opts.add_argument('-o', '--output-dir', dest='outdir',
+                 default='repoview',
+                 help='Create the repoview pages in this subdirectory inside '
+                 'the repository (default: "%(default)s")')
+    tpl_opts.add_argument('-t', '--title', dest='title',
+                 default='Repoview',
+                 help='Describe the repository in a few words. '
+                 'By default, "%(default)s" is used. '
+                 'E.g.: -t "Extras for Fedora Core 4 x86"')
+    tpl_opts.add_argument('-E', '--environment', dest='env', action='append',
+                 help='Add environment variables for usage in templates. '
+                 'E.g.: -E "foo=bar" -E "baz=yatta"')
+    rss_opts = parser.add_argument_group("RSS specific options")
+    rss_opts.add_argument('-u', '--url', dest='url',
+                 help='Repository URL to use when generating the RSS feed. E.g.: '
+                 '-u "http://fedoraproject.org/extras/4/i386". 
Leaving it off will ' + 'skip the rss feed generation') + + opts = parser.parse_args() + opts.repodir = opts.args[0] + Repoview(opts) + +if __name__ == '__main__': + main() diff --git a/mangle/repoview/repoview.py b/mangle/repoview/repoview.py new file mode 100644 index 0000000..95a4647 --- /dev/null +++ b/mangle/repoview/repoview.py @@ -0,0 +1,632 @@ +#!/usr/bin/env python3 +# -*-:python; coding:utf-8; -*- +# author: Louis Abel +# Repoview3 is a utility to generate HTML pages for dnf +# repository, to make it easily browsable. +# +# This repoview has been rewritten to: +# +# * Work with current dnf implementations +# * Do not directly use nor look at repo metadata (dnf only) +# * Use jinja2 templating +# * Remove unneeded things such as RSS +# * Make it stateless for now (removal of sqlite) +# * Support only what's in yum.repos.d (for now) +# +# Current original (afaik) can be found here: +# +# https://github.com/sergiomb2/repoview +# +# TODO: +# * Setup a table of checksums? +# * Add support for group package type (e.g. mandatory) + +""" +Repoview3 is a utility that generates HTML pages for dnf repositories in order +to provide a browseable and viewable representation of a given repository. +Loosely based on the original yum implementation, this uses dnf-native modules +in order to gather and generate the right data. + +It is named repoview3 to distinguish itself from the original version, in that +it is fully written in python 3. + +@author: Louis Abel +@license: GPLv2 +""" + +# pylint: disable=unused-import +import os +import shutil +import sys +import time +import base64 +from hashlib import sha256 as shahex +from functools import cmp_to_key +import argparse +from xml.sax.saxutils import escape as xmlescape +from xml.etree.ElementTree import ElementTree, TreeBuilder, tostring +from xml.dom import minidom +import sqlite3 as sqlite +# pylint can't find this. it's fine to ignore. 
+# pylint: disable=no-name-in-module +from rpm import labelCompare as lc +import dnf +import dnf.exceptions +from jinja2 import Environment as j2env, FileSystemLoader as j2fsl + +VERSION = '0.1.0' +TEMPLATE_PKG = 'package.j2' +TEMPLATE_GRP = 'group.j2' +TEMPLATE_INDEX = 'index.j2' +FILE_PKG = '%s.html' +FILE_GRP = '%s.group.html' +FILE_INDEX = 'index.html' +FORMAT = "%a, %d %b %Y %X GMT" +ON_PAGE_FORMAT = "%Y-%m-%d" + +DEF_TEMPLATE_DIR = '/usr/share/repoview3/templates' + +def to_unicode(string: str) -> str: + """ + Convert to unicode + """ + if isinstance(string, bytes): + return string.decode('utf8') + if isinstance(string, str): + return string + return str(string) + +def to_base64(string: str) -> str: + """ + Converts a string to base64, but we put single quotes around it. This makes + it easier to regex the value. + """ + string_bytes = string.encode('utf-8') + string_conv = base64.b64encode(string_bytes) + base64_str = "'" + string_conv.decode('utf-8') + "'" + return str(base64_str) + +def from_base64(string: str) -> str: + """ + Takes a base64 value and returns a string. We also strip off any single + quotes that can happen. + """ + stripped = string.replace("'", "") + conv_bytes = stripped.encode('utf-8') + convd_bytes = base64.b64decode(conv_bytes) + decoded = convd_bytes.decode('utf-8') + return decoded + +def human_size(numbytes: int): + """ + Returns the size in units that makes sense (KiB or MiB). 
+ """ + if numbytes < 1024: + return f'{numbytes} Bytes' + kilos = numbytes/1024 + if kilos/1024 < 1: + return f'{kilos} KiB' + floater = float(kilos)/1024 + return f'{floater} MiB' + +def unique_first_chara(lst: list) -> list: + """ + Returns a sorted unique list of the first characters of each list item + """ + uniques = list(set(pk[0] for pk in lst)) + uniques.sort() + return uniques + +def stamper(stamp): + """ + Returns a simple date or timestamp string + """ + return time.strftime(ON_PAGE_FORMAT, time.localtime(int(stamp))) + +def ezname(text): + """ + Make a web friendly name out of whatever text is thrown here + """ + text = text.replace('/', '.') + text = text.replace(' ', '_') + return text + +def uniqlist(lst): + """ + Takes a list and makes items unique + """ + new_list = list(dict.fromkeys(lst)) + new_list.sort() + return new_list + +class DnfQuiet(dnf.Base): + """ + DNF object + """ + def __init__(self): + dnf.Base.__init__(self) + + def substitute(self): + """ + Applies all vars from /etc/dnf/vars + """ + self.conf.substitutions.update_from_etc('/') + + def read_repos(self): + """ + Gets all dnf repos from the system + """ + self.read_all_repos() + + def get_data(self): + """ + Gets all dnf data as requested + """ + self.fill_sack() + + def get_group_objs(self): + """ + Return all groups in the form of a list + """ + available_groups = self.comps.groups + return available_groups + + def get_groups(self): + """ + Return all groups in the form of a list + """ + groups = [] + available_groups = self.comps.groups + for group in available_groups: + groups.append(group.name) + return groups + + def get_environments(self): + """ + Return all environments in the form of a list + """ + envs = [] + available_envs = self.comps.environments + for env in available_envs: + envs.append(env.name) + return envs + + def get_recent(self, days=1): + """ + Return most recent packages from dnf sack + """ + recent = [] + now = time.time() + recentlimit = now-(days*86400) + 
ftimehash = {} + if self.conf.showdupesfromrepos: + available = self.sack.query().available().filter() + else: + available = self.sack.query().available().filter(latest_per_arch=1) + + available.run() + + for package in available: + ftime = int(package.buildtime) + if ftime > recentlimit: + if ftime not in ftimehash: + ftimehash[ftime] = [package] + else: + ftimehash[ftime].append(package) + + for sometime in ftimehash.keys(): + for package in ftimehash[sometime]: + recent.append(package) + + return recent + +class RepoView: + """ + Does the actual repoview stuff + """ + def __init__(self, options): + """ + Initialize the RepoView class + """ + self.quiet = options.quiet + self.outdir = options.output_dir + self.link = options.link + self.title = options.title + self.desc = options.description + self.arches = options.arches + self.repoids = options.repoids + self.tmpldir = options.template_dir + + # dnf things + self.tempcache = options.tempcache + self.module_hotfixes = options.module_hotfixes + self.disable_modules = options.disable_all_modules + self.dnf_config = options.config + self.recents = options.recents + + # template things + self.j2loader = j2fsl(options.template_dir) + self.j2env = j2env(autoescape=True, trim_blocks=True, loader=self.j2loader) + self.j2env.filters['stamper'] = stamper + #self.group_template = self.j2env.get_template(TEMPLATE_GRP) + #self.package_template = self.j2env.get_template(TEMPLATE_PKG) + #self.index_template = self.j2env.get_template(TEMPLATE_INDEX) + + # Actually do dnf stuff right here + dnfobj = DnfQuiet() + self.sout('Loading dnf') + if options.config: + self.sout('Loading config') + dnfobj.conf.read(filename=options.config) + + if os.geteuid() != 0 or options.tempcache: + cachedir = dnfobj.conf.cachedir + if cachedir is None: + self.serr('Error: Could not make cachedir') + sys.exit(50) + dnfobj.conf.cachedir = cachedir + + try: + dnfobj.read_all_repos() + except: + self.serr('Could not read repos') + sys.exit(1) + + if 
len(self.repoids) > 0: + for repo in dnfobj.repos: + repoobj = dnfobj.repos[repo] + if repo not in self.repoids: + repoobj.disable() + else: + repoobj.enable() + if options.module_hotfixes: + try: + repoobj.set_or_append_opt_value('module_hotfixes', '1') + except: + self.serr('Warning: dnf library is too old to support setting values') + + repoobj.load_metadata_other = True + + self.sout('Getting all repo metadata') + try: + dnfobj.get_data() + except: + self.serr('repo data failure') + sys.exit(1) + + if self.disable_modules: + modobj = dnf.module.module_base.ModuleBase(dnfobj) + modobj.disable(['*']) + + # data things + self.sout('Obtaining group information') + groups = dnfobj.get_group_objs() + self.groups = self.get_group_data(groups) + self.sout('Obtaining environment information') + self.environments = dnfobj.get_environments() + + # package things + self.sout('Obtaining all package information') + self.sack_query = dnfobj.sack.query().available() + all_pkgs = self.sack_query.filter() + self.sout('Sorting packages by name') + self.named_pkgs = sorted(set(all_pkgs), key=lambda pkg: pkg.name) + self.sout('Sorting packages by build time') + sorted_pkgs = sorted(set(all_pkgs), key=lambda pkg: pkg.buildtime) + sorted_pkgs.reverse() + self.sout('Getting unique first character list') + package_names = list(set(pkg.name for pkg in sorted_pkgs)) + letters = unique_first_chara(package_names) + self.sout('Getting letter group package lists') + self.letter_groups = self.get_letter_group_data(letters) + self.sout(f'Getting {self.recents} of the latest packages') + self.latest = self.proc_latest(sorted_pkgs[:self.recents]) + + self.repo_filler = { + 'title': self.title, + 'letters': letters, + 'version': VERSION, + 'latest': self.latest + } + + self.sout('Beginning to process data') + self.proc_groups() + + def proc_groups(self): + """ + Process group data + """ + self.sout('Processing group data') + counter = 0 + for group_filler in self.groups + self.letter_groups: + 
(group_name, group_description, group_file, pkg_list) = group_filler + + group_filler = { + 'name': group_name, + 'description': group_description, + 'filename': group_file + } + + packages = self.proc_packages( + self.repo_filler, + group_filler, + sorted(pkg_list) + ) + + def proc_packages(self, repo_data, group_data, pkg_list): + """ + Process package data + """ + pkgtups = [] + written = {} + + for pkg in pkg_list: + pkg_file = ezname(FILE_PKG % pkg) + if pkg in written.keys(): + pkgtups.append(written[pkg]) + continue + + pkg_data = self.get_package_data(pkg) + + # This shouldn't happen, but sometimes groups in comps + # are just inaccurate. + if pkg_data is None: + continue + + pkgtup = (pkg, pkg_file, pkg_data['summary']) + pkgtups.append(pkgtup) + self.sout(f'Writing package {pkg} to {pkg_file}') + #self.package_template.group_data = group_data + #self.package_template.pkg_data = pkg_data + #output_file = os.path.join(self.outdir, pkg_file) + #with open(output_file, "w+") as of: + # of.write(self.package_template.render( + # repo_data=repo_data, + # group_data=group_data, + # pkg_data=pkg_data + # )) + + #written[pkg] = pkgtup + return pkgtups + + def proc_latest(self, pkglist): + """ + Process the list of latest packages and return a list of tuples + """ + tuplist = [] + for pkg in pkglist: + filename = ezname(FILE_PKG % pkg.name) + tuplist.append((pkg.name, filename, pkg.version, pkg.release, pkg.buildtime)) + + return tuplist + + def get_package_data(self, name): + """ + Returns a dict of package information + """ + pkg_query = self.sack_query.filter(name=name) + # we only want data from the first finding, in the case of + # multi-version or multilib. but we also have to account for multiple + if len(pkg_query) == 1: + pkg_info = self._pkg_return(pkg_query[0]) + versions = [pkg_info] + else: + # for loop against the pkg_query and get the data + # make sure that we only care about evra. 
+ # later we'll compare against evr for version ordering + tempcheck = {} + versions = [] + for vers in pkg_query: + #tempcheck[(vers.epoch, vers.version, vers.release, vers.arch)] = vers + tempcheck[(vers.epoch, vers.version, vers.release)] = vers + + keys = list(tempcheck.keys()) + keys.sort( + key=cmp_to_key(lambda a, b: lc(a[:3], b[:3])), + reverse=True + ) + for key in keys: + versions.append(tempcheck[key]) + + pkg_file = ezname(FILE_PKG % name) + pkg_data = { + 'name': name, + 'filename': pkg_file, + 'summary': None, + 'description': None, + 'url': None, + 'license': None, + 'sourcerpm': None, + 'vendor': None, + 'rpms': [] + } + + print(versions) + for data in versions: + (name, epoch, version, release, arch, summary, description, url, + buildtime, rpmlicense, sourcerpm, size, location, remote_location, + vendor, changelogs, filelist) = data + # we have to check this because if we have multiple + # versions/packages we want to make sure we don't keep adding data + # that's already there + if pkg_data['summary'] is None: + pkg_data['summary'] = summary + pkg_data['description'] = description + pkg_data['url'] = url + pkg_data['license'] = rpmlicense + pkg_data['sourcerpm'] = sourcerpm + pkg_data['vendor'] = vendor + + size = human_size(size) + + # changelog stuff + if changelogs is not None: + changelog_list = changelogs.copy() + else: + changelog_list = [] + for meta in changelog_list[:2]: + author = meta['author'] + try: + author = author[:author.index('<')].strip() + except ValueError: + pass + meta['author'] = author + meta['text'].replace("\n", "
\n") + + pkg_data['rpms'].append(( + epoch, + version, + release, + arch, + buildtime, + size, + location, + remote_location, + changelog_list, + filelist + )) + + return pkg_data + + def get_group_data(self, groups): + """ + Returns a tuple of group information + """ + list_of_list = [] + pkg_list = [] + for group in groups: + all_group_pkgs = (group.default_packages + + group.mandatory_packages + + group.optional_packages + + group.conditional_packages) + if not group.visible or not all_group_pkgs: + continue + for pkg in all_group_pkgs: + pkg_list.append(pkg.name) + group_filename = ezname(FILE_GRP % group.id) + list_of_list.append([group.ui_name, + group.ui_description, + group_filename, + pkg_list]) + + return list_of_list + + def get_letter_group_data(self, letters): + """ + Returns data on packages part of a letter group + """ + list_of_list = [] + for group in letters: + pkggroup = f'Letter {group}' + description = f'Packages beginning with the letter "{group}"' + filtered = self.sack_query.filter(name__glob=f'{group}*') + pkgs = [] + for filt in filtered: + pkgs.append(filt.name) + + # There is a chance that a package may be multi-lib or may have + # multiple versions of itself in a repo + uniqpkgs = uniqlist(pkgs) + group_filename = ezname(FILE_GRP % group) + list_of_list.append([pkggroup, + description, + group_filename, + uniqpkgs]) + + return list_of_list + + def setup_output(self): + """ + Setup the output directory + """ + if os.access(self.outdir, os.R_OK): + shutil.rmtree(self.outdir) + else: + os.mkdir(self.outdir, 0o755) + + # Layouts can be created - This is a carry over from the former repoview + self.sout('Checking if we have a layout to copy') + layout_src = os.path.join(self.tmpldir, 'layout') + layout_dest = os.path.join(self.outdir, 'layout') + if os.path.isdir(layout_src) and not os.access(layout_dest, os.R_OK): + self.sout('Copying layout') + shutil.copytree(layout_src, layout_dest) + + def sout(self, msg): + """ + Send a message to 
stdout + """ + if not self.quiet: + sys.stdout.write(msg + '\n') + + def serr(self, msg): + """ + Send a message to stderr. Pierces quiet mode. + """ + sys.stderr.write(msg + '\n') + + @staticmethod + def _pkg_return(data): + """ + Returns a tuple of needed package data. This func is to avoid + duplicating the work in proc packages + """ + ordered_data = ( + data.name, + data.epoch, + data.version, + data.release, + data.arch, + data.summary, + data.description, + data.url, + data.buildtime, + data.license, + data.sourcerpm, + data.size, + data.location, + data.remote_location(), + data.vendor, + data.changelogs, + data.files + ) + return ordered_data + +def main(options): + """ + Start up the repoview script + """ + RepoView(options) + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument('--link', type=str, default='https://github.com/rpm-software-management/dnf', + help='URL link to repository root') + parser.add_argument('--title', type=str, default='Repository Packages', + help='Title of the page') + parser.add_argument('--description', type=str, + default='Package, group, and general repository information', + help='Description of the feed') + parser.add_argument('--quiet', action='store_true', + help='Prevents messages on stdout and stderr.') + + dnf_opts = parser.add_argument_group("dnf options") + dnf_opts.add_argument('--tempcache', action='store_true', + help='Temporary cache location (automatically on if not root)') + dnf_opts.add_argument('--module-hotfixes', action='store_true', + help='Use this to catch all module packages alongside everything else') + dnf_opts.add_argument('--arches', action='append', default=[], + help='List of architectures to care about') + dnf_opts.add_argument('--config', type=str, default='', + help='A dnf configuration to use if you do not want to use the default') + dnf_opts.add_argument('--disable-all-modules', action='store_true', + help='Disables all modules. 
Useful for getting newer than 8 data.') + dnf_opts.add_argument('--recents', type=int, default=30, help='Number of latest packages') + dnf_opts.add_argument('repoids', metavar='N', type=str, nargs='+') + template_opts = parser.add_argument_group('template options') + template_opts.add_argument('--output-dir', type=str, default='repoview') + template_opts.add_argument('--template-dir', type=str, + default=DEF_TEMPLATE_DIR) + results = parser.parse_args() + + main(results) diff --git a/mangle/repoview/templates/style/style.css b/mangle/repoview/templates/style/style.css new file mode 100644 index 0000000..15b3504 --- /dev/null +++ b/mangle/repoview/templates/style/style.css @@ -0,0 +1,123 @@ +.nav { + text-align: right; + color: gray; + font-size: small; + } +.nactive { + border-bottom: 1px dotted blue; + background-color: lavender; + color: blue; + font-size: small; + text-decoration: none; + } +.ninactive { + color: gray; + font-size: small; + } +.nlink { + text-decoration: none; + color: blue; + font-size: small; + } +.nlink:hover { + background-color: lavender; + border-bottom: 1px dotted blue; + font-size: small; + } +.inpage { + text-decoration: none; + color: blue; + } +.inpage:hover { + background-color: mistyrose; + color: red; + border-bottom: 1px dotted red; + } +.levbar { + position: absolute; + top: 0px; + left: 0px; + width: 11em; + height: 100%; + border-right: 4px dotted gray; + border-bottom: 4px dotted gray; + background-color: gainsboro; + } +.main { + position: absolute; + left: 13em; + width: 75%; + } +h1,h2,h3,h4,h5 { + border-bottom: 1px dotted gray; + border-top: 1px dotted gray; + background-color: whitesmoke; + font-weight: normal; + } +.pagetitle { + border-top: 1px dotted gray; + border-bottom: 1px dotted gray; + padding-top: 5%; + padding-bottom: 5%; + margin-top: 5%; + margin-bottom: 5%; + text-align: center; + width: 100%; + color: gray; + background-color: white; + } +dt { + font-weight: bold; + margin-top: 1%; + } +th { + 
background-color: whitesmoke; + text-align: left; + } +.field { + background-color: whitesmoke; + text-align: right; + } +.levbarlist { + list-style-type: none; + padding: 5%; + border-top: 1px dotted gray; + border-bottom: 1px dotted gray; + background-color: whitesmoke; + } +.pkglist { + padding-top: 2%; + padding-bottom: 2%; + list-style-type: circle; + } +.letterlist { + background-color: whitesmoke; + } +.letterlist a { + padding-left: 0.2em; + padding-right: 0.2em; + } +.footernote { + text-align: right; + font-size: small; + background-color: whitesmoke; + border-top: 1px dotted gray; + color: gray; + } +.repoview { + text-decoration: none; + color: gray; + border-bottom: 1px dotted gray; + font-size: small; + } +.repoview:hover { + background-color: lavender; + border-bottom: 1px dotted blue; + color: blue; + font-size: small; + } +.rpmcontents { + display: none; + font-size: x-small; + font-family: monospace + }