Compare commits

...

No commits in common. "main" and "gh-pages" have entirely different histories.

126 changed files with 52818 additions and 3264 deletions

@@ -1,40 +0,0 @@
name: mkdocs build
on:
  push:
    branches:
      - main
jobs:
  build:
    runs-on: ubuntu-latest
    container:
      image: docker.io/rockylinux:9
    steps:
      - name: Install deps
        run: dnf -y install git python3 python3-pip
      - name: setup ssh
        env:
          SSH_KEY: "${{ secrets.SSH_KEY }}"
        run: |
          mkdir -p ~/.ssh/
          echo "$SSH_KEY" > ~/.ssh/id_rsa
          chmod 0700 ~/.ssh/
          chmod 0600 ~/.ssh/id_rsa
      - name: checkout
        env:
          GIT_SSH_COMMAND: "ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no"
        run:
          git clone https://git.resf.org/$GITHUB_REPOSITORY.git $GITHUB_WORKSPACE
      - name: Install python requirements
        run: python3 -m pip install -r requirements.txt
      - name: Deploy
        env:
          GIT_SSH_COMMAND: "ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no"
          GIT_AUTHOR_NAME: "Rocky Bot"
          GIT_AUTHOR_EMAIL: "auto@rockylinux.org"
        run: |
          git remote set-url origin ssh://git@git.resf.org:22220/$GITHUB_REPOSITORY.git
          python3 -m mkdocs gh-deploy --force
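This deleted workflow is what produced the gh-pages branch being compared here: the Deploy step builds the wiki with MkDocs and force-pushes the result to gh-pages, a branch whose commit history is unrelated to main (hence the "no commits in common" note above). A minimal hand-run sketch of that publish step, assuming requirements.txt pulls in mkdocs and that git.resf.org accepts your SSH key (the real job authenticates with the SSH_KEY secret):

```
# Sketch only: mirrors the workflow's Deploy step from a local clone of the wiki.
python3 -m pip install -r requirements.txt                                       # installs mkdocs, per the workflow
git remote set-url origin "ssh://git@git.resf.org:22220/$GITHUB_REPOSITORY.git"  # CI sets $GITHUB_REPOSITORY; substitute your repo slug locally
python3 -m mkdocs gh-deploy --force                                              # builds the site and force-pushes it to gh-pages
```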

1
.gitignore vendored
@@ -1 +0,0 @@
site/

0
.nojekyll Normal file

1163
404.html Normal file

File diff suppressed because it is too large.

@@ -1,14 +0,0 @@
# Release Engineering (SIG/Core) Wiki
[![mkdocs build](https://github.com/rocky-linux/sig-core-wiki/actions/workflows/gh.yml/badge.svg)](https://github.com/rocky-linux/sig-core-wiki/actions/workflows/gh.yml)
This is the wiki repository for SIG/Core.
https://sig-core.rocky.page
## Building
To test the appearance of this wiki, give it a try:
```
```
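The snippet in this deleted README renders empty here; going by the workflow above, the build it refers to is presumably the stock MkDocs preview flow. A hedged sketch, assuming requirements.txt provides mkdocs and the wiki's theme (which is what the CI job installs):

```
# Hypothetical local preview; the README's original commands are not shown in this diff.
python3 -m pip install -r requirements.txt   # mkdocs + theme, as installed by the workflow
python3 -m mkdocs serve                      # serves a live preview at http://127.0.0.1:8000
```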


BIN
assets/images/favicon.png Normal file

Binary file not shown. (1.8 KiB)

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

@@ -0,0 +1,18 @@
/*!
* Lunr languages, `Danish` language
* https://github.com/MihaiValentin/lunr-languages
*
* Copyright 2014, Mihai Valentin
* http://www.mozilla.org/MPL/
*/
/*!
* based on
* Snowball JavaScript Library v0.3
* http://code.google.com/p/urim/
* http://snowball.tartarus.org/
*
* Copyright 2010, Oleg Mazko
* http://www.mozilla.org/MPL/
*/
!function(e,r){"function"==typeof define&&define.amd?define(r):"object"==typeof exports?module.exports=r():r()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. Please include / require Lunr stemmer support before this script.");e.da=function(){this.pipeline.reset(),this.pipeline.add(e.da.trimmer,e.da.stopWordFilter,e.da.stemmer),this.searchPipeline&&(this.searchPipeline.reset(),this.searchPipeline.add(e.da.stemmer))},e.da.wordCharacters="A-Za-zªºÀ-ÖØ-öø-ʸˠ-ˤᴀ-ᴥᴬ-ᵜᵢ-ᵥᵫ-ᵷᵹ-ᶾḀ-ỿⁱⁿₐ-ₜKÅℲⅎⅠ-ↈⱠ-ⱿꜢ-ꞇꞋ-ꞭꞰ-ꞷꟷ-ꟿꬰ-ꭚꭜ-ꭤff-stA--",e.da.trimmer=e.trimmerSupport.generateTrimmer(e.da.wordCharacters),e.Pipeline.registerFunction(e.da.trimmer,"trimmer-da"),e.da.stemmer=function(){var r=e.stemmerSupport.Among,i=e.stemmerSupport.SnowballProgram,n=new function(){function e(){var e,r=f.cursor+3;if(d=f.limit,0<=r&&r<=f.limit){for(a=r;;){if(e=f.cursor,f.in_grouping(w,97,248)){f.cursor=e;break}if(f.cursor=e,e>=f.limit)return;f.cursor++}for(;!f.out_grouping(w,97,248);){if(f.cursor>=f.limit)return;f.cursor++}d=f.cursor,d<a&&(d=a)}}function n(){var e,r;if(f.cursor>=d&&(r=f.limit_backward,f.limit_backward=d,f.ket=f.cursor,e=f.find_among_b(c,32),f.limit_backward=r,e))switch(f.bra=f.cursor,e){case 1:f.slice_del();break;case 2:f.in_grouping_b(p,97,229)&&f.slice_del()}}function t(){var e,r=f.limit-f.cursor;f.cursor>=d&&(e=f.limit_backward,f.limit_backward=d,f.ket=f.cursor,f.find_among_b(l,4)?(f.bra=f.cursor,f.limit_backward=e,f.cursor=f.limit-r,f.cursor>f.limit_backward&&(f.cursor--,f.bra=f.cursor,f.slice_del())):f.limit_backward=e)}function s(){var e,r,i,n=f.limit-f.cursor;if(f.ket=f.cursor,f.eq_s_b(2,"st")&&(f.bra=f.cursor,f.eq_s_b(2,"ig")&&f.slice_del()),f.cursor=f.limit-n,f.cursor>=d&&(r=f.limit_backward,f.limit_backward=d,f.ket=f.cursor,e=f.find_among_b(m,5),f.limit_backward=r,e))switch(f.bra=f.cursor,e){case 1:f.slice_del(),i=f.limit-f.cursor,t(),f.cursor=f.limit-i;break;case 2:f.slice_from("løs")}}function o(){var e;f.cursor>=d&&(e=f.limit_backward,f.limit_backward=d,f.ket=f.cursor,f.out_grouping_b(w,97,248)?(f.bra=f.cursor,u=f.slice_to(u),f.limit_backward=e,f.eq_v_b(u)&&f.slice_del()):f.limit_backward=e)}var a,d,u,c=[new r("hed",-1,1),new r("ethed",0,1),new r("ered",-1,1),new r("e",-1,1),new r("erede",3,1),new r("ende",3,1),new r("erende",5,1),new r("ene",3,1),new r("erne",3,1),new r("ere",3,1),new r("en",-1,1),new r("heden",10,1),new r("eren",10,1),new r("er",-1,1),new r("heder",13,1),new r("erer",13,1),new r("s",-1,2),new r("heds",16,1),new r("es",16,1),new r("endes",18,1),new r("erendes",19,1),new r("enes",18,1),new r("ernes",18,1),new r("eres",18,1),new r("ens",16,1),new r("hedens",24,1),new r("erens",24,1),new r("ers",16,1),new r("ets",16,1),new r("erets",28,1),new r("et",-1,1),new r("eret",30,1)],l=[new r("gd",-1,-1),new r("dt",-1,-1),new r("gt",-1,-1),new r("kt",-1,-1)],m=[new r("ig",-1,1),new r("lig",0,1),new r("elig",1,1),new r("els",-1,1),new r("løst",-1,2)],w=[17,65,16,1,0,0,0,0,0,0,0,0,0,0,0,0,48,0,128],p=[239,254,42,3,0,0,0,0,0,0,0,0,0,0,0,0,16],f=new i;this.setCurrent=function(e){f.setCurrent(e)},this.getCurrent=function(){return f.getCurrent()},this.stem=function(){var r=f.cursor;return e(),f.limit_backward=r,f.cursor=f.limit,n(),f.cursor=f.limit,t(),f.cursor=f.limit,s(),f.cursor=f.limit,o(),!0}};return function(e){return"function"==typeof e.update?e.update(function(e){return 
n.setCurrent(e),n.stem(),n.getCurrent()}):(n.setCurrent(e),n.stem(),n.getCurrent())}}(),e.Pipeline.registerFunction(e.da.stemmer,"stemmer-da"),e.da.stopWordFilter=e.generateStopWordFilter("ad af alle alt anden at blev blive bliver da de dem den denne der deres det dette dig din disse dog du efter eller en end er et for fra ham han hans har havde have hende hendes her hos hun hvad hvis hvor i ikke ind jeg jer jo kunne man mange med meget men mig min mine mit mod ned noget nogle nu når og også om op os over på selv sig sin sine sit skal skulle som sådan thi til ud under var vi vil ville vor være været".split(" ")),e.Pipeline.registerFunction(e.da.stopWordFilter,"stopWordFilter-da")}});

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

@@ -0,0 +1 @@
!function(e,r){"function"==typeof define&&define.amd?define(r):"object"==typeof exports?module.exports=r():r()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. Please include / require Lunr stemmer support before this script.");e.hi=function(){this.pipeline.reset(),this.pipeline.add(e.hi.trimmer,e.hi.stopWordFilter,e.hi.stemmer),this.searchPipeline&&(this.searchPipeline.reset(),this.searchPipeline.add(e.hi.stemmer))},e.hi.wordCharacters="ऀ-ःऄ-एऐ-टठ-यर-िी-ॏॐ-य़ॠ-९॰-ॿa-zA-Z--0-9-",e.hi.trimmer=e.trimmerSupport.generateTrimmer(e.hi.wordCharacters),e.Pipeline.registerFunction(e.hi.trimmer,"trimmer-hi"),e.hi.stopWordFilter=e.generateStopWordFilter("अत अपना अपनी अपने अभी अंदर आदि आप इत्यादि इन इनका इन्हीं इन्हें इन्हों इस इसका इसकी इसके इसमें इसी इसे उन उनका उनकी उनके उनको उन्हीं उन्हें उन्हों उस उसके उसी उसे एक एवं एस ऐसे और कई कर करता करते करना करने करें कहते कहा का काफ़ी कि कितना किन्हें किन्हों किया किर किस किसी किसे की कुछ कुल के को कोई कौन कौनसा गया घर जब जहाँ जा जितना जिन जिन्हें जिन्हों जिस जिसे जीधर जैसा जैसे जो तक तब तरह तिन तिन्हें तिन्हों तिस तिसे तो था थी थे दबारा दिया दुसरा दूसरे दो द्वारा न नके नहीं ना निहायत नीचे ने पर पहले पूरा पे फिर बनी बही बहुत बाद बाला बिलकुल भी भीतर मगर मानो मे में यदि यह यहाँ यही या यिह ये रखें रहा रहे ऱ्वासा लिए लिये लेकिन व वग़ैरह वर्ग वह वहाँ वहीं वाले वुह वे वो सकता सकते सबसे सभी साथ साबुत साभ सारा से सो संग ही हुआ हुई हुए है हैं हो होता होती होते होना होने".split(" ")),e.hi.stemmer=function(){return function(e){return"function"==typeof e.update?e.update(function(e){return e}):e}}();var r=e.wordcut;r.init(),e.hi.tokenizer=function(i){if(!arguments.length||null==i||void 0==i)return[];if(Array.isArray(i))return i.map(function(r){return isLunr2?new e.Token(r.toLowerCase()):r.toLowerCase()});var t=i.toString().toLowerCase().replace(/^\s+/,"");return r.cut(t).split("|")},e.Pipeline.registerFunction(e.hi.stemmer,"stemmer-hi"),e.Pipeline.registerFunction(e.hi.stopWordFilter,"stopWordFilter-hi")}});

File diff suppressed because one or more lines are too long

@@ -0,0 +1 @@
!function(e,r){"function"==typeof define&&define.amd?define(r):"object"==typeof exports?module.exports=r():r()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. Please include / require Lunr stemmer support before this script.");e.hy=function(){this.pipeline.reset(),this.pipeline.add(e.hy.trimmer,e.hy.stopWordFilter)},e.hy.wordCharacters="[A-Za-z԰-֏ff-ﭏ]",e.hy.trimmer=e.trimmerSupport.generateTrimmer(e.hy.wordCharacters),e.Pipeline.registerFunction(e.hy.trimmer,"trimmer-hy"),e.hy.stopWordFilter=e.generateStopWordFilter("դու և եք էիր էիք հետո նաև նրանք որը վրա է որ պիտի են այս մեջ ն իր ու ի այդ որոնք այն կամ էր մի ես համար այլ իսկ էին ենք հետ ին թ էինք մենք նրա նա դուք եմ էի ըստ որպես ում".split(" ")),e.Pipeline.registerFunction(e.hy.stopWordFilter,"stopWordFilter-hy"),e.hy.stemmer=function(){return function(e){return"function"==typeof e.update?e.update(function(e){return e}):e}}(),e.Pipeline.registerFunction(e.hy.stemmer,"stemmer-hy")}});

File diff suppressed because one or more lines are too long

@@ -0,0 +1 @@
!function(e,r){"function"==typeof define&&define.amd?define(r):"object"==typeof exports?module.exports=r():r()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. Please include / require Lunr stemmer support before this script.");var r="2"==e.version[0];e.ja=function(){this.pipeline.reset(),this.pipeline.add(e.ja.trimmer,e.ja.stopWordFilter,e.ja.stemmer),r?this.tokenizer=e.ja.tokenizer:(e.tokenizer&&(e.tokenizer=e.ja.tokenizer),this.tokenizerFn&&(this.tokenizerFn=e.ja.tokenizer))};var t=new e.TinySegmenter;e.ja.tokenizer=function(i){var n,o,s,p,a,u,m,l,c,f;if(!arguments.length||null==i||void 0==i)return[];if(Array.isArray(i))return i.map(function(t){return r?new e.Token(t.toLowerCase()):t.toLowerCase()});for(o=i.toString().toLowerCase().replace(/^\s+/,""),n=o.length-1;n>=0;n--)if(/\S/.test(o.charAt(n))){o=o.substring(0,n+1);break}for(a=[],s=o.length,c=0,l=0;c<=s;c++)if(u=o.charAt(c),m=c-l,u.match(/\s/)||c==s){if(m>0)for(p=t.segment(o.slice(l,c)).filter(function(e){return!!e}),f=l,n=0;n<p.length;n++)r?a.push(new e.Token(p[n],{position:[f,p[n].length],index:a.length})):a.push(p[n]),f+=p[n].length;l=c+1}return a},e.ja.stemmer=function(){return function(e){return e}}(),e.Pipeline.registerFunction(e.ja.stemmer,"stemmer-ja"),e.ja.wordCharacters="一二三四五六七八九十百千万億兆一-龠々〆ヵヶぁ-んァ-ヴーア-ン゙a-zA-Z--0-9-",e.ja.trimmer=e.trimmerSupport.generateTrimmer(e.ja.wordCharacters),e.Pipeline.registerFunction(e.ja.trimmer,"trimmer-ja"),e.ja.stopWordFilter=e.generateStopWordFilter("これ それ あれ この その あの ここ そこ あそこ こちら どこ だれ なに なん 何 私 貴方 貴方方 我々 私達 あの人 あのかた 彼女 彼 です あります おります います は が の に を で え から まで より も どの と し それで しかし".split(" ")),e.Pipeline.registerFunction(e.ja.stopWordFilter,"stopWordFilter-ja"),e.jp=e.ja,e.Pipeline.registerFunction(e.jp.stemmer,"stemmer-jp"),e.Pipeline.registerFunction(e.jp.trimmer,"trimmer-jp"),e.Pipeline.registerFunction(e.jp.stopWordFilter,"stopWordFilter-jp")}});

@@ -0,0 +1 @@
module.exports=require("./lunr.ja");

@@ -0,0 +1 @@
!function(e,r){"function"==typeof define&&define.amd?define(r):"object"==typeof exports?module.exports=r():r()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. Please include / require Lunr stemmer support before this script.");e.kn=function(){this.pipeline.reset(),this.pipeline.add(e.kn.trimmer,e.kn.stopWordFilter,e.kn.stemmer),this.searchPipeline&&(this.searchPipeline.reset(),this.searchPipeline.add(e.kn.stemmer))},e.kn.wordCharacters="ಀ-಄ಅ-ಔಕ-ಹಾ-ೌ಼-ಽೕ-ೖೝ-ೞೠ-ೡೢ-ೣ೤೥೦-೯ೱ-ೳ",e.kn.trimmer=e.trimmerSupport.generateTrimmer(e.kn.wordCharacters),e.Pipeline.registerFunction(e.kn.trimmer,"trimmer-kn"),e.kn.stopWordFilter=e.generateStopWordFilter("ಮತ್ತು ಈ ಒಂದು ರಲ್ಲಿ ಹಾಗೂ ಎಂದು ಅಥವಾ ಇದು ರ ಅವರು ಎಂಬ ಮೇಲೆ ಅವರ ತನ್ನ ಆದರೆ ತಮ್ಮ ನಂತರ ಮೂಲಕ ಹೆಚ್ಚು ನ ಆ ಕೆಲವು ಅನೇಕ ಎರಡು ಹಾಗು ಪ್ರಮುಖ ಇದನ್ನು ಇದರ ಸುಮಾರು ಅದರ ಅದು ಮೊದಲ ಬಗ್ಗೆ ನಲ್ಲಿ ರಂದು ಇತರ ಅತ್ಯಂತ ಹೆಚ್ಚಿನ ಸಹ ಸಾಮಾನ್ಯವಾಗಿ ನೇ ಹಲವಾರು ಹೊಸ ದಿ ಕಡಿಮೆ ಯಾವುದೇ ಹೊಂದಿದೆ ದೊಡ್ಡ ಅನ್ನು ಇವರು ಪ್ರಕಾರ ಇದೆ ಮಾತ್ರ ಕೂಡ ಇಲ್ಲಿ ಎಲ್ಲಾ ವಿವಿಧ ಅದನ್ನು ಹಲವು ರಿಂದ ಕೇವಲ ದ ದಕ್ಷಿಣ ಗೆ ಅವನ ಅತಿ ನೆಯ ಬಹಳ ಕೆಲಸ ಎಲ್ಲ ಪ್ರತಿ ಇತ್ಯಾದಿ ಇವು ಬೇರೆ ಹೀಗೆ ನಡುವೆ ಇದಕ್ಕೆ ಎಸ್ ಇವರ ಮೊದಲು ಶ್ರೀ ಮಾಡುವ ಇದರಲ್ಲಿ ರೀತಿಯ ಮಾಡಿದ ಕಾಲ ಅಲ್ಲಿ ಮಾಡಲು ಅದೇ ಈಗ ಅವು ಗಳು ಎ ಎಂಬುದು ಅವನು ಅಂದರೆ ಅವರಿಗೆ ಇರುವ ವಿಶೇಷ ಮುಂದೆ ಅವುಗಳ ಮುಂತಾದ ಮೂಲ ಬಿ ಮೀ ಒಂದೇ ಇನ್ನೂ ಹೆಚ್ಚಾಗಿ ಮಾಡಿ ಅವರನ್ನು ಇದೇ ಯ ರೀತಿಯಲ್ಲಿ ಜೊತೆ ಅದರಲ್ಲಿ ಮಾಡಿದರು ನಡೆದ ಆಗ ಮತ್ತೆ ಪೂರ್ವ ಆತ ಬಂದ ಯಾವ ಒಟ್ಟು ಇತರೆ ಹಿಂದೆ ಪ್ರಮಾಣದ ಗಳನ್ನು ಕುರಿತು ಯು ಆದ್ದರಿಂದ ಅಲ್ಲದೆ ನಗರದ ಮೇಲಿನ ಏಕೆಂದರೆ ರಷ್ಟು ಎಂಬುದನ್ನು ಬಾರಿ ಎಂದರೆ ಹಿಂದಿನ ಆದರೂ ಆದ ಸಂಬಂಧಿಸಿದ ಮತ್ತೊಂದು ಸಿ ಆತನ ".split(" ")),e.kn.stemmer=function(){return function(e){return"function"==typeof e.update?e.update(function(e){return e}):e}}();var r=e.wordcut;r.init(),e.kn.tokenizer=function(t){if(!arguments.length||null==t||void 0==t)return[];if(Array.isArray(t))return t.map(function(r){return isLunr2?new e.Token(r.toLowerCase()):r.toLowerCase()});var n=t.toString().toLowerCase().replace(/^\s+/,"");return r.cut(n).split("|")},e.Pipeline.registerFunction(e.kn.stemmer,"stemmer-kn"),e.Pipeline.registerFunction(e.kn.stopWordFilter,"stopWordFilter-kn")}});

File diff suppressed because one or more lines are too long

@@ -0,0 +1 @@
!function(e,t){"function"==typeof define&&define.amd?define(t):"object"==typeof exports?module.exports=t():t()(e.lunr)}(this,function(){return function(e){e.multiLanguage=function(){for(var t=Array.prototype.slice.call(arguments),i=t.join("-"),r="",n=[],s=[],p=0;p<t.length;++p)"en"==t[p]?(r+="\\w",n.unshift(e.stopWordFilter),n.push(e.stemmer),s.push(e.stemmer)):(r+=e[t[p]].wordCharacters,e[t[p]].stopWordFilter&&n.unshift(e[t[p]].stopWordFilter),e[t[p]].stemmer&&(n.push(e[t[p]].stemmer),s.push(e[t[p]].stemmer)));var o=e.trimmerSupport.generateTrimmer(r);return e.Pipeline.registerFunction(o,"lunr-multi-trimmer-"+i),n.unshift(o),function(){this.pipeline.reset(),this.pipeline.add.apply(this.pipeline,n),this.searchPipeline&&(this.searchPipeline.reset(),this.searchPipeline.add.apply(this.searchPipeline,s))}}}});

File diff suppressed because one or more lines are too long

@@ -0,0 +1,18 @@
/*!
* Lunr languages, `Norwegian` language
* https://github.com/MihaiValentin/lunr-languages
*
* Copyright 2014, Mihai Valentin
* http://www.mozilla.org/MPL/
*/
/*!
* based on
* Snowball JavaScript Library v0.3
* http://code.google.com/p/urim/
* http://snowball.tartarus.org/
*
* Copyright 2010, Oleg Mazko
* http://www.mozilla.org/MPL/
*/
!function(e,r){"function"==typeof define&&define.amd?define(r):"object"==typeof exports?module.exports=r():r()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. Please include / require Lunr stemmer support before this script.");e.no=function(){this.pipeline.reset(),this.pipeline.add(e.no.trimmer,e.no.stopWordFilter,e.no.stemmer),this.searchPipeline&&(this.searchPipeline.reset(),this.searchPipeline.add(e.no.stemmer))},e.no.wordCharacters="A-Za-zªºÀ-ÖØ-öø-ʸˠ-ˤᴀ-ᴥᴬ-ᵜᵢ-ᵥᵫ-ᵷᵹ-ᶾḀ-ỿⁱⁿₐ-ₜKÅℲⅎⅠ-ↈⱠ-ⱿꜢ-ꞇꞋ-ꞭꞰ-ꞷꟷ-ꟿꬰ-ꭚꭜ-ꭤff-stA--",e.no.trimmer=e.trimmerSupport.generateTrimmer(e.no.wordCharacters),e.Pipeline.registerFunction(e.no.trimmer,"trimmer-no"),e.no.stemmer=function(){var r=e.stemmerSupport.Among,n=e.stemmerSupport.SnowballProgram,i=new function(){function e(){var e,r=w.cursor+3;if(a=w.limit,0<=r||r<=w.limit){for(s=r;;){if(e=w.cursor,w.in_grouping(d,97,248)){w.cursor=e;break}if(e>=w.limit)return;w.cursor=e+1}for(;!w.out_grouping(d,97,248);){if(w.cursor>=w.limit)return;w.cursor++}a=w.cursor,a<s&&(a=s)}}function i(){var e,r,n;if(w.cursor>=a&&(r=w.limit_backward,w.limit_backward=a,w.ket=w.cursor,e=w.find_among_b(m,29),w.limit_backward=r,e))switch(w.bra=w.cursor,e){case 1:w.slice_del();break;case 2:n=w.limit-w.cursor,w.in_grouping_b(c,98,122)?w.slice_del():(w.cursor=w.limit-n,w.eq_s_b(1,"k")&&w.out_grouping_b(d,97,248)&&w.slice_del());break;case 3:w.slice_from("er")}}function t(){var e,r=w.limit-w.cursor;w.cursor>=a&&(e=w.limit_backward,w.limit_backward=a,w.ket=w.cursor,w.find_among_b(u,2)?(w.bra=w.cursor,w.limit_backward=e,w.cursor=w.limit-r,w.cursor>w.limit_backward&&(w.cursor--,w.bra=w.cursor,w.slice_del())):w.limit_backward=e)}function o(){var e,r;w.cursor>=a&&(r=w.limit_backward,w.limit_backward=a,w.ket=w.cursor,e=w.find_among_b(l,11),e?(w.bra=w.cursor,w.limit_backward=r,1==e&&w.slice_del()):w.limit_backward=r)}var s,a,m=[new r("a",-1,1),new r("e",-1,1),new r("ede",1,1),new r("ande",1,1),new r("ende",1,1),new r("ane",1,1),new r("ene",1,1),new r("hetene",6,1),new r("erte",1,3),new r("en",-1,1),new r("heten",9,1),new r("ar",-1,1),new r("er",-1,1),new r("heter",12,1),new r("s",-1,2),new r("as",14,1),new r("es",14,1),new r("edes",16,1),new r("endes",16,1),new r("enes",16,1),new r("hetenes",19,1),new r("ens",14,1),new r("hetens",21,1),new r("ers",14,1),new r("ets",14,1),new r("et",-1,1),new r("het",25,1),new r("ert",-1,3),new r("ast",-1,1)],u=[new r("dt",-1,-1),new r("vt",-1,-1)],l=[new r("leg",-1,1),new r("eleg",0,1),new r("ig",-1,1),new r("eig",2,1),new r("lig",2,1),new r("elig",4,1),new r("els",-1,1),new r("lov",-1,1),new r("elov",7,1),new r("slov",7,1),new r("hetslov",9,1)],d=[17,65,16,1,0,0,0,0,0,0,0,0,0,0,0,0,48,0,128],c=[119,125,149,1],w=new n;this.setCurrent=function(e){w.setCurrent(e)},this.getCurrent=function(){return w.getCurrent()},this.stem=function(){var r=w.cursor;return e(),w.limit_backward=r,w.cursor=w.limit,i(),w.cursor=w.limit,t(),w.cursor=w.limit,o(),!0}};return function(e){return"function"==typeof e.update?e.update(function(e){return i.setCurrent(e),i.stem(),i.getCurrent()}):(i.setCurrent(e),i.stem(),i.getCurrent())}}(),e.Pipeline.registerFunction(e.no.stemmer,"stemmer-no"),e.no.stopWordFilter=e.generateStopWordFilter("alle at av bare begge ble blei bli blir blitt både båe da de deg dei deim deira deires dem den denne der dere deres det dette di din disse ditt du dykk dykkar då eg ein eit eitt eller 
elles en enn er et ett etter for fordi fra før ha hadde han hans har hennar henne hennes her hjå ho hoe honom hoss hossen hun hva hvem hver hvilke hvilken hvis hvor hvordan hvorfor i ikke ikkje ikkje ingen ingi inkje inn inni ja jeg kan kom korleis korso kun kunne kva kvar kvarhelst kven kvi kvifor man mange me med medan meg meget mellom men mi min mine mitt mot mykje ned no noe noen noka noko nokon nokor nokre nå når og også om opp oss over på samme seg selv si si sia sidan siden sin sine sitt sjøl skal skulle slik so som som somme somt så sånn til um upp ut uten var vart varte ved vere verte vi vil ville vore vors vort vår være være vært å".split(" ")),e.Pipeline.registerFunction(e.no.stopWordFilter,"stopWordFilter-no")}});

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

@@ -0,0 +1 @@
!function(e,r){"function"==typeof define&&define.amd?define(r):"object"==typeof exports?module.exports=r():r()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. Please include / require Lunr stemmer support before this script.");e.sa=function(){this.pipeline.reset(),this.pipeline.add(e.sa.trimmer,e.sa.stopWordFilter,e.sa.stemmer),this.searchPipeline&&(this.searchPipeline.reset(),this.searchPipeline.add(e.sa.stemmer))},e.sa.wordCharacters="ऀ-ःऄ-एऐ-टठ-यर-िी-ॏॐ-य़ॠ-९॰-ॿ꣠-꣱ꣲ-ꣷ꣸-ꣻ꣼-ꣽꣾ-ꣿᆰ0-ᆰ9",e.sa.trimmer=e.trimmerSupport.generateTrimmer(e.sa.wordCharacters),e.Pipeline.registerFunction(e.sa.trimmer,"trimmer-sa"),e.sa.stopWordFilter=e.generateStopWordFilter('तथा अयम्‌ एकम्‌ इत्यस्मिन्‌ तथा तत्‌ वा अयम्‌ इत्यस्य ते आहूत उपरि तेषाम्‌ किन्तु तेषाम्‌ तदा इत्यनेन अधिकः इत्यस्य तत्‌ केचन बहवः द्वि तथा महत्वपूर्णः अयम्‌ अस्य विषये अयं अस्ति तत्‌ प्रथमः विषये इत्युपरि इत्युपरि इतर अधिकतमः अधिकः अपि सामान्यतया ठ इतरेतर नूतनम्‌ द न्यूनम्‌ कश्चित्‌ वा विशालः द सः अस्ति तदनुसारम् तत्र अस्ति केवलम्‌ अपि अत्र सर्वे विविधाः तत्‌ बहवः यतः इदानीम्‌ द दक्षिण इत्यस्मै तस्य उपरि नथ अतीव कार्यम्‌ सर्वे एकैकम्‌ इत्यादि। एते सन्ति उत इत्थम्‌ मध्ये एतदर्थं . स कस्य प्रथमः श्री. करोति अस्मिन् प्रकारः निर्मिता कालः तत्र कर्तुं समान अधुना ते सन्ति स एकः अस्ति सः अर्थात् तेषां कृते . स्थितम् विशेषः अग्रिम तेषाम्‌ समान स्रोतः ख म समान इदानीमपि अधिकतया करोतु ते समान इत्यस्य वीथी सह यस्मिन् कृतवान्‌ धृतः तदा पुनः पूर्वं सः आगतः किम्‌ कुल इतर पुरा मात्रा स विषये उ अतएव अपि नगरस्य उपरि यतः प्रतिशतं कतरः कालः साधनानि भूत तथापि जात सम्बन्धि अन्यत्‌ ग अतः अस्माकं स्वकीयाः अस्माकं इदानीं अन्तः इत्यादयः भवन्तः इत्यादयः एते एताः तस्य अस्य इदम् एते तेषां तेषां तेषां तान् तेषां तेषां तेषां समानः सः एकः च तादृशाः बहवः अन्ये च वदन्ति यत् कियत् कस्मै कस्मै यस्मै यस्मै यस्मै यस्मै न अतिनीचः किन्तु प्रथमं सम्पूर्णतया ततः चिरकालानन्तरं पुस्तकं सम्पूर्णतया अन्तः किन्तु अत्र वा इह इव श्रद्धाय अवशिष्यते परन्तु अन्ये वर्गाः सन्ति ते सन्ति शक्नुवन्ति सर्वे मिलित्वा सर्वे एकत्र"'.split(" ")),e.sa.stemmer=function(){return function(e){return"function"==typeof e.update?e.update(function(e){return e}):e}}();var r=e.wordcut;r.init(),e.sa.tokenizer=function(t){if(!arguments.length||null==t||void 0==t)return[];if(Array.isArray(t))return t.map(function(r){return isLunr2?new e.Token(r.toLowerCase()):r.toLowerCase()});var i=t.toString().toLowerCase().replace(/^\s+/,"");return r.cut(i).split("|")},e.Pipeline.registerFunction(e.sa.stemmer,"stemmer-sa"),e.Pipeline.registerFunction(e.sa.stopWordFilter,"stopWordFilter-sa")}});

@@ -0,0 +1 @@
!function(r,t){"function"==typeof define&&define.amd?define(t):"object"==typeof exports?module.exports=t():t()(r.lunr)}(this,function(){return function(r){r.stemmerSupport={Among:function(r,t,i,s){if(this.toCharArray=function(r){for(var t=r.length,i=new Array(t),s=0;s<t;s++)i[s]=r.charCodeAt(s);return i},!r&&""!=r||!t&&0!=t||!i)throw"Bad Among initialisation: s:"+r+", substring_i: "+t+", result: "+i;this.s_size=r.length,this.s=this.toCharArray(r),this.substring_i=t,this.result=i,this.method=s},SnowballProgram:function(){var r;return{bra:0,ket:0,limit:0,cursor:0,limit_backward:0,setCurrent:function(t){r=t,this.cursor=0,this.limit=t.length,this.limit_backward=0,this.bra=this.cursor,this.ket=this.limit},getCurrent:function(){var t=r;return r=null,t},in_grouping:function(t,i,s){if(this.cursor<this.limit){var e=r.charCodeAt(this.cursor);if(e<=s&&e>=i&&(e-=i,t[e>>3]&1<<(7&e)))return this.cursor++,!0}return!1},in_grouping_b:function(t,i,s){if(this.cursor>this.limit_backward){var e=r.charCodeAt(this.cursor-1);if(e<=s&&e>=i&&(e-=i,t[e>>3]&1<<(7&e)))return this.cursor--,!0}return!1},out_grouping:function(t,i,s){if(this.cursor<this.limit){var e=r.charCodeAt(this.cursor);if(e>s||e<i)return this.cursor++,!0;if(e-=i,!(t[e>>3]&1<<(7&e)))return this.cursor++,!0}return!1},out_grouping_b:function(t,i,s){if(this.cursor>this.limit_backward){var e=r.charCodeAt(this.cursor-1);if(e>s||e<i)return this.cursor--,!0;if(e-=i,!(t[e>>3]&1<<(7&e)))return this.cursor--,!0}return!1},eq_s:function(t,i){if(this.limit-this.cursor<t)return!1;for(var s=0;s<t;s++)if(r.charCodeAt(this.cursor+s)!=i.charCodeAt(s))return!1;return this.cursor+=t,!0},eq_s_b:function(t,i){if(this.cursor-this.limit_backward<t)return!1;for(var s=0;s<t;s++)if(r.charCodeAt(this.cursor-t+s)!=i.charCodeAt(s))return!1;return this.cursor-=t,!0},find_among:function(t,i){for(var s=0,e=i,n=this.cursor,u=this.limit,o=0,h=0,c=!1;;){for(var a=s+(e-s>>1),f=0,l=o<h?o:h,_=t[a],m=l;m<_.s_size;m++){if(n+l==u){f=-1;break}if(f=r.charCodeAt(n+l)-_.s[m])break;l++}if(f<0?(e=a,h=l):(s=a,o=l),e-s<=1){if(s>0||e==s||c)break;c=!0}}for(;;){var _=t[s];if(o>=_.s_size){if(this.cursor=n+_.s_size,!_.method)return _.result;var b=_.method();if(this.cursor=n+_.s_size,b)return _.result}if((s=_.substring_i)<0)return 0}},find_among_b:function(t,i){for(var s=0,e=i,n=this.cursor,u=this.limit_backward,o=0,h=0,c=!1;;){for(var a=s+(e-s>>1),f=0,l=o<h?o:h,_=t[a],m=_.s_size-1-l;m>=0;m--){if(n-l==u){f=-1;break}if(f=r.charCodeAt(n-1-l)-_.s[m])break;l++}if(f<0?(e=a,h=l):(s=a,o=l),e-s<=1){if(s>0||e==s||c)break;c=!0}}for(;;){var _=t[s];if(o>=_.s_size){if(this.cursor=n-_.s_size,!_.method)return _.result;var b=_.method();if(this.cursor=n-_.s_size,b)return _.result}if((s=_.substring_i)<0)return 0}},replace_s:function(t,i,s){var e=s.length-(i-t),n=r.substring(0,t),u=r.substring(i);return r=n+s+u,this.limit+=e,this.cursor>=i?this.cursor+=e:this.cursor>t&&(this.cursor=t),e},slice_check:function(){if(this.bra<0||this.bra>this.ket||this.ket>this.limit||this.limit>r.length)throw"faulty slice operation"},slice_from:function(r){this.slice_check(),this.replace_s(this.bra,this.ket,r)},slice_del:function(){this.slice_from("")},insert:function(r,t,i){var s=this.replace_s(r,t,i);r<=this.bra&&(this.bra+=s),r<=this.ket&&(this.ket+=s)},slice_to:function(){return this.slice_check(),r.substring(this.bra,this.ket)},eq_v_b:function(r){return this.eq_s_b(r.length,r)}}}},r.trimmerSupport={generateTrimmer:function(r){var t=new RegExp("^[^"+r+"]+"),i=new RegExp("[^"+r+"]+$");return function(r){return"function"==typeof 
r.update?r.update(function(r){return r.replace(t,"").replace(i,"")}):r.replace(t,"").replace(i,"")}}}}});

@@ -0,0 +1,18 @@
/*!
* Lunr languages, `Swedish` language
* https://github.com/MihaiValentin/lunr-languages
*
* Copyright 2014, Mihai Valentin
* http://www.mozilla.org/MPL/
*/
/*!
* based on
* Snowball JavaScript Library v0.3
* http://code.google.com/p/urim/
* http://snowball.tartarus.org/
*
* Copyright 2010, Oleg Mazko
* http://www.mozilla.org/MPL/
*/
!function(e,r){"function"==typeof define&&define.amd?define(r):"object"==typeof exports?module.exports=r():r()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. Please include / require Lunr stemmer support before this script.");e.sv=function(){this.pipeline.reset(),this.pipeline.add(e.sv.trimmer,e.sv.stopWordFilter,e.sv.stemmer),this.searchPipeline&&(this.searchPipeline.reset(),this.searchPipeline.add(e.sv.stemmer))},e.sv.wordCharacters="A-Za-zªºÀ-ÖØ-öø-ʸˠ-ˤᴀ-ᴥᴬ-ᵜᵢ-ᵥᵫ-ᵷᵹ-ᶾḀ-ỿⁱⁿₐ-ₜKÅℲⅎⅠ-ↈⱠ-ⱿꜢ-ꞇꞋ-ꞭꞰ-ꞷꟷ-ꟿꬰ-ꭚꭜ-ꭤff-stA--",e.sv.trimmer=e.trimmerSupport.generateTrimmer(e.sv.wordCharacters),e.Pipeline.registerFunction(e.sv.trimmer,"trimmer-sv"),e.sv.stemmer=function(){var r=e.stemmerSupport.Among,n=e.stemmerSupport.SnowballProgram,t=new function(){function e(){var e,r=w.cursor+3;if(o=w.limit,0<=r||r<=w.limit){for(a=r;;){if(e=w.cursor,w.in_grouping(l,97,246)){w.cursor=e;break}if(w.cursor=e,w.cursor>=w.limit)return;w.cursor++}for(;!w.out_grouping(l,97,246);){if(w.cursor>=w.limit)return;w.cursor++}o=w.cursor,o<a&&(o=a)}}function t(){var e,r=w.limit_backward;if(w.cursor>=o&&(w.limit_backward=o,w.cursor=w.limit,w.ket=w.cursor,e=w.find_among_b(u,37),w.limit_backward=r,e))switch(w.bra=w.cursor,e){case 1:w.slice_del();break;case 2:w.in_grouping_b(d,98,121)&&w.slice_del()}}function i(){var e=w.limit_backward;w.cursor>=o&&(w.limit_backward=o,w.cursor=w.limit,w.find_among_b(c,7)&&(w.cursor=w.limit,w.ket=w.cursor,w.cursor>w.limit_backward&&(w.bra=--w.cursor,w.slice_del())),w.limit_backward=e)}function s(){var e,r;if(w.cursor>=o){if(r=w.limit_backward,w.limit_backward=o,w.cursor=w.limit,w.ket=w.cursor,e=w.find_among_b(m,5))switch(w.bra=w.cursor,e){case 1:w.slice_del();break;case 2:w.slice_from("lös");break;case 3:w.slice_from("full")}w.limit_backward=r}}var a,o,u=[new r("a",-1,1),new r("arna",0,1),new r("erna",0,1),new r("heterna",2,1),new r("orna",0,1),new r("ad",-1,1),new r("e",-1,1),new r("ade",6,1),new r("ande",6,1),new r("arne",6,1),new r("are",6,1),new r("aste",6,1),new r("en",-1,1),new r("anden",12,1),new r("aren",12,1),new r("heten",12,1),new r("ern",-1,1),new r("ar",-1,1),new r("er",-1,1),new r("heter",18,1),new r("or",-1,1),new r("s",-1,2),new r("as",21,1),new r("arnas",22,1),new r("ernas",22,1),new r("ornas",22,1),new r("es",21,1),new r("ades",26,1),new r("andes",26,1),new r("ens",21,1),new r("arens",29,1),new r("hetens",29,1),new r("erns",21,1),new r("at",-1,1),new r("andet",-1,1),new r("het",-1,1),new r("ast",-1,1)],c=[new r("dd",-1,-1),new r("gd",-1,-1),new r("nn",-1,-1),new r("dt",-1,-1),new r("gt",-1,-1),new r("kt",-1,-1),new r("tt",-1,-1)],m=[new r("ig",-1,1),new r("lig",0,1),new r("els",-1,1),new r("fullt",-1,3),new r("löst",-1,2)],l=[17,65,16,1,0,0,0,0,0,0,0,0,0,0,0,0,24,0,32],d=[119,127,149],w=new n;this.setCurrent=function(e){w.setCurrent(e)},this.getCurrent=function(){return w.getCurrent()},this.stem=function(){var r=w.cursor;return e(),w.limit_backward=r,w.cursor=w.limit,t(),w.cursor=w.limit,i(),w.cursor=w.limit,s(),!0}};return function(e){return"function"==typeof e.update?e.update(function(e){return t.setCurrent(e),t.stem(),t.getCurrent()}):(t.setCurrent(e),t.stem(),t.getCurrent())}}(),e.Pipeline.registerFunction(e.sv.stemmer,"stemmer-sv"),e.sv.stopWordFilter=e.generateStopWordFilter("alla allt att av blev bli blir blivit de dem den denna deras dess dessa det detta dig din dina ditt du där då efter ej eller 
en er era ert ett från för ha hade han hans har henne hennes hon honom hur här i icke ingen inom inte jag ju kan kunde man med mellan men mig min mina mitt mot mycket ni nu när någon något några och om oss på samma sedan sig sin sina sitta själv skulle som så sådan sådana sådant till under upp ut utan vad var vara varför varit varje vars vart vem vi vid vilka vilkas vilken vilket vår våra vårt än är åt över".split(" ")),e.Pipeline.registerFunction(e.sv.stopWordFilter,"stopWordFilter-sv")}});

@@ -0,0 +1 @@
!function(e,t){"function"==typeof define&&define.amd?define(t):"object"==typeof exports?module.exports=t():t()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. Please include / require Lunr stemmer support before this script.");e.ta=function(){this.pipeline.reset(),this.pipeline.add(e.ta.trimmer,e.ta.stopWordFilter,e.ta.stemmer),this.searchPipeline&&(this.searchPipeline.reset(),this.searchPipeline.add(e.ta.stemmer))},e.ta.wordCharacters="஀-உஊ-ஏஐ-ஙச-ட஠-னப-யர-ஹ஺-ிீ-௉ொ-௏ௐ-௙௚-௟௠-௩௪-௯௰-௹௺-௿a-zA-Z--0-9-",e.ta.trimmer=e.trimmerSupport.generateTrimmer(e.ta.wordCharacters),e.Pipeline.registerFunction(e.ta.trimmer,"trimmer-ta"),e.ta.stopWordFilter=e.generateStopWordFilter("அங்கு அங்கே அது அதை அந்த அவர் அவர்கள் அவள் அவன் அவை ஆக ஆகவே ஆகையால் ஆதலால் ஆதலினால் ஆனாலும் ஆனால் இங்கு இங்கே இது இதை இந்த இப்படி இவர் இவர்கள் இவள் இவன் இவை இவ்வளவு உனக்கு உனது உன் உன்னால் எங்கு எங்கே எது எதை எந்த எப்படி எவர் எவர்கள் எவள் எவன் எவை எவ்வளவு எனக்கு எனது எனவே என் என்ன என்னால் ஏது ஏன் தனது தன்னால் தானே தான் நாங்கள் நாம் நான் நீ நீங்கள்".split(" ")),e.ta.stemmer=function(){return function(e){return"function"==typeof e.update?e.update(function(e){return e}):e}}();var t=e.wordcut;t.init(),e.ta.tokenizer=function(r){if(!arguments.length||null==r||void 0==r)return[];if(Array.isArray(r))return r.map(function(t){return isLunr2?new e.Token(t.toLowerCase()):t.toLowerCase()});var i=r.toString().toLowerCase().replace(/^\s+/,"");return t.cut(i).split("|")},e.Pipeline.registerFunction(e.ta.stemmer,"stemmer-ta"),e.Pipeline.registerFunction(e.ta.stopWordFilter,"stopWordFilter-ta")}});

@@ -0,0 +1 @@
!function(e,t){"function"==typeof define&&define.amd?define(t):"object"==typeof exports?module.exports=t():t()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. Please include / require Lunr stemmer support before this script.");e.te=function(){this.pipeline.reset(),this.pipeline.add(e.te.trimmer,e.te.stopWordFilter,e.te.stemmer),this.searchPipeline&&(this.searchPipeline.reset(),this.searchPipeline.add(e.te.stemmer))},e.te.wordCharacters="ఀ-ఄఅ-ఔక-హా-ౌౕ-ౖౘ-ౚౠ-ౡౢ-ౣ౦-౯౸-౿఼ఽ్ౝ౷౤౥",e.te.trimmer=e.trimmerSupport.generateTrimmer(e.te.wordCharacters),e.Pipeline.registerFunction(e.te.trimmer,"trimmer-te"),e.te.stopWordFilter=e.generateStopWordFilter("అందరూ అందుబాటులో అడగండి అడగడం అడ్డంగా అనుగుణంగా అనుమతించు అనుమతిస్తుంది అయితే ఇప్పటికే ఉన్నారు ఎక్కడైనా ఎప్పుడు ఎవరైనా ఎవరో ఏ ఏదైనా ఏమైనప్పటికి ఒక ఒకరు కనిపిస్తాయి కాదు కూడా గా గురించి చుట్టూ చేయగలిగింది తగిన తర్వాత దాదాపు దూరంగా నిజంగా పై ప్రకారం ప్రక్కన మధ్య మరియు మరొక మళ్ళీ మాత్రమే మెచ్చుకో వద్ద వెంట వేరుగా వ్యతిరేకంగా సంబంధం".split(" ")),e.te.stemmer=function(){return function(e){return"function"==typeof e.update?e.update(function(e){return e}):e}}();var t=e.wordcut;t.init(),e.te.tokenizer=function(r){if(!arguments.length||null==r||void 0==r)return[];if(Array.isArray(r))return r.map(function(t){return isLunr2?new e.Token(t.toLowerCase()):t.toLowerCase()});var i=r.toString().toLowerCase().replace(/^\s+/,"");return t.cut(i).split("|")},e.Pipeline.registerFunction(e.te.stemmer,"stemmer-te"),e.Pipeline.registerFunction(e.te.stopWordFilter,"stopWordFilter-te")}});

@@ -0,0 +1 @@
!function(e,r){"function"==typeof define&&define.amd?define(r):"object"==typeof exports?module.exports=r():r()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. Please include / require Lunr stemmer support before this script.");var r="2"==e.version[0];e.th=function(){this.pipeline.reset(),this.pipeline.add(e.th.trimmer),r?this.tokenizer=e.th.tokenizer:(e.tokenizer&&(e.tokenizer=e.th.tokenizer),this.tokenizerFn&&(this.tokenizerFn=e.th.tokenizer))},e.th.wordCharacters="[฀-๿]",e.th.trimmer=e.trimmerSupport.generateTrimmer(e.th.wordCharacters),e.Pipeline.registerFunction(e.th.trimmer,"trimmer-th");var t=e.wordcut;t.init(),e.th.tokenizer=function(i){if(!arguments.length||null==i||void 0==i)return[];if(Array.isArray(i))return i.map(function(t){return r?new e.Token(t):t});var n=i.toString().replace(/^\s+/,"");return t.cut(n).split("|")}}});

File diff suppressed because one or more lines are too long

@@ -0,0 +1 @@
!function(e,r){"function"==typeof define&&define.amd?define(r):"object"==typeof exports?module.exports=r():r()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. Please include / require Lunr stemmer support before this script.");e.vi=function(){this.pipeline.reset(),this.pipeline.add(e.vi.stopWordFilter,e.vi.trimmer)},e.vi.wordCharacters="[A-Za-ẓ̀͐́͑̉̃̓ÂâÊêÔôĂ-ăĐ-đƠ-ơƯ-ư]",e.vi.trimmer=e.trimmerSupport.generateTrimmer(e.vi.wordCharacters),e.Pipeline.registerFunction(e.vi.trimmer,"trimmer-vi"),e.vi.stopWordFilter=e.generateStopWordFilter("là cái nhưng mà".split(" "))}});

@@ -0,0 +1 @@
!function(e,r){"function"==typeof define&&define.amd?define(r):"object"==typeof exports?module.exports=r(require("@node-rs/jieba")):r()(e.lunr)}(this,function(e){return function(r,t){if(void 0===r)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===r.stemmerSupport)throw new Error("Lunr stemmer support is not present. Please include / require Lunr stemmer support before this script.");var i="2"==r.version[0];r.zh=function(){this.pipeline.reset(),this.pipeline.add(r.zh.trimmer,r.zh.stopWordFilter,r.zh.stemmer),i?this.tokenizer=r.zh.tokenizer:(r.tokenizer&&(r.tokenizer=r.zh.tokenizer),this.tokenizerFn&&(this.tokenizerFn=r.zh.tokenizer))},r.zh.tokenizer=function(n){if(!arguments.length||null==n||void 0==n)return[];if(Array.isArray(n))return n.map(function(e){return i?new r.Token(e.toLowerCase()):e.toLowerCase()});t&&e.load(t);var o=n.toString().trim().toLowerCase(),s=[];e.cut(o,!0).forEach(function(e){s=s.concat(e.split(" "))}),s=s.filter(function(e){return!!e});var u=0;return s.map(function(e,t){if(i){var n=o.indexOf(e,u),s={};return s.position=[n,e.length],s.index=t,u=n,new r.Token(e,s)}return e})},r.zh.wordCharacters="\\w一-龥",r.zh.trimmer=r.trimmerSupport.generateTrimmer(r.zh.wordCharacters),r.Pipeline.registerFunction(r.zh.trimmer,"trimmer-zh"),r.zh.stemmer=function(){return function(e){return e}}(),r.Pipeline.registerFunction(r.zh.stemmer,"stemmer-zh"),r.zh.stopWordFilter=r.generateStopWordFilter("的 一 不 在 人 有 是 为 為 以 于 於 上 他 而 后 後 之 来 來 及 了 因 下 可 到 由 这 這 与 與 也 此 但 并 並 个 個 其 已 无 無 小 我 们 們 起 最 再 今 去 好 只 又 或 很 亦 某 把 那 你 乃 它 吧 被 比 别 趁 当 當 从 從 得 打 凡 儿 兒 尔 爾 该 該 各 给 給 跟 和 何 还 還 即 几 幾 既 看 据 據 距 靠 啦 另 么 麽 每 嘛 拿 哪 您 凭 憑 且 却 卻 让 讓 仍 啥 如 若 使 谁 誰 虽 雖 随 隨 同 所 她 哇 嗡 往 些 向 沿 哟 喲 用 咱 则 則 怎 曾 至 致 着 著 诸 諸 自".split(" ")),r.Pipeline.registerFunction(r.zh.stopWordFilter,"stopWordFilter-zh")}});

@@ -0,0 +1,206 @@
/**
* export the module via AMD, CommonJS or as a browser global
* Export code from https://github.com/umdjs/umd/blob/master/returnExports.js
*/
;(function (root, factory) {
if (typeof define === 'function' && define.amd) {
// AMD. Register as an anonymous module.
define(factory)
} else if (typeof exports === 'object') {
/**
* Node. Does not work with strict CommonJS, but
* only CommonJS-like environments that support module.exports,
* like Node.
*/
module.exports = factory()
} else {
// Browser globals (root is window)
factory()(root.lunr);
}
}(this, function () {
/**
* Just return a value to define the module export.
* This example returns an object, but the module
* can return a function as the exported value.
*/
return function(lunr) {
// TinySegmenter 0.1 -- Super compact Japanese tokenizer in Javascript
// (c) 2008 Taku Kudo <taku@chasen.org>
// TinySegmenter is freely distributable under the terms of a new BSD licence.
// For details, see http://chasen.org/~taku/software/TinySegmenter/LICENCE.txt
function TinySegmenter() {
var patterns = {
"[一二三四五六七八九十百千万億兆]":"M",
"[一-龠々〆ヵヶ]":"H",
"[ぁ-ん]":"I",
"[ァ-ヴーア-ン゙ー]":"K",
"[a-zA-Z--]":"A",
"[0-9-]":"N"
}
this.chartype_ = [];
for (var i in patterns) {
var regexp = new RegExp(i);
this.chartype_.push([regexp, patterns[i]]);
}
this.BIAS__ = -332
this.BC1__ = {"HH":6,"II":2461,"KH":406,"OH":-1378};
this.BC2__ = {"AA":-3267,"AI":2744,"AN":-878,"HH":-4070,"HM":-1711,"HN":4012,"HO":3761,"IA":1327,"IH":-1184,"II":-1332,"IK":1721,"IO":5492,"KI":3831,"KK":-8741,"MH":-3132,"MK":3334,"OO":-2920};
this.BC3__ = {"HH":996,"HI":626,"HK":-721,"HN":-1307,"HO":-836,"IH":-301,"KK":2762,"MK":1079,"MM":4034,"OA":-1652,"OH":266};
this.BP1__ = {"BB":295,"OB":304,"OO":-125,"UB":352};
this.BP2__ = {"BO":60,"OO":-1762};
this.BQ1__ = {"BHH":1150,"BHM":1521,"BII":-1158,"BIM":886,"BMH":1208,"BNH":449,"BOH":-91,"BOO":-2597,"OHI":451,"OIH":-296,"OKA":1851,"OKH":-1020,"OKK":904,"OOO":2965};
this.BQ2__ = {"BHH":118,"BHI":-1159,"BHM":466,"BIH":-919,"BKK":-1720,"BKO":864,"OHH":-1139,"OHM":-181,"OIH":153,"UHI":-1146};
this.BQ3__ = {"BHH":-792,"BHI":2664,"BII":-299,"BKI":419,"BMH":937,"BMM":8335,"BNN":998,"BOH":775,"OHH":2174,"OHM":439,"OII":280,"OKH":1798,"OKI":-793,"OKO":-2242,"OMH":-2402,"OOO":11699};
this.BQ4__ = {"BHH":-3895,"BIH":3761,"BII":-4654,"BIK":1348,"BKK":-1806,"BMI":-3385,"BOO":-12396,"OAH":926,"OHH":266,"OHK":-2036,"ONN":-973};
this.BW1__ = {",と":660,",同":727,"B1あ":1404,"B1同":542,"、と":660,"、同":727,"」と":1682,"あっ":1505,"いう":1743,"いっ":-2055,"いる":672,"うし":-4817,"うん":665,"から":3472,"がら":600,"こう":-790,"こと":2083,"こん":-1262,"さら":-4143,"さん":4573,"した":2641,"して":1104,"すで":-3399,"そこ":1977,"それ":-871,"たち":1122,"ため":601,"った":3463,"つい":-802,"てい":805,"てき":1249,"でき":1127,"です":3445,"では":844,"とい":-4915,"とみ":1922,"どこ":3887,"ない":5713,"なっ":3015,"など":7379,"なん":-1113,"にし":2468,"には":1498,"にも":1671,"に対":-912,"の一":-501,"の中":741,"ませ":2448,"まで":1711,"まま":2600,"まる":-2155,"やむ":-1947,"よっ":-2565,"れた":2369,"れで":-913,"をし":1860,"を見":731,"亡く":-1886,"京都":2558,"取り":-2784,"大き":-2604,"大阪":1497,"平方":-2314,"引き":-1336,"日本":-195,"本当":-2423,"毎日":-2113,"目指":-724,"B1あ":1404,"B1同":542,"」と":1682};
this.BW2__ = {"..":-11822,"11":-669,"――":-5730,"":-13175,"いう":-1609,"うか":2490,"かし":-1350,"かも":-602,"から":-7194,"かれ":4612,"がい":853,"がら":-3198,"きた":1941,"くな":-1597,"こと":-8392,"この":-4193,"させ":4533,"され":13168,"さん":-3977,"しい":-1819,"しか":-545,"した":5078,"して":972,"しな":939,"その":-3744,"たい":-1253,"たた":-662,"ただ":-3857,"たち":-786,"たと":1224,"たは":-939,"った":4589,"って":1647,"っと":-2094,"てい":6144,"てき":3640,"てく":2551,"ては":-3110,"ても":-3065,"でい":2666,"でき":-1528,"でし":-3828,"です":-4761,"でも":-4203,"とい":1890,"とこ":-1746,"とと":-2279,"との":720,"とみ":5168,"とも":-3941,"ない":-2488,"なが":-1313,"など":-6509,"なの":2614,"なん":3099,"にお":-1615,"にし":2748,"にな":2454,"によ":-7236,"に対":-14943,"に従":-4688,"に関":-11388,"のか":2093,"ので":-7059,"のに":-6041,"のの":-6125,"はい":1073,"はが":-1033,"はず":-2532,"ばれ":1813,"まし":-1316,"まで":-6621,"まれ":5409,"めて":-3153,"もい":2230,"もの":-10713,"らか":-944,"らし":-1611,"らに":-1897,"りし":651,"りま":1620,"れた":4270,"れて":849,"れば":4114,"ろう":6067,"われ":7901,"を通":-11877,"んだ":728,"んな":-4115,"一人":602,"一方":-1375,"一日":970,"一部":-1051,"上が":-4479,"会社":-1116,"出て":2163,"分の":-7758,"同党":970,"同日":-913,"大阪":-2471,"委員":-1250,"少な":-1050,"年度":-8669,"年間":-1626,"府県":-2363,"手権":-1982,"新聞":-4066,"日新":-722,"日本":-7068,"日米":3372,"曜日":-601,"朝鮮":-2355,"本人":-2697,"東京":-1543,"然と":-1384,"社会":-1276,"立て":-990,"第に":-1612,"米国":-4268,"":-669};
this.BW3__ = {"あた":-2194,"あり":719,"ある":3846,"い.":-1185,"い。":-1185,"いい":5308,"いえ":2079,"いく":3029,"いた":2056,"いっ":1883,"いる":5600,"いわ":1527,"うち":1117,"うと":4798,"えと":1454,"か.":2857,"か。":2857,"かけ":-743,"かっ":-4098,"かに":-669,"から":6520,"かり":-2670,"が,":1816,"が、":1816,"がき":-4855,"がけ":-1127,"がっ":-913,"がら":-4977,"がり":-2064,"きた":1645,"けど":1374,"こと":7397,"この":1542,"ころ":-2757,"さい":-714,"さを":976,"し,":1557,"し、":1557,"しい":-3714,"した":3562,"して":1449,"しな":2608,"しま":1200,"す.":-1310,"す。":-1310,"する":6521,"ず,":3426,"ず、":3426,"ずに":841,"そう":428,"た.":8875,"た。":8875,"たい":-594,"たの":812,"たり":-1183,"たる":-853,"だ.":4098,"だ。":4098,"だっ":1004,"った":-4748,"って":300,"てい":6240,"てお":855,"ても":302,"です":1437,"でに":-1482,"では":2295,"とう":-1387,"とし":2266,"との":541,"とも":-3543,"どう":4664,"ない":1796,"なく":-903,"など":2135,"に,":-1021,"に、":-1021,"にし":1771,"にな":1906,"には":2644,"の,":-724,"の、":-724,"の子":-1000,"は,":1337,"は、":1337,"べき":2181,"まし":1113,"ます":6943,"まっ":-1549,"まで":6154,"まれ":-793,"らし":1479,"られ":6820,"るる":3818,"れ,":854,"れ、":854,"れた":1850,"れて":1375,"れば":-3246,"れる":1091,"われ":-605,"んだ":606,"んで":798,"カ月":990,"会議":860,"入り":1232,"大会":2217,"始め":1681,"市":965,"新聞":-5055,"日,":974,"日、":974,"社会":2024,"カ月":990};
this.TC1__ = {"AAA":1093,"HHH":1029,"HHM":580,"HII":998,"HOH":-390,"HOM":-331,"IHI":1169,"IOH":-142,"IOI":-1015,"IOM":467,"MMH":187,"OOI":-1832};
this.TC2__ = {"HHO":2088,"HII":-1023,"HMM":-1154,"IHI":-1965,"KKH":703,"OII":-2649};
this.TC3__ = {"AAA":-294,"HHH":346,"HHI":-341,"HII":-1088,"HIK":731,"HOH":-1486,"IHH":128,"IHI":-3041,"IHO":-1935,"IIH":-825,"IIM":-1035,"IOI":-542,"KHH":-1216,"KKA":491,"KKH":-1217,"KOK":-1009,"MHH":-2694,"MHM":-457,"MHO":123,"MMH":-471,"NNH":-1689,"NNO":662,"OHO":-3393};
this.TC4__ = {"HHH":-203,"HHI":1344,"HHK":365,"HHM":-122,"HHN":182,"HHO":669,"HIH":804,"HII":679,"HOH":446,"IHH":695,"IHO":-2324,"IIH":321,"III":1497,"IIO":656,"IOO":54,"KAK":4845,"KKA":3386,"KKK":3065,"MHH":-405,"MHI":201,"MMH":-241,"MMM":661,"MOM":841};
this.TQ1__ = {"BHHH":-227,"BHHI":316,"BHIH":-132,"BIHH":60,"BIII":1595,"BNHH":-744,"BOHH":225,"BOOO":-908,"OAKK":482,"OHHH":281,"OHIH":249,"OIHI":200,"OIIH":-68};
this.TQ2__ = {"BIHH":-1401,"BIII":-1033,"BKAK":-543,"BOOO":-5591};
this.TQ3__ = {"BHHH":478,"BHHM":-1073,"BHIH":222,"BHII":-504,"BIIH":-116,"BIII":-105,"BMHI":-863,"BMHM":-464,"BOMH":620,"OHHH":346,"OHHI":1729,"OHII":997,"OHMH":481,"OIHH":623,"OIIH":1344,"OKAK":2792,"OKHH":587,"OKKA":679,"OOHH":110,"OOII":-685};
this.TQ4__ = {"BHHH":-721,"BHHM":-3604,"BHII":-966,"BIIH":-607,"BIII":-2181,"OAAA":-2763,"OAKK":180,"OHHH":-294,"OHHI":2446,"OHHO":480,"OHIH":-1573,"OIHH":1935,"OIHI":-493,"OIIH":626,"OIII":-4007,"OKAK":-8156};
this.TW1__ = {"につい":-4681,"東京都":2026};
this.TW2__ = {"ある程":-2049,"いった":-1256,"ころが":-2434,"しょう":3873,"その後":-4430,"だって":-1049,"ていた":1833,"として":-4657,"ともに":-4517,"もので":1882,"一気に":-792,"初めて":-1512,"同時に":-8097,"大きな":-1255,"対して":-2721,"社会党":-3216};
this.TW3__ = {"いただ":-1734,"してい":1314,"として":-4314,"につい":-5483,"にとっ":-5989,"に当た":-6247,"ので,":-727,"ので、":-727,"のもの":-600,"れから":-3752,"十二月":-2287};
this.TW4__ = {"いう.":8576,"いう。":8576,"からな":-2348,"してい":2958,"たが,":1516,"たが、":1516,"ている":1538,"という":1349,"ました":5543,"ません":1097,"ようと":-4258,"よると":5865};
this.UC1__ = {"A":484,"K":93,"M":645,"O":-505};
this.UC2__ = {"A":819,"H":1059,"I":409,"M":3987,"N":5775,"O":646};
this.UC3__ = {"A":-1370,"I":2311};
this.UC4__ = {"A":-2643,"H":1809,"I":-1032,"K":-3450,"M":3565,"N":3876,"O":6646};
this.UC5__ = {"H":313,"I":-1238,"K":-799,"M":539,"O":-831};
this.UC6__ = {"H":-506,"I":-253,"K":87,"M":247,"O":-387};
this.UP1__ = {"O":-214};
this.UP2__ = {"B":69,"O":935};
this.UP3__ = {"B":189};
this.UQ1__ = {"BH":21,"BI":-12,"BK":-99,"BN":142,"BO":-56,"OH":-95,"OI":477,"OK":410,"OO":-2422};
this.UQ2__ = {"BH":216,"BI":113,"OK":1759};
this.UQ3__ = {"BA":-479,"BH":42,"BI":1913,"BK":-7198,"BM":3160,"BN":6427,"BO":14761,"OI":-827,"ON":-3212};
this.UW1__ = {",":156,"、":156,"「":-463,"あ":-941,"う":-127,"が":-553,"き":121,"こ":505,"で":-201,"と":-547,"ど":-123,"に":-789,"の":-185,"は":-847,"も":-466,"や":-470,"よ":182,"ら":-292,"り":208,"れ":169,"を":-446,"ん":-137,"・":-135,"主":-402,"京":-268,"区":-912,"午":871,"国":-460,"大":561,"委":729,"市":-411,"日":-141,"理":361,"生":-408,"県":-386,"都":-718,"「":-463,"・":-135};
this.UW2__ = {",":-829,"、":-829,"":892,"「":-645,"」":3145,"あ":-538,"い":505,"う":134,"お":-502,"か":1454,"が":-856,"く":-412,"こ":1141,"さ":878,"ざ":540,"し":1529,"す":-675,"せ":300,"そ":-1011,"た":188,"だ":1837,"つ":-949,"て":-291,"で":-268,"と":-981,"ど":1273,"な":1063,"に":-1764,"の":130,"は":-409,"ひ":-1273,"べ":1261,"ま":600,"も":-1263,"や":-402,"よ":1639,"り":-579,"る":-694,"れ":571,"を":-2516,"ん":2095,"ア":-587,"カ":306,"キ":568,"ッ":831,"三":-758,"不":-2150,"世":-302,"中":-968,"主":-861,"事":492,"人":-123,"会":978,"保":362,"入":548,"初":-3025,"副":-1566,"北":-3414,"区":-422,"大":-1769,"天":-865,"太":-483,"子":-1519,"学":760,"実":1023,"小":-2009,"市":-813,"年":-1060,"強":1067,"手":-1519,"揺":-1033,"政":1522,"文":-1355,"新":-1682,"日":-1815,"明":-1462,"最":-630,"朝":-1843,"本":-1650,"東":-931,"果":-665,"次":-2378,"民":-180,"気":-1740,"理":752,"発":529,"目":-1584,"相":-242,"県":-1165,"立":-763,"第":810,"米":509,"自":-1353,"行":838,"西":-744,"見":-3874,"調":1010,"議":1198,"込":3041,"開":1758,"間":-1257,"「":-645,"」":3145,"ッ":831,"ア":-587,"カ":306,"キ":568};
this.UW3__ = {",":4889,"1":-800,"":-1723,"、":4889,"々":-2311,"":5827,"」":2670,"〓":-3573,"あ":-2696,"い":1006,"う":2342,"え":1983,"お":-4864,"か":-1163,"が":3271,"く":1004,"け":388,"げ":401,"こ":-3552,"ご":-3116,"さ":-1058,"し":-395,"す":584,"せ":3685,"そ":-5228,"た":842,"ち":-521,"っ":-1444,"つ":-1081,"て":6167,"で":2318,"と":1691,"ど":-899,"な":-2788,"に":2745,"の":4056,"は":4555,"ひ":-2171,"ふ":-1798,"へ":1199,"ほ":-5516,"ま":-4384,"み":-120,"め":1205,"も":2323,"や":-788,"よ":-202,"ら":727,"り":649,"る":5905,"れ":2773,"わ":-1207,"を":6620,"ん":-518,"ア":551,"グ":1319,"ス":874,"ッ":-1350,"ト":521,"ム":1109,"ル":1591,"ロ":2201,"ン":278,"・":-3794,"一":-1619,"下":-1759,"世":-2087,"両":3815,"中":653,"主":-758,"予":-1193,"二":974,"人":2742,"今":792,"他":1889,"以":-1368,"低":811,"何":4265,"作":-361,"保":-2439,"元":4858,"党":3593,"全":1574,"公":-3030,"六":755,"共":-1880,"円":5807,"再":3095,"分":457,"初":2475,"別":1129,"前":2286,"副":4437,"力":365,"動":-949,"務":-1872,"化":1327,"北":-1038,"区":4646,"千":-2309,"午":-783,"協":-1006,"口":483,"右":1233,"各":3588,"合":-241,"同":3906,"和":-837,"員":4513,"国":642,"型":1389,"場":1219,"外":-241,"妻":2016,"学":-1356,"安":-423,"実":-1008,"家":1078,"小":-513,"少":-3102,"州":1155,"市":3197,"平":-1804,"年":2416,"広":-1030,"府":1605,"度":1452,"建":-2352,"当":-3885,"得":1905,"思":-1291,"性":1822,"戸":-488,"指":-3973,"政":-2013,"教":-1479,"数":3222,"文":-1489,"新":1764,"日":2099,"旧":5792,"昨":-661,"時":-1248,"曜":-951,"最":-937,"月":4125,"期":360,"李":3094,"村":364,"東":-805,"核":5156,"森":2438,"業":484,"氏":2613,"民":-1694,"決":-1073,"法":1868,"海":-495,"無":979,"物":461,"特":-3850,"生":-273,"用":914,"町":1215,"的":7313,"直":-1835,"省":792,"県":6293,"知":-1528,"私":4231,"税":401,"立":-960,"第":1201,"米":7767,"系":3066,"約":3663,"級":1384,"統":-4229,"総":1163,"線":1255,"者":6457,"能":725,"自":-2869,"英":785,"見":1044,"調":-562,"財":-733,"費":1777,"車":1835,"軍":1375,"込":-1504,"通":-1136,"選":-681,"郎":1026,"郡":4404,"部":1200,"金":2163,"長":421,"開":-1432,"間":1302,"関":-1282,"雨":2009,"電":-1045,"非":2066,"駅":1620,"":-800,"」":2670,"・":-3794,"ッ":-1350,"ア":551,"グ":1319,"ス":874,"ト":521,"ム":1109,"ル":1591,"ロ":2201,"ン":278};
this.UW4__ = {",":3930,".":3508,"―":-4841,"、":3930,"。":3508,"":4999,"「":1895,"」":3798,"〓":-5156,"あ":4752,"い":-3435,"う":-640,"え":-2514,"お":2405,"か":530,"が":6006,"き":-4482,"ぎ":-3821,"く":-3788,"け":-4376,"げ":-4734,"こ":2255,"ご":1979,"さ":2864,"し":-843,"じ":-2506,"す":-731,"ず":1251,"せ":181,"そ":4091,"た":5034,"だ":5408,"ち":-3654,"っ":-5882,"つ":-1659,"て":3994,"で":7410,"と":4547,"な":5433,"に":6499,"ぬ":1853,"ね":1413,"の":7396,"は":8578,"ば":1940,"ひ":4249,"び":-4134,"ふ":1345,"へ":6665,"べ":-744,"ほ":1464,"ま":1051,"み":-2082,"む":-882,"め":-5046,"も":4169,"ゃ":-2666,"や":2795,"ょ":-1544,"よ":3351,"ら":-2922,"り":-9726,"る":-14896,"れ":-2613,"ろ":-4570,"わ":-1783,"を":13150,"ん":-2352,"カ":2145,"コ":1789,"セ":1287,"ッ":-724,"ト":-403,"メ":-1635,"ラ":-881,"リ":-541,"ル":-856,"ン":-3637,"・":-4371,"ー":-11870,"一":-2069,"中":2210,"予":782,"事":-190,"井":-1768,"人":1036,"以":544,"会":950,"体":-1286,"作":530,"側":4292,"先":601,"党":-2006,"共":-1212,"内":584,"円":788,"初":1347,"前":1623,"副":3879,"力":-302,"動":-740,"務":-2715,"化":776,"区":4517,"協":1013,"参":1555,"合":-1834,"和":-681,"員":-910,"器":-851,"回":1500,"国":-619,"園":-1200,"地":866,"場":-1410,"塁":-2094,"士":-1413,"多":1067,"大":571,"子":-4802,"学":-1397,"定":-1057,"寺":-809,"小":1910,"屋":-1328,"山":-1500,"島":-2056,"川":-2667,"市":2771,"年":374,"庁":-4556,"後":456,"性":553,"感":916,"所":-1566,"支":856,"改":787,"政":2182,"教":704,"文":522,"方":-856,"日":1798,"時":1829,"最":845,"月":-9066,"木":-485,"来":-442,"校":-360,"業":-1043,"氏":5388,"民":-2716,"気":-910,"沢":-939,"済":-543,"物":-735,"率":672,"球":-1267,"生":-1286,"産":-1101,"田":-2900,"町":1826,"的":2586,"目":922,"省":-3485,"県":2997,"空":-867,"立":-2112,"第":788,"米":2937,"系":786,"約":2171,"経":1146,"統":-1169,"総":940,"線":-994,"署":749,"者":2145,"能":-730,"般":-852,"行":-792,"規":792,"警":-1184,"議":-244,"谷":-1000,"賞":730,"車":-1481,"軍":1158,"輪":-1433,"込":-3370,"近":929,"道":-1291,"選":2596,"郎":-4866,"都":1192,"野":-1100,"銀":-2213,"長":357,"間":-2344,"院":-2297,"際":-2604,"電":-878,"領":-1659,"題":-792,"館":-1984,"首":1749,"高":2120,"「":1895,"」":3798,"・":-4371,"ッ":-724,"ー":-11870,"カ":2145,"コ":1789,"セ":1287,"ト":-403,"メ":-1635,"ラ":-881,"リ":-541,"ル":-856,"ン":-3637};
this.UW5__ = {",":465,".":-299,"1":-514,"E2":-32768,"]":-2762,"、":465,"。":-299,"「":363,"あ":1655,"い":331,"う":-503,"え":1199,"お":527,"か":647,"が":-421,"き":1624,"ぎ":1971,"く":312,"げ":-983,"さ":-1537,"し":-1371,"す":-852,"だ":-1186,"ち":1093,"っ":52,"つ":921,"て":-18,"で":-850,"と":-127,"ど":1682,"な":-787,"に":-1224,"の":-635,"は":-578,"べ":1001,"み":502,"め":865,"ゃ":3350,"ょ":854,"り":-208,"る":429,"れ":504,"わ":419,"を":-1264,"ん":327,"イ":241,"ル":451,"ン":-343,"中":-871,"京":722,"会":-1153,"党":-654,"務":3519,"区":-901,"告":848,"員":2104,"大":-1296,"学":-548,"定":1785,"嵐":-1304,"市":-2991,"席":921,"年":1763,"思":872,"所":-814,"挙":1618,"新":-1682,"日":218,"月":-4353,"査":932,"格":1356,"機":-1508,"氏":-1347,"田":240,"町":-3912,"的":-3149,"相":1319,"省":-1052,"県":-4003,"研":-997,"社":-278,"空":-813,"統":1955,"者":-2233,"表":663,"語":-1073,"議":1219,"選":-1018,"郎":-368,"長":786,"間":1191,"題":2368,"館":-689,"":-514,"":-32768,"「":363,"イ":241,"ル":451,"ン":-343};
this.UW6__ = {",":227,".":808,"1":-270,"E1":306,"、":227,"。":808,"あ":-307,"う":189,"か":241,"が":-73,"く":-121,"こ":-200,"じ":1782,"す":383,"た":-428,"っ":573,"て":-1014,"で":101,"と":-105,"な":-253,"に":-149,"の":-417,"は":-236,"も":-206,"り":187,"る":-135,"を":195,"ル":-673,"ン":-496,"一":-277,"中":201,"件":-800,"会":624,"前":302,"区":1792,"員":-1212,"委":798,"学":-960,"市":887,"広":-695,"後":535,"業":-697,"相":753,"社":-507,"福":974,"空":-822,"者":1811,"連":463,"郎":1082,"":-270,"":306,"ル":-673,"ン":-496};
return this;
}
TinySegmenter.prototype.ctype_ = function(str) {
for (var i in this.chartype_) {
if (str.match(this.chartype_[i][0])) {
return this.chartype_[i][1];
}
}
return "O";
}
TinySegmenter.prototype.ts_ = function(v) {
if (v) { return v; }
return 0;
}
TinySegmenter.prototype.segment = function(input) {
if (input == null || input == undefined || input == "") {
return [];
}
var result = [];
var seg = ["B3","B2","B1"];
var ctype = ["O","O","O"];
var o = input.split("");
for (i = 0; i < o.length; ++i) {
seg.push(o[i]);
ctype.push(this.ctype_(o[i]))
}
seg.push("E1");
seg.push("E2");
seg.push("E3");
ctype.push("O");
ctype.push("O");
ctype.push("O");
var word = seg[3];
var p1 = "U";
var p2 = "U";
var p3 = "U";
for (var i = 4; i < seg.length - 3; ++i) {
var score = this.BIAS__;
var w1 = seg[i-3];
var w2 = seg[i-2];
var w3 = seg[i-1];
var w4 = seg[i];
var w5 = seg[i+1];
var w6 = seg[i+2];
var c1 = ctype[i-3];
var c2 = ctype[i-2];
var c3 = ctype[i-1];
var c4 = ctype[i];
var c5 = ctype[i+1];
var c6 = ctype[i+2];
score += this.ts_(this.UP1__[p1]);
score += this.ts_(this.UP2__[p2]);
score += this.ts_(this.UP3__[p3]);
score += this.ts_(this.BP1__[p1 + p2]);
score += this.ts_(this.BP2__[p2 + p3]);
score += this.ts_(this.UW1__[w1]);
score += this.ts_(this.UW2__[w2]);
score += this.ts_(this.UW3__[w3]);
score += this.ts_(this.UW4__[w4]);
score += this.ts_(this.UW5__[w5]);
score += this.ts_(this.UW6__[w6]);
score += this.ts_(this.BW1__[w2 + w3]);
score += this.ts_(this.BW2__[w3 + w4]);
score += this.ts_(this.BW3__[w4 + w5]);
score += this.ts_(this.TW1__[w1 + w2 + w3]);
score += this.ts_(this.TW2__[w2 + w3 + w4]);
score += this.ts_(this.TW3__[w3 + w4 + w5]);
score += this.ts_(this.TW4__[w4 + w5 + w6]);
score += this.ts_(this.UC1__[c1]);
score += this.ts_(this.UC2__[c2]);
score += this.ts_(this.UC3__[c3]);
score += this.ts_(this.UC4__[c4]);
score += this.ts_(this.UC5__[c5]);
score += this.ts_(this.UC6__[c6]);
score += this.ts_(this.BC1__[c2 + c3]);
score += this.ts_(this.BC2__[c3 + c4]);
score += this.ts_(this.BC3__[c4 + c5]);
score += this.ts_(this.TC1__[c1 + c2 + c3]);
score += this.ts_(this.TC2__[c2 + c3 + c4]);
score += this.ts_(this.TC3__[c3 + c4 + c5]);
score += this.ts_(this.TC4__[c4 + c5 + c6]);
// score += this.ts_(this.TC5__[c4 + c5 + c6]);
score += this.ts_(this.UQ1__[p1 + c1]);
score += this.ts_(this.UQ2__[p2 + c2]);
score += this.ts_(this.UQ3__[p3 + c3]);
score += this.ts_(this.BQ1__[p2 + c2 + c3]);
score += this.ts_(this.BQ2__[p2 + c3 + c4]);
score += this.ts_(this.BQ3__[p3 + c2 + c3]);
score += this.ts_(this.BQ4__[p3 + c3 + c4]);
score += this.ts_(this.TQ1__[p2 + c1 + c2 + c3]);
score += this.ts_(this.TQ2__[p2 + c2 + c3 + c4]);
score += this.ts_(this.TQ3__[p3 + c1 + c2 + c3]);
score += this.ts_(this.TQ4__[p3 + c2 + c3 + c4]);
var p = "O";
if (score > 0) {
result.push(word);
word = "";
p = "B";
}
p1 = p2;
p2 = p3;
p3 = p;
word += seg[i];
}
result.push(word);
return result;
}
lunr.TinySegmenter = TinySegmenter;
};
}));

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1 @@
{"version":3,"sources":["src/templates/assets/stylesheets/palette/_scheme.scss","../../../../src/templates/assets/stylesheets/palette.scss","src/templates/assets/stylesheets/palette/_accent.scss","src/templates/assets/stylesheets/palette/_primary.scss","src/templates/assets/stylesheets/utilities/_break.scss"],"names":[],"mappings":"AA2BA,cAGE,6BAME,sDAAA,CACA,6DAAA,CACA,+DAAA,CACA,gEAAA,CACA,mDAAA,CACA,6DAAA,CACA,+DAAA,CACA,gEAAA,CAGA,mDAAA,CACA,gDAAA,CAGA,0BAAA,CACA,mCAAA,CAGA,iCAAA,CACA,kCAAA,CACA,mCAAA,CACA,mCAAA,CACA,kCAAA,CACA,iCAAA,CACA,+CAAA,CACA,6DAAA,CACA,gEAAA,CACA,4DAAA,CACA,4DAAA,CACA,6DAAA,CAGA,6CAAA,CAGA,+CAAA,CAGA,uDAAA,CACA,6DAAA,CACA,2DAAA,CAGA,iCAAA,CAGA,yDAAA,CACA,iEAAA,CAGA,mDAAA,CACA,mDAAA,CAGA,qDAAA,CACA,uDAAA,CAGA,8DAAA,CAKA,8DAAA,CAKA,0DAAA,CAvEA,iBCeF,CD6DE,kHAEE,YC3DJ,CDkFE,yDACE,4BChFJ,CD+EE,2DACE,4BC7EJ,CD4EE,gEACE,4BC1EJ,CDyEE,2DACE,4BCvEJ,CDsEE,yDACE,4BCpEJ,CDmEE,0DACE,4BCjEJ,CDgEE,gEACE,4BC9DJ,CD6DE,0DACE,4BC3DJ,CD0DE,2OACE,4BC/CJ,CDsDA,+FAGE,iCCpDF,CACF,CC/CE,2BACE,4BAAA,CACA,2CAAA,CAOE,yBAAA,CACA,qCD2CN,CCrDE,4BACE,4BAAA,CACA,2CAAA,CAOE,yBAAA,CACA,qCDkDN,CC5DE,8BACE,4BAAA,CACA,2CAAA,CAOE,yBAAA,CACA,qCDyDN,CCnEE,mCACE,4BAAA,CACA,2CAAA,CAOE,yBAAA,CACA,qCDgEN,CC1EE,8BACE,4BAAA,CACA,2CAAA,CAOE,yBAAA,CACA,qCDuEN,CCjFE,4BACE,4BAAA,CACA,2CAAA,CAOE,yBAAA,CACA,qCD8EN,CCxFE,kCACE,4BAAA,CACA,2CAAA,CAOE,yBAAA,CACA,qCDqFN,CC/FE,4BACE,4BAAA,CACA,2CAAA,CAOE,yBAAA,CACA,qCD4FN,CCtGE,4BACE,4BAAA,CACA,2CAAA,CAOE,yBAAA,CACA,qCDmGN,CC7GE,6BACE,4BAAA,CACA,2CAAA,CAOE,yBAAA,CACA,qCD0GN,CCpHE,mCACE,4BAAA,CACA,2CAAA,CAOE,yBAAA,CACA,qCDiHN,CC3HE,4BACE,4BAAA,CACA,2CAAA,CAIE,8BAAA,CACA,qCD2HN,CClIE,8BACE,4BAAA,CACA,2CAAA,CAIE,8BAAA,CACA,qCDkIN,CCzIE,6BACE,yBAAA,CACA,2CAAA,CAIE,8BAAA,CACA,qCDyIN,CChJE,8BACE,4BAAA,CACA,2CAAA,CAIE,8BAAA,CACA,qCDgJN,CCvJE,mCACE,4BAAA,CACA,2CAAA,CAOE,yBAAA,CACA,qCDoJN,CEzJE,4BACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAOE,0BAAA,CACA,sCFsJN,CEjKE,6BACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAOE,0BAAA,CACA,sCF8JN,CEzKE,+BACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAOE,0BAAA,CACA,sCFsKN,CEjLE,oCACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAOE,0BAAA,CACA,sCF8KN,CEzLE,+BACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAOE,0BAAA,CACA,sCFsLN,CEjME,6BACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAOE,0BAAA,CACA,sCF8LN,CEzME,mCACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAOE,0BAAA,CACA,sCFsMN,CEjNE,6BACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAOE,0BAAA,CACA,sCF8MN,CEzNE,6BACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAOE,0BAAA,CACA,sCFsNN,CEjOE,8BACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAOE,0BAAA,CACA,sCF8NN,CEzOE,oCACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAOE,0BAAA,CACA,sCFsON,CEjPE,6BACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAIE,+BAAA,CACA,sCFiPN,CEzPE,+BACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAIE,+BAAA,CACA,sCFyPN,CEjQE,8BACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAIE,+BAAA,CACA,sCFiQN,CEzQE,+BACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAIE,+BAAA,CACA,sCFyQN,CEjRE,oCACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAOE,0BAAA,CACA,sCF8QN,CEzRE,8BACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAOE,0BAAA,CACA,sCFsRN,CEjSE,6BACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAOE,0BAAA,CACA,sCAAA,CAKA,4BF0RN,CE1SE,kCACE,6BAAA,CACA,oCAAA,CACA,mCAAA,CAOE,0BAAA,CACA,sCAAA,CAKA,4BFmSN,CEpRE,sEACE,4BFuRJ,CExRE,+DACE,4BF2RJ,CE5RE,iEACE,4BF+RJ,CEhSE,gEACE,4BFmSJ,CEpSE,iEACE,4BFuSJ,CE9RA,8BACE,mDAAA,CACA,4DAAA,CACA,0DAAA,CACA,oDAAA,CACA,2DAAA,CAGA,4BF+RF,CE5RE,yCACE,+BF8RJ,CE3RI,kDAEE,0CAAA,CACA,sCAAA,CAFA,mCF+RN,CG3MI,mCD1EA,+CACE,8CFwRJ,CErRI,qDACE,8CFuRN,CElRE,iEACE,mCFoRJ,CACF,CGtNI,sCDvDA,uCACE,oCFgRJ,CACF,CEvQA,8BACE,kDAAA,CACA,4DAAA,CACA,wDAAA,CACA,oDAAA,CACA,6DAAA,CAGA,4BFwQF,CErQE,yCACE,+BFuQJ,CEpQI,kDAEE,0CAAA,CACA,sCAAA,CAFA,mCFwQN,CEjQE,yCACE,6CFmQJ,CG5NI,0CDhCA,8CACE,gDF
+PJ,CACF,CGjOI,0CDvBA,iFACE,6CF2PJ,CACF,CGzPI,sCDKA,uCACE,6CFuPJ,CACF","file":"palette.css"}

View File

@ -1,9 +0,0 @@
---
nav:
- ... | index.md
- ... | members.md
- ... | what_we_do.md
- Documentation: documentation
- SOP: sop
- Events: events
...

View File

@ -1,10 +0,0 @@
---
nav:
- ... | index.md
- Guidelines: guidelines
- Debranding and Patching: debranding
- Composing Releases: compose
- Empanadas: empanadas.md
- Peridot: peridot.md
- References: references
...

View File

@ -1,5 +0,0 @@
---
nav:
- ... | index.md
- Manual Updates in Koji: koji.md
...

View File

@ -1,7 +0,0 @@
---
title: Composing and Managing Releases
---
This section goes over the process of composing a release, from packages to
repositories to images. It also goes over the basics of working with koji when
necessary.

View File

@ -1,5 +0,0 @@
---
title: Updates and Management in Koji, A Manual
---
More to come.

View File

@ -1,5 +0,0 @@
---
nav:
- ... | index.md
- Debranding Information: debrand_info.md
- Patching Guide: patching.md

View File

@ -1,66 +0,0 @@
---
title: Debranding Information
---
This page goes over the methodology and some packages that require changes to their material for acceptance in Rocky Linux. Usually this means there is some text or images in the package that reference upstream trademarks, and these must be swapped out before we can distribute them.
CentOS had a wiki page at one point where this was documented, but it wasn't always up to date. For example, the package **nginx** did not appear on their list, yet still had RHEL branding in the CentOS repos. As a result, we had to investigate more deeply what needs to be changed or altered.
There are a few ways we've determined some of the changes:
* Packages don't build because ID is not `centos`, `rhel`, or `fedora`
* Packages have `?centos` tags in the SPEC file to differentiate from Fedora or RHEL
* Some packages in git.centos.org have an automatic debranding message - This won't be as helpful for 9 and beyond
* RHEL assets were appearing in the installed package(s)
When we need to make changes, it can possibly be one or more of these things:
* URLs should change from Red Hat to a Rocky page (if applicable)
* URLs that are being patched to be Red Hat should be removed (systemd in 8 is an example of this)
* `?centos` is changed to `?rocky`, but this isn't always consistent or sufficient
* Assets need to be changed
* Exceptions come when there is a file being requested from the logos package - We generally have symlinks to deal with this
* Some patches must be made to the source code or spec file
* Packages are built against an "override" release package that uses `ID="rhel"` in `/etc/os-release` to force a build to pose as RHEL; older versions of dotnet and chrony are examples of this (a sketch of such a file follows this list)
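For illustration only, such an override release package might ship an `/etc/os-release` along these lines (the values below are hypothetical; the relevant part is that `ID` reads `rhel` during the build):
```
NAME="Red Hat Enterprise Linux"
ID="rhel"
ID_LIKE="fedora"
VERSION_ID="8.6"
PLATFORM_ID="platform:el8"
```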
Current patches (for staging) are [here](https://git.rockylinux.org/staging/patch).
## Packages that need debranding changes:
There is a metadata file that helps track this information for us. It can be located [here](https://git.rockylinux.org/rocky/metadata/-/blob/main/patch.yml) and is separated by section and branch.
In essence, the file goes over these sections:
* `build_patch` -> Packages that may have needed patches to build properly in our environment
* `dnt` or **D**o **N**ot **T**ouch -> These should not be modified or changed
* `custom` -> Custom packages not provided by upstream but can be useful in obsoleting packages or providing some functionality
* `plus` -> Packages that are modified versions of what's in the base or built by normal means but not shipped by Red Hat
* `previous` -> Packages that may have been patched before for debranding or building - They are left as a reference
* `provides` -> Common provides for release, logos, and other rocky/system specific packages
* `override_required` -> Requires an "override" release package to build properly
* `spec_change_only` -> Requires only spec changes to remove functionality that is RHEL specific
* `debrand` -> Packages that are changed/patched to either remove Red Hat references, replace them, or add Rocky Linux as a supported distribution
* Some of these packages will always need to be changed, on a minor or major release schedule.
* Some are potentially upstreamed so then they are no longer patched by us (sos is an example)
* Some packages may still need modification, even if upstreamed (anaconda is an example)
## Packages that need to become other packages:
There is a metadata file that tracks this for us. It can be located [here](https://git.rockylinux.org/rocky/metadata/-/blob/main/patch.yml). The section in particular is called `provides`.
For example, `redhat-logos` or `system-logos` is provided by (or "becomes") `rocky-logos`.
## Packages that Exist in RHEL, but do not exist in most derivatives
For the sake of completeness, here is a list of packages that are in RHEL but may not exist in derivatives. We do not need to worry about these packages:
- insights-client
- Red_Hat_Enterprise_Linux-Release_Notes-8-*
- redhat-access-gui
- redhat-bookmarks
- rhc
- rhc-worker-playbook
- subscription-manager-migration
- subscription-manager-migration-data

View File

@ -1,33 +0,0 @@
---
title: Intro to Debranding with Rocky Linux
---
## What is Debranding?
Certain packages in the upstream RHEL/CentOS have logos, trademarks, and other specific text, images, or multimedia that other entities (like the Rocky Enterprise Software Foundation) are not allowed to redistribute.
A visible, simple example is the Apache web server (package httpd). If you've ever installed it and visited the default web server page, you will see a test page specific to your Linux distro, complete with a "powered by" logo and distro-specific information. While we are allowed to compile and redistribute the Apache web server software, Rocky Linux is NOT allowed to include these trademarked images or distro-specific text.
We must have an automated process that will strip these assets out and replace them with our own branding upon import into our Git.
## How Rocky Debranding Works
Rocky's method for importing packages from the upstream is a tool called **srpmproc**.
Srpmproc's purpose in life is to:
- Clone PACKAGE from our upstream source: gitlab.com/redhat or a relevant source RPM
- Check if Rocky Linux has any debranding patches available for PACKAGE (under https://git.rockylinux.org/patch/PACKAGE)
- If patch/PACKAGE exists, then read the configuration and patches from that repository and apply them
- Commit the results (patched or not) to https://git.rockylinux.org/rpms/PACKAGE
- Do this for every package until we have a full repository of packages in our Git
## Helping with Debrands
There are two tasks involved in debranding: identifying packages that require debranding, and developing the patches and configs to debrand them.
If you want to help with the latter, please see the [patching guide](patching.md) for examples and a case study. If you find something that you suspect is missing branding, you can also file a [bug report](https://bugs.rockylinux.org) instead.
## Debrand Packages Tracking
A list of packages that need debranding is located in a metadata file in our git [here](https://git.rockylinux.org/rocky/metadata/-/blob/main/patch.yml). This generally does not track status and is simply a reference on what is debranded, what is no longer debranded (i.e., Rocky Linux support was upstreamed), and other notes.

View File

@ -1,210 +0,0 @@
---
title: Rocky Patching Guide
---
This explains how to debrand/patch a package for the Rocky Linux distribution.
## General Instructions
- First, identify the files in the package that need to be changed. They could be text files, image files, or others. You can identify the file(s) by digging into git.centos.org/rpms/PACKAGE/
- Develop replacements for these files, but with Rocky branding in place instead. Diff/patch files may be needed as well for certain types of text, depending on the content being replaced.
- Replacement files go under https://git.rockylinux.org/patch/PACKAGE/ROCKY/_supporting/
- Config file (specifying how to apply the patches) goes in https://git.rockylinux.org/patch/PACKAGE/ROCKY/CFG/*.cfg
- Note: Use spaces, not tabs.
- When srpmproc goes to import the package to Rocky, it will see the work done in https://git.rockylinux.org/patch/PACKAGE, and apply the stored patches by reading the config file(s) under ROCKY/CFG/*.cfg
## The Patch Config Language
Patching uses simple proto3 config files. The general format is:
```
Action {
file: "OriginalFile"
with_file: "ROCKY/_supporting/RockyReplaceFile"
}
```
A simple example to replace a file:
```
replace {
file: "redhatlogo.png"
with_file: "ROCKY/_supporting/rockylogo.png"
}
```
The file "redhatlogo.png" would be located in under SOURCES/ in the project's Git repository (and SRPM).
## Patch configuration options
* `add`: Adds a file to the sources using the `file` or `lookaside` directive
* `delete`: Deletes a file from the sources using the `file` directive
* `replace`: Replaces a file from the sources using the `file` and `with_file` directives
* `patch`: Performs a patch based on the diff provided in the `file` directive (generated using `git diff`)
* `spec_change`: Allows for spec files to be modified
* `search_and_replace`: Performs a search and replace on a given text in the spec file using the `any/starts_with/ends_with` (true|false), `find` (string to find), `replace` (replacement string), and `n` (integer, `-1` for any) directives. See the sketch after this list.
* `file`: A file can be added to the spec file using the `name` directive to define the file name, the `type` directive (such as `patch`) and then an `add` option that is `true` or `false`
* When `patch` is used, the following options are available:
* `add_to_prep` (true|false)
* `n_path: N` can be specified to add `%patchX -pN` lines into `%prep` assuming the rpm does not use `%autosetup`
* `append`: Appends to a given `field`, such as `Release` with a `value` directive
* `changelog`: Modifies the change log using `author_name`, `author_email`, and `message` directives
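Putting a few of these directives together, here is a hedged sketch of a `spec_change` block (the nesting follows the bullet structure above; the search strings and patch name are purely hypothetical):
```
spec_change {
  search_and_replace {
    any: true
    find: "Red Hat Enterprise Linux"
    replace: "Rocky Linux"
    n: -1
  }
  file {
    name: "0001-debrand-docs.patch"
    type: Patch
    add: true
  }
}
```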
Patch configuration structure:
```
.
└── ROCKY
├── CFG
└── _supporting
```
## Case Study: Nginx
**(note: all example data here is currently in the staging/ area of Rocky Linux Git. We will update it when the projects are moved to the production area)**
Let's go over an example debrand, featuring the Nginx web server package.
The source repository is located here: **https://git.centos.org/rpms/nginx**
If we browse one of the c8-* branches, we see under SOURCES/ that there is definitely some content that needs to be debranded:
```
404.html
50x.html
index.html
poweredby.png (binary file in dist-git, referred to in .nginx.metadata)
```
These files all refer to Red Hat, Inc., and must be replaced before they make it to Rocky Linux.
**1: Come up with the patches:** Each of these files has a Rocky Linux counterpart, and they must be created. **Some of this should be done by the Design Team, especially logo work (#Design on chat)**
**2: Commit patches to the matching patch/PROJECT Git repository** : For example, Nginx patches are located here: **https://git.rockylinux.org/staging/patch/nginx** (staging/ prefix is currently used until our production repos are set up)
**3: Develop a matching config file:** Our example Nginx has this here: **https://git.rockylinux.org/staging/patch/nginx/-/blob/main/ROCKY/CFG/pages.cfg**
It looks like this:
```
replace {
file: "index.html"
with_file: "ROCKY/_supporting/index.html"
}
replace {
file: "404.html"
with_file: "ROCKY/_supporting/404.html"
}
replace {
file: "50x.html"
with_file: "ROCKY/_supporting/50x.html"
}
replace {
file: "poweredby.png"
with_file: "ROCKY/_supporting/poweredby.png"
}
```
**4: Test the import:** Now, when the upstream is imported, we can check the main Rocky **nginx** repository and ensure our updates were successful: **https://git.rockylinux.org/staging/rpms/nginx/** (again, staging/ group is used only for now)
**5: You're Done!** Great! Now do the next one... ;-)
## More Debrand Config Language
The Nginx example showed just the **replace** directive, but there are several more available. They are **add**, **patch**, and **delete**.
Here they are, with examples:
```
# Add a file to the project (file is added to SOURCES/ folder )
add {
file: "ROCKY/_supporting/add_me.txt"
}
# Apply a .patch file (generated using the Linux "patch" utility)
patch {
file: "ROCKY/_supporting/002-test-html.patch"
}
# Delete a file from the source project
delete {
file: "SOURCES/dontneed.txt"
}
```
And the .patch file example looks like this:
```
diff --git a/SOURCES/test.html b/SOURCES/test.html
index 8d91ffd..3f76c3b 100644
--- a/SOURCES/test.html
+++ b/SOURCES/test.html
@@ -1,6 +1,6 @@
<!DOCTYPE html>
<html>
<body>
- <h1>Replace me</h1>
+ <h1>Replace I did!</h1>
</body>
</html>
```
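One possible way to produce such a .patch file is from a local checkout of the package's dist-git repository (the repository URL comes from the case study above; the branch and file names are illustrative):
```
# clone the upstream dist-git repository for the package
git clone https://git.centos.org/rpms/nginx
cd nginx
# check out the relevant branch (one of the c8-* branches mentioned above)
git checkout c8-stream-1.14
# edit SOURCES/test.html with the debranded content, then capture the change
git diff > 002-test-html.patch
# the resulting patch is then committed to the patch repository under ROCKY/_supporting/
```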
It also supports spec file changes, as may be necessary. For example, the following comes from the anaconda debrand patch repo.
```
add {
file: "ROCKY/_supporting/0002-Rocky-disable-cdn-radiobutton.patch"
}
spec_change {
# Adds a Patch line with the file name as listed above
file {
name: "0002-Rocky-disable-cdn-radiobutton.patch"
type: Patch
add: true
}
# Appends to the end of a field's line, in this case the Release field gets .rocky
append {
field: "Release"
value: ".rocky"
}
# Adds to the change log properly
changelog {
author_name: "Mustafa Gezen"
author_email: "mustafa@rockylinux.org"
message: "Disable CDN and add .rocky to Release"
}
}
```
At the end, the spec file will have been changed accordingly:
```
Summary: Graphical system installer
Name: anaconda
Version: 33.16.3.26
# Our .rocky appears here
Release: 2%{?dist}.rocky
-- snip --
Patch1: 0001-network-do-not-crash-on-infiniband-devices-activated.patch
# Look, our patch was added!
# Luckily this RPM uses %autosetup, so no %patch lines
Patch2: 0002-Rocky-disable-cdn-radiobutton.patch
-- snip --
# And below the added changelog
%changelog
* Thu Feb 25 2021 Mustafa Gezen <mustafa@rockylinux.org> - 33.16.3.26-2
- Disable CDN and add .rocky to Release
* Thu Oct 22 2020 Radek Vykydal <rvykydal@redhat.com> - 33.16.3.26-2
- network: do not crash on infiniband devices activated in initramfs
(rvykydal)
Resolves: rhbz#1890261
```

View File

@ -1,101 +0,0 @@
---
title: Empanadas
---
This page goes over `empanadas`, which is part of the SIG/Core toolkit. Empanadas
assists SIG/Core in composing repositories, creating ISOs, creating images, and
performing various other activities in Rocky Linux. It is also used for general
testing and debugging of repositories and their metadata.
## Contact Information
| | |
| - | - |
| **Owner** | SIG/Core (Release Engineering & Infrastructure) |
| **Email Contact** | releng@rockylinux.org |
| **Mattermost Contacts** | `@label` `@neil` |
| **Mattermost Channels** | `~Development` |
## General Information
`empanadas` is a python project using poetry, containing various built-in modules
that aim to emulate the Fedora Project's pungi to an extent. While it is not
perfect, it achieves the basic goals of creating repositories, images, and ISOs
for consumption by the end user. It also interacts with peridot, the build
system used by the RESF to build the Rocky Linux distribution.
For syncs, it relies on podman to sync repositories in a parallel fashion. This
is because it is not possible to run multiple dnf transactions at once on a
single system, and looping through one repository at a time is neither
sustainable nor fast.
## Requirements
* Poetry must be installed on the system
* Podman must be installed on the system
* `fpart` must be installed on the system (available in EPEL on EL systems)
* Enough storage should be available if repositories are being synced
* `mock` must be installed if building live images
* System must be an Enterprise Linux system or Fedora with the `%rhel` macro set
## Features
As of this writing, `empanadas` has the following abilities:
* Repository syncing via dnf from a peridot instance or applicable repos
* Per profile dnf repoclosure checking for all applicable repos
* Per profile dnf repoclosure checking for peridot instance repositories
* Basic ISO Building via `lorax`
* Extra ISO Building via `xorriso` for DVD and minimal images
* Live ISO Building using `livemedia-creator` and `mock`
* Anaconda treeinfo builder
* Cloud Image builder
## Installing Empanadas
The below is how to install empanadas from the development branch on a Fedora
system.
```
% dnf install git podman fpart poetry mock -y
% git clone https://git.resf.org/sig_core/toolkit.git -b devel
% cd toolkit/iso/empanadas
% poetry install
```
## Configuring Empanadas
How your configurations are set up will depend on how you are using empanadas.
The relevant configuration files are:
* `empanadas/common.py`
* `empanadas/config/*.yaml`
* `empanadas/sig/*.yaml`
These configuration files are delicate and can control a wide variety of the
moving parts of empanadas. As these configurations are fairly massive, we
recommend checking the [reference guides](./references/) for deeper details on
configuring for base distribution or "SIG" content.
## Using Empanadas
The most common way to use empanadas is to sync repositories from a peridot
instance. This is performed upon each release or on each set of updates as they
come from upstream. Below is how to use `empanadas`, along with the common
options.
Note that for each of these commands, it is fully expected you are running
`poetry run` in the root of empanadas.
```
# Syncs all repositories for the "9" release
% poetry run sync_from_peridot --release 9 --clean-old-packages
# Syncs only the BaseOS repository without syncing sources
% poetry run sync_from_peridot --release 9 --clean-old-packages --repo BaseOS --ignore-source
# Syncs only AppStream for ppc64le
% poetry run sync_from_peridot --release 9 --clean-old-packages --repo AppStream --arch ppc64le
```
{% include "resources_bottom.md" %}

View File

@ -1,4 +0,0 @@
---
nav:
- ... | index.md
- ... | rocky_logos_guidelines.md

View File

@ -1,9 +0,0 @@
---
title: Guidelines
---
This section is primarily for documentation and useful information as it
pertains to guidelines for various packages or asset usage.
**Release Engineering has the final "go/no-go" decision on submissions, assets,
images, and the general structure of the release of Rocky Linux.**

View File

@ -1,230 +0,0 @@
---
title: Rocky Logos Package Guidelines
---
This page goes over the basic guidelines for the rocky-logos package, which produces assets for anaconda, wallpapers, and other assets for the distribution.
**Release Engineering has the final "go/no-go" decision on submissions/assets/images in the package.**
## Rocky Logo Assets
In various parts of the package, the Rocky logo will need to exist in multiple forms:
* Green variant
* White variant
This can be in the form of `PNG`, `JPG`, or `SVG` files.
### anaconda
All anaconda image assets will be in `PNG` form. Backgrounds should be transparent with the exception of `rnotes` if applicable.
### Backgrounds
See [Backgrounds Section](#Backgrounds/Wallpapers)
### fedora
`SVG` format of logo assets as `fedora_logo` (color) and `fedora_logo_darkbackground` (white), and a default as `fedora_logo`.
### firstboot
First boot assets. This is generally the sidebar (like the anaconda installer) and a workstation icon. `PNG` format.
### icons/hicolor
Rocky icons will appear here in different resolutions and must be in `PNG` or `SVG` format:
* 16x16/apps: `PNG`, `system-logo-icon`, `fedora-logo-icon`
* 22x22/apps: `PNG`, `system-logo-icon`, `fedora-logo-icon`
* 24x24/apps: `PNG`, `system-logo-icon`, `fedora-logo-icon`
* 32x32/apps: `PNG`, `system-logo-icon`, `fedora-logo-icon`
* 36x36/apps: `PNG`, `system-logo-icon`, `fedora-logo-icon`
* 48x48/apps: `PNG`, `system-logo-icon`, `fedora-logo-icon`
* 96x96/apps: `PNG`, `system-logo-icon`, `fedora-logo-icon`
* 256x256/apps: `PNG`, `system-logo-icon`, `fedora-logo-icon`
* scalable/apps: `SVG`, `fedora-logo-icon`, `org.fedoraproject.AnacondaInstaller.svg`, `start-here.svg`, `xfce4_xicon1.svg`
* symbolic/apps: `SVG`, `org.fedoraproject.AnacondaInstaller-symbolic`
### ipa
IPA specific assets, usually text. They are generally `PNG` or `JPG`:
* `header-logo.png` - Text
* `login-screen-background.jpg` - No text
* `login-screen-logo.png` - Logo + Text
* `product-name.png` - Text
### pixmaps
`PNG` format, these are usually assets used within GNOME, GDM, and other desktop environments.
### plymouth/charge
Typically unchanged and is for the plymouth loading screen
### svg
`SVG` format of logo assets as `fedora_logo` (color) and `fedora_logo_darkbackground` (white)
`color` file dictates background color if applicable
### testpage
`index.html` for httpd/nginx/etc
## Backgrounds/Wallpapers
### Structure
Wallpapers appear in `PNG` format with a backing `XML` file to list out all available resolutions if applicable, as well as defaults.
A defaults file references each `XML` file that is a default background provided by the backgrounds package, along with default options if applicable.
```
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE wallpapers SYSTEM "gnome-wp-list.dtd">
<wallpapers>
<wallpaper deleted="false">
<name>Rocky Linux 9 Default Background - Placeholder Mesh</name>
<filename>/usr/share/backgrounds/rocky-default-1-mesh.xml</filename>
<options>zoom</options>
<author>Louis Abel</author>
<email>label@rockylinux.org</email>
<license>CC-BY-SA 4.0</license>
</wallpaper>
<wallpaper deleted="false">
<name>Rocky Linux 9 Default Background - Placeholder Target</name>
<filename>/usr/share/backgrounds/rocky-default-1-target.xml</filename>
<options>zoom</options>
<author>Louis Abel</author>
<email>label@rockylinux.org</email>
<license>CC-BY-SA 4.0</license>
</wallpaper>
</wallpapers>
```
The wallpaper XML itself will list every variant of that background, if applicable.
```
<background>
<starttime>
<year>2021</year>
<month>10</month>
<day>29</day>
<hour>19</hour>
<minute>21</minute>
<second>19</second>
</starttime>
<static>
<duration>10000000000.0</duration>
<file>
<!-- Wide 16:9 -->
<size width="1920" height="1080">/usr/share/backgrounds/rocky-default-1-mesh-16-9.png</size>
<!-- Wide 16:10 -->
<size width="1920" height="1200">/usr/share/backgrounds/rocky-default-1-mesh-16-10.png</size>
<!-- Standard 4:3 -->
<size width="2048" height="1536">/usr/share/backgrounds/rocky-default-1-mesh-4-3.png</size>
<!-- Normalish 5:4 -->
<size width="1280" height="1024">/usr/share/backgrounds/rocky-default-1-mesh-5-4.png</size>
</file>
</static>
</background>
```
Day/Night Wallpapers have a similar configuration.
```
<background>
<starttime>
<year>2022</year>
<month>01</month>
<day>01</day>
<hour>8</hour>
<minute>00</minute>
<second>00</second>
</starttime>
<!-- This animation will start at 8 AM. -->
<!-- We start with day at 8 AM. It will remain up for 10 hours. -->
<static>
<duration>36000.0</duration>
<file>/usr/share/backgrounds/rocky-default-1-mesh-day.png</file>
</static>
<!-- Day ended and starts to transition to night at 6 PM. The transition lasts for 2 hours, ending at 8 PM. -->
<transition type="overlay">
<duration>7200.0</duration>
<from>/usr/share/backgrounds/rocky-default-1-mesh-day.png</from>
<to>/usr/share/backgrounds/rocky-default-1-mesh-night.png</to>
</transition>
<!-- It's 8 PM, we're showing the night till 6 AM. -->
<static>
<duration>36000.0</duration>
<file>/usr/share/backgrounds/rocky-default-1-mesh-night.png</file>
</static>
<!-- It's 6 AM, and we're starting to transition to day. Transition completes at 8 AM. -->
<transition type="overlay">
<duration>7200.0</duration>
<from>/usr/share/backgrounds/rocky-default-1-mesh-night.png</from>
<to>/usr/share/backgrounds/rocky-default-1-mesh-day.png</to>
</transition>
</background>
```
### Guidelines
This section goes over the general guidelines for the main backgrounds included in the distribution.
**Note**: It is **highly recommended and encouraged** that a submission should be the highest resolution as noted below. See the [note](#minimum-resolutions) on minimum resolutions.
* **General Theme**: Each Rocky release has a codename, which serves as the general theme. Examples:
* Rocky 8: `Green Obsidian` - Submissions only to extras
* Rocky 9: `Blue Onyx` - This should be generally a blue theme/color scheme. Submissions only to extras.
* Rocky 10: `Red Quartz` - This should be generally a red-like theme/color scheme
* **Required Resolution(s) for Normal Submissions**:
* Resolution must **not** exceed nor fall below: 4092x3072
* **Allowed**:
* Anything related to nature, mountains, rocks, and the like (generally fitting into the "rocky" idea)
* Anything related to the codename (eg. Blue Onyx)
* Anything minimalist/abstract is allowed
* References to the release number (like 9, and so on) are allowed
* Complementary colors should be allowed in the image within reason
* Contrasting colors should be allowed in the image within reason
* Photography + Manipulation should be allowed within reason
* **Highly Encouraged**: [Day](https://github.com/fedoradesign/backgrounds/raw/f34-backgrounds/default/f34-01-day.png) and [Night](https://github.com/fedoradesign/backgrounds/raw/f34-backgrounds/default/f34-02-night.png) versions of wallpapers
* **Discouraged**:
* Avoid using the Rocky logo, unless it fits with an abstract/minimalist idea for the background
* Plain backdrops with the rocky logo are *not* permitted
* **Prohibited**:
* Any AI generated artwork
#### Minimum Resolutions
For general submissions, we require the highest resolution to keep things simple; that way, the user can use a wallpaper without having to choose "the right one" for their monitor size. However, for the case of extra backgrounds, this requirement is more relaxed. If a submitter wishes not to use the highest resolution but opts to make multiple resolutions instead, they should follow the list below:
* **Minimum Required Resolutions**:
* 16:9 (1920x1080)
* 16:10 (1920x1200)
* 5:4 (2048x1536)
* 4:3 (1280x1024)
* **Additional (encouraged) allowed resolutions**:
* 3440x1440
* 2560x1600
* 2560x1440
* 2560x1080
* 1800x1440
* Portrait mode versions of the above are optional
The placeholders in [this commit](https://github.com/rocky-linux/rocky-logos/tree/962a836f70a131faa541a4f8f73a4a3fddfc3dbf/backgrounds) shows an example of using the minimum resolutions.
### Extras Package
If a wallpaper does not make it into the main package (whether it doesn't meet guidelines or is simply just Rocky inspired), it can be submitted for addition to the `rocky-backgrounds-extras` package.

View File

@ -1,59 +0,0 @@
---
title: Release General Overview
---
This section goes over at a high level how we compose releases for Rocky Linux.
As most of our tools are home grown, we have made sure that the tools are open
source and in our git services.
This page should give an idea of the steps we generally take, and we hope that
other projects out there who wish to use our tools can use them in this same
way, whether they want to be an Enterprise Linux derivative or another project
entirely.
## Build System and Tools
The tools in use for the distribution are in the table below.
| Tool | Maintainer | Code Location |
|-----------------|------------------|---------------------------------------------------------------------|
| srpmproc | SIG/Core at RESF | [GitHub](https://github.com/rocky-linux/srpmproc) |
| empanadas | SIG/Core at RESF | [sig-core-toolkit](https://github.com/rocky-linux/sig-core-toolkit) |
| Peridot | SIG/Core at RESF | [GitHub](https://github.com/rocky-linux/peridot) |
| MirrorManager 2 | Fedora Project | [MirrorManager2](https://github.com/fedora-infra/mirrormanager2) |
For Rocky Linux to be built, we use `Peridot` as the build system and
`empanadas` to "compose" the distribution. As we do not use Koji for Rocky
Linux beyond version 9, pungi can no longer be used. Peridot instead takes
pungi configuration data and comps and transforms them into a format it
can understand. Empanadas then comes in to do the "compose" and sync
all the repositories down.
## Full Compose (major or minor releases)
Step by step, it looks like this:
* Distribution is built and maintained in Peridot
* Comps and pungi configuration is converted into the peridot format for the project
* Repositories are created in yumrepofs based on the configuration provided
* A repoclosure is run against the repositories from empanadas to ensure there are no critical issues
* In Parallel:
* Repositories are synced as a "full run" in empanadas
* Lorax is run using empanadas in the peridot cluster
* Lorax results are pulled down from an S3 bucket
* DVD images are built for each architecture
* Compose directory is synced to staging for verification
* Staging is synced to production to allow mirror syncing
* Bit flip on release day
## General Updates
Step by step, it looks like this:
* Distribution is maintained in Peridot
* Updates are built, repos are then "hashed" in yumrepofs
* Empanadas syncs updates as needed, either per repo or all repos at once
* Updates are synced to staging to be verified
* Staging is synced to production to allow mirror syncing
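In empanadas terms, the update sync step above corresponds to a `sync_from_peridot` run, as documented on the Empanadas page. For example, to sync a single repository's updates:
```
# Syncs only the BaseOS repository without syncing sources
% poetry run sync_from_peridot --release 9 --clean-old-packages --repo BaseOS --ignore-source
```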

View File

@ -1,7 +0,0 @@
---
title: Peridot Build System
---
This page goes over the Peridot Build System and how SIG/Core utilizes it.
More to come.

View File

@ -1,11 +0,0 @@
---
title: Rebuild Version Bump
---
In some cases, a package has to be rebuilt. A package may be rebuilt for these reasons:
* Underlying libraries have been rebased
* ABI changes that require a rebuild (mass rebuilds, though they are rare)
* New architecture added to a project
This typically applies to packages being built from a given `src` subgroup. Packages pulled from upstream don't fall into this category in normal circumstances. In those cases, they receive `.0.1` and so on as standalone rebuilds.
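As a purely hypothetical illustration (the package and numbers are invented, and it is assumed here that the suffix is appended to the Release field), a standalone rebuild of an imported package would look like:
```
# before the standalone rebuild
Release: 5%{?dist}
# after the standalone rebuild
Release: 5%{?dist}.0.1
```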

View File

@ -1,6 +0,0 @@
---
nav:
- ... | index.md
- ... | empanadas_common.md
- ... | empanadas_config.md
- ... | empanadas_sig_config.md

View File

@ -1,237 +0,0 @@
---
title: Empanadas common.py Configuration
---
The `common.py` configuration contains dictionaries and classes that dictate
most of the functionality of empanadas.
## Config Items
type: Dictionary
### config.rlmacro
type: String
required: True
description: Empanadas expects to run on an EL system. This is part of the
general checks. This value should not be hardcoded; it should come from the rpm
python module, as sketched below.
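A minimal sketch of deriving this value without hardcoding it, assuming the rpm python bindings are available (this mirrors the reference example at the bottom of this page):
```
import rpm

# expand the %rhel macro on the running system, e.g. "9" on an EL9 host
rlmacro = rpm.expandMacro('%rhel')
```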
### config.dist
type: String
required: False
description: This was the original dist tag placed in mock configs. It combines
`el` with the rpm python module expansion. It is no longer required, but the
option is still available for future use.
### config.arch
type: String
required: True
description: The architecture of the current running system. This is checked
against the supported architectures in general release configurations. This
should not be hardcoded.
### config.date_stamp
type: String
required: True
description: Date time stamp in the form of YYYYMMDD.HHMMSS. This should not be
hardcoded.
### config.compose_root
type: String
required: True
description: Root path of composes on the system running empanadas.
### config.staging_root
type: String
required: False
description: For future use. Root path of staging repository location where
content will be synced to.
### config.production_root
type: String
required: False
description: For future use. Root path of production repository location where
content will be synced to from staging.
### config.category_stub
type: String
required: True
description: For future use. Stub path that is appended to `staging_root` and
`production_root`.
example: `mirror/pub/rocky`
### config.sig_category_stub
type: String
required: True
description: For future use. Stub path that is appended to `staging_root` and
`production_root` for SIG content.
example: `mirror/pub/sig`
### config.repo_base_url
type: String
required: True
description: Base URL where the repositories live. This typically points to a
peridot instance and is supplemented by the configuration's `project_id`
parameter.
Note that this does not have to be a peridot instance; the combination of this
value and `project_id` can be sufficient for empanadas to perform its work.
### config.mock_work_root
type: String
required: True
description: Hardcoded path to where ISO work is performed within a mock chroot.
This is the default path created by mock and it is recommended not to change
this.
example: `/builddir`
### config.container
type: String
required: True
description: This is the container used to perform all operations in podman.
example: `centos:stream9`
### config.distname
type: String
required: True
description: Name of the distribution you are building or building for.
example: `Rocky Linux`
### config.shortname
type: String
required: True
description: Short name of the distribution you are building or building for.
example: `Rocky`
### config.translators
type: Dictionary
required: True
description: Translates Linux architectures to golang architectures. Reserved
for future use.
### config.aws_region
type: String
required: False
description: Region you are working in with AWS or an on-prem cloud that
supports this variable.
example: `us-east-2`
### config.bucket
type: String
required: False
description: Name of the S3-compatible bucket that is used to pull images from.
Requires `aws_region`.
### config.bucket_url
type: String
required: False
description: URL of the S3-compatible bucket that is used to pull images from.
## allowed_type_variants items
type: Dictionary
description: Key value pairs of cloud or image variants. The value is either
`None` or a list type.
## Reference Example
```
config = {
"rlmacro": rpm.expandMacro('%rhel'),
"dist": 'el' + rpm.expandMacro('%rhel'),
"arch": platform.machine(),
"date_stamp": time.strftime("%Y%m%d.%H%M%S", time.localtime()),
"compose_root": "/mnt/compose",
"staging_root": "/mnt/repos-staging",
"production_root": "/mnt/repos-production",
"category_stub": "mirror/pub/rocky",
"sig_category_stub": "mirror/pub/sig",
"repo_base_url": "https://yumrepofs.build.resf.org/v1/projects",
"mock_work_root": "/builddir",
"container": "centos:stream9",
"distname": "Rocky Linux",
"shortname": "Rocky",
"translators": {
"x86_64": "amd64",
"aarch64": "arm64",
"ppc64le": "ppc64le",
"s390x": "s390x",
"i686": "386"
},
"aws_region": "us-east-2",
"bucket": "resf-empanadas",
"bucket_url": "https://resf-empanadas.s3.us-east-2.amazonaws.com"
}
ALLOWED_TYPE_VARIANTS = {
"Azure": None,
"Container": ["Base", "Minimal", "UBI"],
"EC2": None,
"GenericCloud": None,
"Vagrant": ["Libvirt", "Vbox"],
"OCP": None
}
```

View File

@ -1,626 +0,0 @@
---
title: Empanadas config yaml Configuration
---
Each file in `empanadas/config/` is a yaml file that contains configuration
items for the distribution release version. The configuration can heavily
dictate the functionality and what features are directly supported by empanadas
when run.
See the items below to see which options are mandatory and optional.
# Config Items
## Top Level
The Top Level is the name of the profile and starts the YAML dictionary for the
release. It is alphanumeric and accepts punctuation within reason. Common
examples:
* `9`
* `9-beta`
* `8-lookahead`
### fullname
type: String
required: True
description: Needed for treeinfo and discinfo generation.
### revision
type: String
required: True
description: Full version of a release
### rclvl
type: String
required: True
description: Release Candidate or Beta descriptor. Sets names and versions with
this descriptor if enabled.
### major
type: String
required: True
description: Major version of a release
### minor
type: String
required: True
description: Minor version of a release
### profile
type: String
required: True
description: Matches the top level of the release. This should not differ from
the top level assignment.
### disttag
type: String
required: True
description: Sets the dist tag for mock configs.
### bugurl
type: String
required: True
description: A URL to the bug tracker for this release or distribution.
### checksum
type: String
required: True
description: Checksum type. Used when generating checksum information for
images.
### fedora_major
type: String
required: False
description: For future use with icicle.
### allowed_arches
type: list
required: True
description: List of supported architectures for this release.
### provide_multilib
type: boolean
required: True
description: Sets whether the x86_64 architecture will be multilib. It is
recommended that this is set to `True`.
### project_id
type: String
required: True
description: Appended to the base repo URL in common.py. For peridot, it is the
project id that is generated for the project you are pulling from. It can be set
to anything else if need be for non-peridot use.
### repo_symlinks
type: dict
required: False
description: For future use. Sets symlinks to repositories for backwards
compatibility. Key value pairs only.
### renames
type: dict
required: False
description: Renames a repository to the value set. For example, renaming `all`
to `devel`. Set to `{}` if no renames are going to occur.
### all_repos
type: list
required: True
description: List of repositories that will be synced/managed by empanadas.
### structure
type: dict
required: True
description: Key value pairs of `packages` and `repodata`. These are appended
appropriately during syncing and ISO actions. Setting these is mandatory.
### iso_map
type: dictionary
required: True if building ISOs and operating with lorax.
description: Controls how lorax and extra ISOs are built.
If you are not building images, set this to `{}`.
#### xorrisofs
type: boolean
required: True
description: Dictates whether xorrisofs is used to build images. Setting this to
false uses genisoimage. It is recommended that xorrisofs is used.
#### iso_level
type: boolean
required: True
description: Set to false if you are using xorrisofs. Can be set to true when
using genisoimage.
#### images
type: dict
required: True
description: Dictates the ISO images that will be made or the treeinfo that will
be generated.
**Note**: The primary repository (for example, BaseOS) will need to be listed to
ensure the treeinfo data is correctly generated. `disc` should be set to `False`
and `isoskip` should be set to `True`. See the reference example at the end of this page.
##### name.disc
type: boolean
required: True
description: This tells the iso builder if this will be a generated ISO.
##### name.isoskip
type: boolean
required: False
description: This tells the iso builder if this will be skipped, even if `disc`
is set to `True`. Default is `False`.
##### name.variant
type: string
required: True
description: Names the primary variant repository for the image. This is set in
.treeinfo.
##### name.repos
type: list
required: True
description: Names of the repositories included in the image. This is added to
.treeinfo.
##### name.volname
type: string
required: True
required value: `dvd`
description: This is required if building more than the DVD image. By default,
the name `dvd` is hardcoded in the buildImage template.
#### lorax
type: dict
required: True if building lorax images.
description: Sets up lorax images and which repositories to use when building
lorax images.
##### lorax.repos
type: list
required: True
description: List of repos that are used to pull packages to build the lorax
images.
##### lorax.variant
type: string
required: True
description: Base repository for the release
##### lorax.lorax_removes
type: list
required: False
description: Excludes packages that are not needed when lorax is running.
##### lorax.required_pkgs
type: list
required: True
description: Required list of installed packages needed to build lorax images.
#### livemap
type: dict
required: False
description: Dictates what live images are built and how they are built.
##### livemap.git_repo
type: string
required: True
description: The git repository URL where the kickstarts live
##### livemap.branch
type: string
required: True
description: The branch being used for the kickstarts
##### livemap.ksentry
type: dict
required: True
description: Key value pairs of the live images being created. Key being the
name of the live image, value being the kickstart name/path.
##### livemap.allowed_arches
type: list
required: True
description: List of allowed architectures that will build for the live images.
##### livemap.required_pkgs
type: list
required: True
description: Required list of packages needed to build the live images.
#### cloudimages
type: dict
required: False
description: Cloud related settings.
Set to `{}` if not needed.
##### cloudimages.images
type: dict
required: True
description: Cloud images that will be generated and placed in a bucket to be
pulled, along with their formats.
###### cloudimages.images.name
type: dict
required: True
description: Name of the cloud image being pulled.
Accepted key value options (see the sketch after this list):
* `format`, which is `raw`, `qcow2`, `vhd`, `tar.xz`
* `variants`, which is a list
* `primary_variant`, which symlinks to the "primary" variant in the variant list
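A hedged sketch of an entry that uses all three keys (the image name and values below are illustrative; the nesting mirrors the reference example at the end of this page):
```
cloudimages:
  images:
    Container:
      format: tar.xz
      variants: ['Base', 'Minimal', 'UBI']
      primary_variant: 'Base'
```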
#### repoclosure_map
type: dict
required: True
description: Repoclosure settings. These settings are absolutely required when
doing full syncs, which need to check repositories for consistency.
##### repoclosure_map.arches
type: dict
required: True
description: For each architecture (key), dnf switches/settings that dictate how
repoclosure will check for consistency (value, string).
example: `x86_64: '--forcearch=x86_64 --arch=x86_64 --arch=athlon --arch=i686 --arch=i586 --arch=i486 --arch=i386 --arch=noarch'`
##### repoclosure_map.repos
type: dict
required: True
description: For each repository that is pulled for a given release (key),
repositories that will be included in the repoclosure check. A repository that
only checks against itself must have a value of `[]`.
#### extra_files
type: dict
required: True
description: Extra files settings and where they come from. Git repositories are
the only supported method.
##### extra_files.git_repo
type: string
required: True
description: URL to the git repository with the extra files.
##### extra_files.git_raw_path
type: string
required: True
description: URL to the git repository with the extra files, but in its "raw"
URL form.
example: `git_raw_path: 'https://git.rockylinux.org/staging/src/rocky-release/-/raw/r9/'`
##### extra_files.branch
type: string
required: True
description: Branch where the extra files are pulled from.
##### extra_files.gpg
type: dict
required: True
description: For each gpg key type (key), the relative path to the key in the
git repository (value).
These keys help set up the repository configuration when doing syncs.
By default, the RepoSync class sets `stable` as the gpgkey that is used.
##### extra_files.list
type: list
required: True
description: List of files from the git repository that will be used as "extra"
files, placed in the repositories, made available to mirrors, and included on
ISO images if applicable.
# Reference Example
```
---
'9':
fullname: 'Rocky Linux 9.0'
revision: '9.0'
rclvl: 'RC2'
major: '9'
minor: '0'
profile: '9'
disttag: 'el9'
bugurl: 'https://bugs.rockylinux.org'
checksum: 'sha256'
fedora_major: '20'
allowed_arches:
- x86_64
- aarch64
- ppc64le
- s390x
provide_multilib: True
project_id: '55b17281-bc54-4929-8aca-a8a11d628738'
repo_symlinks:
NFV: 'nfv'
renames:
all: 'devel'
all_repos:
- 'all'
- 'BaseOS'
- 'AppStream'
- 'CRB'
- 'HighAvailability'
- 'ResilientStorage'
- 'RT'
- 'NFV'
- 'SAP'
- 'SAPHANA'
- 'extras'
- 'plus'
structure:
packages: 'os/Packages'
repodata: 'os/repodata'
iso_map:
xorrisofs: True
iso_level: False
images:
dvd:
disc: True
variant: 'AppStream'
repos:
- 'BaseOS'
- 'AppStream'
minimal:
disc: True
isoskip: True
repos:
- 'minimal'
- 'BaseOS'
variant: 'minimal'
volname: 'dvd'
BaseOS:
disc: False
isoskip: True
variant: 'BaseOS'
repos:
- 'BaseOS'
- 'AppStream'
lorax:
repos:
- 'BaseOS'
- 'AppStream'
variant: 'BaseOS'
lorax_removes:
- 'libreport-rhel-anaconda-bugzilla'
required_pkgs:
- 'lorax'
- 'genisoimage'
- 'isomd5sum'
- 'lorax-templates-rhel'
- 'lorax-templates-generic'
- 'xorriso'
cloudimages:
images:
EC2:
format: raw
GenericCloud:
format: qcow2
livemap:
git_repo: 'https://git.resf.org/sig_core/kickstarts.git'
branch: 'r9'
ksentry:
Workstation: rocky-live-workstation.ks
Workstation-Lite: rocky-live-workstation-lite.ks
XFCE: rocky-live-xfce.ks
KDE: rocky-live-kde.ks
MATE: rocky-live-mate.ks
allowed_arches:
- x86_64
- aarch64
required_pkgs:
- 'lorax-lmc-novirt'
- 'vim-minimal'
- 'pykickstart'
- 'git'
variantmap:
git_repo: 'https://git.rockylinux.org/rocky/pungi-rocky.git'
branch: 'r9'
git_raw_path: 'https://git.rockylinux.org/rocky/pungi-rocky/-/raw/r9/'
repoclosure_map:
arches:
x86_64: '--forcearch=x86_64 --arch=x86_64 --arch=athlon --arch=i686 --arch=i586 --arch=i486 --arch=i386 --arch=noarch'
aarch64: '--forcearch=aarch64 --arch=aarch64 --arch=noarch'
ppc64le: '--forcearch=ppc64le --arch=ppc64le --arch=noarch'
s390x: '--forcearch=s390x --arch=s390x --arch=noarch'
repos:
devel: []
BaseOS: []
AppStream:
- BaseOS
CRB:
- BaseOS
- AppStream
HighAvailability:
- BaseOS
- AppStream
ResilientStorage:
- BaseOS
- AppStream
RT:
- BaseOS
- AppStream
NFV:
- BaseOS
- AppStream
SAP:
- BaseOS
- AppStream
- HighAvailability
SAPHANA:
- BaseOS
- AppStream
- HighAvailability
extra_files:
git_repo: 'https://git.rockylinux.org/staging/src/rocky-release.git'
git_raw_path: 'https://git.rockylinux.org/staging/src/rocky-release/-/raw/r9/'
branch: 'r9'
gpg:
stable: 'SOURCES/RPM-GPG-KEY-Rocky-9'
testing: 'SOURCES/RPM-GPG-KEY-Rocky-9-Testing'
list:
- 'SOURCES/Contributors'
- 'SOURCES/COMMUNITY-CHARTER'
- 'SOURCES/EULA'
- 'SOURCES/LICENSE'
- 'SOURCES/RPM-GPG-KEY-Rocky-9'
- 'SOURCES/RPM-GPG-KEY-Rocky-9-Testing'
...
```

View File

@ -1,16 +0,0 @@
---
title: Empanadas SIG yaml Configuration
---
Each file in `empanadas/sig/` is a yaml file that contains configuration
items for the distribution release version. The configuration determines the
structure of the SIG repositories synced from Peridot or a given repo.
Note that a release profile (for a major version) is still required for this
sync to work.
See the items below to see which options are mandatory and optional.
## Config Items
## Reference Example

View File

@ -1,5 +0,0 @@
---
title: References
---
Use this section to locate reference configuration items for the toolkit.

View File

@ -1,33 +0,0 @@
---
title: Release Engineering (SIG/Core) Meeting 2024-03-18
---
## Attendees
* Louis Abel
* Mustafa Gezen
* Sherif Nagy
* Neil Hanlon
* Pablo Greco
* Taylor Goodwill
* Skip Grube
## Old Business
* Ticket opened to track current packages not yet built for Rocky Linux 10
To fill.
## New Business
* RISCV...
To fill.
## Open Floor
To fill.
## Action Items
To fill.

View File

@ -1,66 +0,0 @@
<h4>Resources</h4>
=== "Account Services"
**URL**: [https://accounts.rockylinux.org](https://accounts.rockylinux.org)
**Purpose**: Account Services maintains the accounts for almost all components of the Rocky ecosystem
**Technology**: Noggin used by Fedora Infrastructure
**Contact**: `~Infrastructure` in Mattermost and `#rockylinux-infra` in Libera IRC
=== "Git (RESF Git Service)"
**URL**: [https://git.resf.org](https://git.resf.org)
**Purpose**: General projects, code, and so on for the Rocky Enterprise Software Foundation.
**Technology**: [Gitea](https://gitea.io/en-us/)
**Contact**: `~Infrastructure`, `~Development` in Mattermost and `#rockylinux-infra`, `#rockylinux-devel` in Libera IRC
=== "Git (Rocky Linux GitHub)"
**URL**: [https://github.com/rocky-linux](https://github.com/rocky-linux)
**Purpose**: General purpose code, assets, and so on for Rocky Linux. Some content is mirrored to the RESF Git Service.
**Technology**: [GitHub](https://github.com)
**Contact**: `~Infrastructure`, `~Development` in Mattermost and `#rockylinux-infra`, `#rockylinux-devel` in Libera IRC
=== "Git (Rocky Linux GitLab)"
**URL**: [https://git.rockylinux.org](https://git.rockylinux.org)
**Purpose**: Packages and light code for the Rocky Linux distribution
**Technology**: [GitLab](https://gitlab.com)
**Contact**: `~Infrastructure`, `~Development` in Mattermost and `#rockylinux-infra`, `#rockylinux-devel` in Libera IRC
=== "Mail Lists"
**URL**: [https://lists.resf.org](https://lists.resf.org)
**Purpose**: Users can subscribe and interact with various mail lists for the Rocky ecosystem
**Technology**: Mailman 3 + Hyper Kitty
**Contact**: `~Infrastructure` in Mattermost and `#rockylinux-infra` in Libera IRC
=== "Contacts"
| Name | Email | Mattermost Name | IRC Name |
|---------------------------------|-------------------------|-------------------|--------------------|
| Louis Abel | label@rockylinux.org | @nazunalika | Sokel/label/Sombra |
| Mustafa Gezen | mustafa@rockylinux.org | @mustafa | mstg |
| Skip Grube | skip@rockylinux.org | @skip77 | |
| Sherif Nagy | sherif@rockylinux.org | @sherif | |
| Pablo Greco | pgreco@rockylinux.org | @pgreco | pgreco |
| Neil Hanlon | neil@resf.org | @neil | neil |
| Taylor Goodwill | tg@resf.org | @tgo | tg |

View File

@ -1,60 +0,0 @@
---
title: Release Engineering (SIG/Core)
---
# Release Engineering (SIG/Core) Wiki
## About
The Rocky Linux Release Engineering Team (also known as SIG/Core) dedicates themselves to the development, building, management, production, and release of Rocky Linux. This group combines development and infrastructure in a single, cohesive unit of individuals that ultimately make the distribution happen.
The "SIG/Core" reference name is not a strict Special Interest Group (as defined by [the Rocky Linux wiki](https://wiki.rockylinux.org/special_interest_groups/)).
The general goals (or "interests") is:
* To ensure Rocky Linux is built and released in a complete and functional manner
* To ensure proper collaboration and development of the Peridot Build System
* To ensure all users, developers, and Special Interest Groups have a solid, stable platform to build upon
## Mission
Release Engineering strives to ensure a stable distribution is developed, built, tested, and provided to the community from the RESF as a compatible derivative of Red Hat Enterprise Linux. To achieve this goal, some of the things we do are:
* Ensuring a quality and fully compatible release product
* Developing and iterating on the build systems and architecture
* Developing all code in the open
* Setting the technical direction for the build system architecture
* Release of beta and final products to the end users and mirrors
* Release of timely updates to the end users and mirrors
See the [What We Do](what_we_do.md) page for a more detailed explanation of our activities.
## Getting In Touch / Contributing
There are various ways to get in touch with Release Engineering and provide help, assistance, or even just ideas that can benefit us or the entire community.
* Chat
* Mattermost: [~development](https://chat.rockylinux.org/rocky-linux/channels/development) on Mattermost
* IRC: #rockylinux and #rockylinux-devel on [libera.chat](https://libera.chat)
* [RESF SIG/Core Issue Tracker](https://git.resf.org/sig_core/meta/issues)
* [Mail List](https://lists.resf.org/mailman3/lists/rocky.lists.resf.org/)
For a list of our members, see the [Members](members.md) page.
## Resources and Rocky Linux Policies
* [RESF Git Service](https://git.resf.org)
* [Rocky Linux GitHub](https://github.com/rocky-linux)
* [Rocky Linux GitLab](https://git.rockylinux.org)
* [Rocky Linux Image Guide](https://wiki.rockylinux.org/rocky/image/)
* [Rocky Linux Repository Guide](https://wiki.rockylinux.org/rocky/repo/)
* [Rocky Linux Release Version Guide/Policy](https://wiki.rockylinux.org/rocky/version/)
* [Special Interest Groups](https://wiki.rockylinux.org/special_interest_groups/).
## General Packaging Resources
* [RPM Packaging Guide](https://rpm-packaging-guide.github.io/)
* [Fedora Packaging Guidelines](https://docs.fedoraproject.org/en-US/packaging-guidelines/)
* [Basic Packaging Tutorial](https://docs.fedoraproject.org/en-US/package-maintainers/Packaging_Tutorial_GNU_Hello/)

View File

@ -1,5 +0,0 @@
---
nav:
- ... | index.md
- Koji Tagging: koji_tagging.md
- Debrand List: debrand_list.md

View File

@ -1,83 +0,0 @@
---
title: Rocky Debrand Packages List
---
This is a list of packages that require changes to their material for acceptance in Rocky Linux. Usually this means there is some text or images in the package that reference upstream trademarks, and these must be swapped out before we can distribute them.
The first items in this list are referenced from the excellent CentOS release notes here: https://wiki.centos.org/Manuals/ReleaseNotes/CentOS8.1905#Packages_modified_by_CentOS
It is assumed that we will have to modify these same packages. It is also assumed that these changed packages might not necessarily be debranding.
However, this list is incomplete. For example, the package **Nginx** does not appear on the list, and still has RHEL branding in the CentOS repos. We will need to investigate the rest of the package set and find any more packages like this that we must modify.
One way to find these changes is to look for `?centos` conditionals in the SPEC file, while also reviewing any manual debranding that was done for the `c8` branches.
In many cases, a search and replace of `?centos` with `?rocky` will be sufficient.
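Below is a minimal sketch of that kind of search and replace, assuming a checked-out package repo with a spec file at a hypothetical path; always review the resulting diff before committing.
```
# Hypothetical spec path; the tokens follow the note above.
grep -n '?centos' SPECS/example.spec
sed -i 's/?centos/?rocky/g' SPECS/example.spec
git diff SPECS/example.spec
```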
Current patches (for staging) are [here](https://git.rockylinux.org/staging/patch).
## Packages that need debranding changes:
| Package | Notes | Work Status |
|:--------|--------------|-------------------------|
| abrt | See [here](https://git.rockylinux.org/staging/patch/abrt) | **DONE** |
| anaconda | See [here](https://git.rockylinux.org/staging/patch/anaconda) | **DONE** |
| apache-commons-net | AppStream module with elevating branch names | NO CHANGES REQUIRED |
| ~~basesystem~~ | (does not require debranding, it is a skeleton package) | NO CHANGES REQUIRED |
| cloud-init | See [here](https://git.rockylinux.org/staging/patch/cloud-init) | **DONE** - NEEDS REVIEW IN GITLAB (Rich Alloway) |
| cockpit | See [here](https://git.rockylinux.org/staging/patch/cockpit) | **DONE** |
| ~~compat-glibc~~ | | NOT IN EL 8 |
| dhcp | See [here](https://git.rockylinux.org/staging/patch/dhcp) | **DONE**, NEEDS REVIEW IN GITLAB (Rich Alloway) |
| firefox | See [here](https://git.rockylinux.org/staging/patch/firefox) -- Still requires a distribution.ini ID | **MOSTLY DONE** (Louis) |
| fwupdate | | NOT STARTED |
| glusterfs | Changes don't appear to be required | NO CHANGES REQUIRED |
| gnome-settings-daemon | No changes required for now. | NO CHANGES REQUIRED |
| grub2 | (secureboot patches not done, just debrand) See [here](https://git.rockylinux.org/staging/patch/grub2) | **DONE**, NEEDS REVIEW IN GITLAB AND SECUREBOOT (Rich Alloway) |
| httpd | See [here](https://git.centos.org/rpms/httpd/c/2f74eecf85362e67c403b7b1386a729da3e5c33d?branch=c8-stream-2.4) | **DONE** |
| initial-setup | See [here](https://git.rockylinux.org/staging/patch/initial-setup) | **DONE** |
| ipa | This is a dual change: Logos and ipaplatform. Logos are taken care of in `rocky-logos` and the `ipaplatform` is taken care of here. See [here](https://git.rockylinux.org/staging/patch/ipa) | **DONE** |
| ~~kabi-yum-plugins~~ | | NOT IN EL 8 |
| kernel | See [here](https://git.centos.org/rpms/kernel/c/20287bd53a5c2e87db2470380271b72ac8a1ed59?branch=c8) for a potential example | NOT STARTED |
| ~~kde-settings~~ | | NOT IN EL 8 |
| libreport | See [here](https://git.rockylinux.org/staging/patch/libreport) | **DONE** |
| oscap-anaconda-addon | See [here](https://git.rockylinux.org/staging/patch/oscap-anaconda-addon) | **DONE** Requires install QA |
| PackageKit | See [here](https://git.rockylinux.org/staging/patch/PackageKit) | **DONE** |
| ~~pcs~~ | | NO CHANGES REQUIRED |
| plymouth | See [here](https://git.rockylinux.org/staging/patch/plymouth) | **DONE** |
| ~~redhat-lsb~~ | | NO CHANGES REQUIRED |
| redhat-rpm-config | See [here](https://git.rockylinux.org/staging/patch/redhat-rpm-config) | **DONE** |
| scap-security-guide | QA is likely required to test this package as it is | NO CHANGES REQUIRED, QA REQUIRED |
| shim | | NOT STARTED |
| shim-signed | | NOT STARTED |
| sos | See [here](https://git.rockylinux.org/staging/patch/sos) | **DONE** |
| subscription-manager | See [here](https://git.rockylinux.org/staging/patch/subscription-manager) | **DONE**, NEEDS REVIEW |
| ~~system-config-date~~ | | NOT IN EL8 |
| ~~system-config-kdump~~ | | NOT IN EL8 |
| thunderbird | See [here](https://git.rockylinux.org/staging/patch/thunderbird) | **DONE** |
| ~~xulrunner~~ | | NOT IN EL 8 |
| ~~yum~~ | | NO CHANGES REQUIRED |
| **(end of CentOS list)** | | |
| nginx | Identified changes, in staging | (ALMOST) **DONE** |
## Packages that need to become other packages:
We will want to create our own versions of these packages. The full "lineage" is shown, from RHEL -> CentOS -> Rocky (where applicable).
| Package | Notes |
|:--------|--------------|
| redhat-indexhtml -> centos-indexhtml -> rocky-indexhtml | [Here](https://git.rockylinux.org/original/rpms/rocky-indexhtml) |
| redhat-logos -> centos-logos -> rocky-logos | [Here](https://git.rockylinux.org/original/rpms/rocky-logos) |
| redhat-release-* -> centos-release -> rocky-release | [Here](https://git.rockylinux.org/original/rpms/rocky-release) |
| centos-backgrounds -> rocky-backgrounds | Provided by [logos](https://git.rockylinux.org/original/rpms/rocky-logos) |
| centos-linux-repos -> rocky-repos | [Here](https://git.rockylinux.org/original/rpms/rocky-repos) |
| centos-obsolete-packages | [Here](https://git.rockylinux.org/original/rpms/rocky-obsolete-packages) |
## Packages that Exist in RHEL, but not in CentOS
For the sake of completeness, here is a list of packages that are in RHEL 8 but do not exist in CentOS 8. We do not need to worry about these packages:
- insights-client
- Red_Hat_Enterprise_Linux-Release_Notes-8-*
- redhat-access-gui
- redhat-bookmarks
- subscription-manager-migration
- subscription-manager-migration-data

View File

@ -1,9 +0,0 @@
---
title: Legacy
---
Legacy documentation comes here.
[Debrand List](debrand_list.md)
[Koji Tagging](koji_tagging.md)

View File

@ -1,85 +0,0 @@
---
title: Koji Tagging Strategy
---
This document covers how the Rocky Linux Release Engineering Team handles the tagging for builds in Koji and how it affects the overall build process.
## Contact Information
| | |
| - | - |
| **Owner** | Release Engineering Team |
| **Email Contact** | releng@rockylinux.org |
| **Mattermost Contacts** | `@label` `@mustafa` `@neil` `@tgo` |
| **Mattermost Channels** | `~Development` |
## What is Koji?
Koji is the build system used for Rocky Linux, as well as CentOS, Fedora, and likely others. Red Hat uses an internal variant of Koji called "Brew" with similar functionality and usage. Koji uses mock, a common RPM building utility, to build RPMs in a chroot environment.
## Architecture of Koji
### Components
Koji comprises multiple components:
* `koji-hub`, which is the center of all Koji operations. It runs an XML-RPC interface and relies on other components to call it for actions. It also talks to the database and is one of the components with write access to the filesystem.
* `kojid`, which is the daemon that runs on the builder nodes. Its responsibility is to talk to the hub for actions it can or must perform, for example building an RPM or install images. But that is not all that it can do.
* `koji-web` is a set of scripts that provides the web interface that anyone can see at our [koji](https://kojidev.rockylinux.org).
* `koji` is the command line utility that is commonly used. It is a wrapper around the various API commands that can be called. In our environment, it requires a login via Kerberos.
* `kojira` is a component that ensures repodata is updated among the build tags.
### Tags
Tags are the most important part of the koji ecosystem. With tags, you can have specific repository build roots for the entire distribution, or just a small subset of builds that should not pollute the main build tags (for example, for SIGs where a package or two might be newer, or even older, than what's in BaseOS/AppStream).
Using tags, you can set up what is called "inheritance". For example, you can have a tag named `dist-rocky8-build` that happens to inherit `dist-rocky8-updates-build`, which will likely have a newer set of packages than the former. Inheritance, in a way, can be compared to setting "dnf priorities" if you've done that before. Another way to look at it is "ordering" and "what comes first".
Generally, targets map to tags: a target defines the tag a package is built in and the tag the resulting build is sent to.
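As a quick sketch, the inheritance chain and target mappings can be inspected with the koji CLI, assuming it is configured against the Rocky hub; the tag and target names below follow the examples in this document and may differ in practice.
```
# Show the inheritance chain for a build tag
koji list-tag-inheritance dist-rocky8-build
# Show which build/destination tags a target maps to
koji list-targets --name dist-rocky8
```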
## Tag Strategy
A question we often get is "what's the difference between a build and an updates-build tag?" It's all about the inheritance. For example, let's take a look at `dist-rocky8-build`:
```
dist-rocky8-build
el8
dist-rocky8
build-modules
. . .
```
In this tag, you can see that this build tag inherits el8 packages first, then the packages in dist-rocky8, and then build-modules. Generally, this is where "base" packages start out, and many of them won't be updated or even change during the lifecycle of the version.
```
dist-rocky8-updates-build
el8
dist-rocky8-updates
dist-rocky8
dist-rocky8-build
build-modules
```
This one is a bit different. Notice that it inherits el8 first, then dist-rocky8-updates, which inherits dist-rocky8. It also pulls in dist-rocky8-build, the previous tag we discussed. This tag is where updates for a minor release are sent.
```
dist-rocky8_4-updates-build
el8_4
dist-rocky8-updates
dist-rocky8
dist-rocky8-build
el8
build-modules
```
Here's a more interesting one. Notice something? It's pretty similar to the last one, but it inherits el8_4 instead. This is where updates during 8.4 are sent, and that's how they get tagged as `.el8_4` on the RPMs. The `el8_4` tag contains a build macros package that sets the `%dist` macro accordingly. When 8.5 comes out, we'll have basically the same setup.
At the end of the day, builds that happen in these updates-build tags get dropped in dist-rocky8-updates.
### What about modules?
Modules are a bit tricky. We generally don't touch how MBS does its tags or what's going on there. When module builds are being done, they do end up using the el8 packages in some form. The modules are separated entirely from the main tags, though, so they don't pollute the main tags. You don't want a situation where, say, you build the latest ruby, but something builds off the default version of ruby provided in `el8`, and now you're in trouble with dnf filtering issues.
### How do we determine what is part of a compose?
There are special tags that have a `-compose` suffix. These tags are used as a way to pull down packages for repository building during the pungi process.
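As a hedged example, the contents of such a compose tag can be listed with the koji CLI; the exact tag name here is an assumption based on the naming described above.
```
# List the latest builds currently tagged into the compose tag
koji list-tagged dist-rocky8-compose --latest
```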

View File

@ -1,15 +0,0 @@
---
title: Members
---
Release Engineering (SIG/Core) is a mix of Development and Infrastructure members who ensure a high quality release of Rocky Linux as well as the uptime of the services provided to the community. The current members of this group are listed in the table below. Some members may also be found in various Special Interest Groups, such as SIG/AltArch and SIG/Kernel.
| Role | Name | Email | Mattermost Name | IRC Name |
|---------------------------------------------------|---------------------------------|------------------------ |-------------------|--------------------|
| Release Engineering Co-Lead and Infrastructure | Louis Abel | label@rockylinux.org | @nazunalika | Sokel/label/Sombra |
| Release Engineering Co-Lead | Mustafa Gezen | mustafa@rockylinux.org | @mustafa | mstg |
| Release Engineering and Development | Skip Grube | skip@rockylinux.org | @skip77 | |
| Release Engineering and Development | Sherif Nagy | sherif@rockylinux.org | @sherif | |
| Release Engineering and Development | Pablo Greco | pgreco@rockylinux.org | @pgreco | pgreco |
| Infrastructure Lead | Neil Hanlon | neil@resf.org | @neil | neil |
| Infrastructure Lead | Taylor Goodwill | tg@resf.org | @tgo | tg |

View File

@ -1,7 +0,0 @@
---
title: RPM
---
This section is primarily for documentation and useful information as it
pertains to package building and modularity. Use the menu on the left side to
find the information you're looking for.

View File

@ -1,10 +0,0 @@
---
nav:
- ... | index.md
- Release Procedures: sop_release.md
- Compose and Repo Sync for Rocky Linux 8: sop_compose_8.md
- Compose and Repo Sync for Rocky Linux and Peridot: sop_compose.md
- Compose and Repo Sync for Rocky Linux Special Interest Groups: sop_compose_sig.md
- Generalized Prep Checklist for Upcoming Releases: sop_upstream_prep_checklist.md
- Mirror Manager: sop_mirrormanager2.md
...

View File

@ -1,6 +0,0 @@
---
title: SOP (Standard Operating Procedures)
---
This section goes over the various SOP's for SIG/Core. Please use the menu items
to find the various pages of interest.

View File

@ -1,142 +0,0 @@
---
title: 'SOP: Compose and Repo Sync for Rocky Linux and Peridot'
---
This SOP covers how the Rocky Linux Release Engineering Team handles composes and repository syncs for the distribution. It contains information on the scripts that are utilized and in what order, depending on the use case.
## Contact Information
| | |
| - | - |
| **Owner** | Release Engineering Team |
| **Email Contact** | releng@rockylinux.org |
| **Email Contact** | infrastructure@rockylinux.org |
| **Mattermost Contacts** | `@label` `@mustafa` `@neil` `@tgo` |
| **Mattermost Channels** | `~Development` |
## Related Git Repositories
There are several git repositories used in the overall composition of a repository or a set of repositories.
[Pungi](https://git.rockylinux.org/rocky/pungi-rocky) - This repository contains all the necessary pungi configuration files that peridot translates into its own configuration. Pungi is no longer used for Rocky Linux.
[Comps](https://git.rockylinux.org/rocky/comps) - This repository contains all the necessary comps (which are groups and other data) for a given major version. Peridot (and pungi) use this information to properly build repositories.
[Toolkit](https://github.com/rocky-linux/sig-core-toolkit) - This repository contains various scripts and utilities used by Release Engineering, such as syncing composes, functionality testing, and mirror maintenance.
## Composing Repositories
### Mount Structure
There is a designated system that takes care of composing repositories. These systems contain the necessary EFS/NFS mounts for the staging and production repositories as well as composes.
* `/mnt/compose` -> Compose data
* `/mnt/repos-staging` -> Staging
* `/mnt/repos-production` -> Production
### Empanadas
Each repository or set of repositories is controlled by various comps and pungi configurations that are translated into peridot. Empanadas is used to run a reposync from peridot's yumrepofs repositories, generate ISO's, and create a pungi compose look-alike. Because of this, the comps and pungi-rocky configuration is not referenced with empanadas.
### Running a Compose
First, the toolkit must be cloned. In the `iso/empanadas` directory, run `poetry install`. You'll then have access to the various commands needed:
* `sync_from_peridot`
* `build-iso`
* `build-iso-extra`
* `pull-unpack-tree`
* `pull-cloud-image`
* `finalize_compose`
#### Full Compose
To perform a full compose, this order is expected (replacing X with major version or config profile)
```
# This creates a brand new directory under /mnt/compose/X and symlinks it to latest-Rocky-X
poetry run sync_from_peridot --release X --hashed --repoclosure --full-run
# On each architecture, this must be ran to generate the lorax images
# !! Use --rc if the image is a release candidate or a beta image
# Note: This is typically done using kubernetes and uploaded to a bucket
poetry run build-iso --release X --isolation=None
# The images are pulled from the bucket
poetry run pull-unpack-tree --release X
# The extra ISO's (usually just DVD) are generated
# !! Use --rc if the image is a release candidate or a beta image
# !! Set --extra-iso-mode to mock if desired
# !! If there is more than the dvd, remove --extra-iso dvd
poetry run build-iso-extra --release X --extra-iso dvd --extra-iso-mode podman
# This pulls the generic and EC2 cloud images
poetry run pull-cloud-image --release X
# This ensures everything is closed out for a release. This copies iso's, images,
# generates metadata, and the like.
# !! DO NOT RUN DURING INCREMENTAL UPDATES !!
poetry run finalize_compose --release X
```
#### Incremental Compose
It is possible to compose individual repos if you know which ones you want to sync. This can be done when it is not for a brand new release.
```
# Set your repos as desired. --arch is also acceptable.
# --ignore-debug and --ignore-source are also acceptable options.
poetry run sync_from_peridot --release X --hashed --clean-old-packages --repo X,Y,Z
```
## Syncing Composes
Syncing utilizes the sync scripts provided in the release engineering toolkit.
When the scripts are run, they are usually run with a specific purpose, as each major version may be different.
The below are common vars files. common_X will override what's in common. Typically these set what repositories exist and how they are named or look at the top level. These also set the current major.minor release as necessary.
```
.
├── common
├── common_8
├── common_9
```
These are for the releases in general. What they do is noted below.
```
├── gen-torrents.sh -> Generates torrents for images
├── minor-release-sync-to-staging.sh -> Syncs a minor release to staging
├── prep-staging-X.sh -> Preps staging updates and signs repos (only for 8)
├── sign-repos-only.sh -> Signs the repomd (only for 8)
├── sync-file-list-parallel.sh -> Generates file lists in parallel for mirror sync scripts
├── sync-to-prod.sh -> Syncs staging to production
├── sync-to-prod.delete.sh -> Syncs staging to production (deletes artifacts that are no longer in staging)
├── sync-to-prod-sig.sh -> Syncs a sig provided compose to production
├── sync-to-staging.sh -> Syncs a provided compose to staging
├── sync-to-staging.delete.sh -> Syncs a provided compose to staging (deletes artifacts that are no longer in the compose)
├── sync-to-staging-sig.sh -> Syncs a sig provided compose to staging
```
Generally, you will only run `sync-to-staging.sh` or `sync-to-staging.delete.sh` to sync. The former is for older releases, the latter is for newer releases. Optionally, if you are syncing a "beta" or "lookahead" release, you will need to also provide the `RLREL` variable as `beta` or `lookahead`.
```
# The below syncs to staging for Rocky Linux 8
RLVER=8 bash sync-to-staging.sh Rocky
# The below syncs to staging for Rocky Linux 9
RLVER=9 bash sync-to-staging.delete.sh Rocky
```
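As a sketch of the beta/lookahead case mentioned above, the same scripts are used with `RLREL` set accordingly; the version numbers here are only examples.
```
# Beta sync to staging for a hypothetical Rocky Linux 9 beta
RLREL=beta RLVER=9 bash sync-to-staging.delete.sh Rocky
# Lookahead sync to staging
RLREL=lookahead RLVER=9 bash sync-to-staging.delete.sh Rocky
```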
Once the syncs are done, staging must be tested and vetted before being sent to production. Once staging is completed, it is synced to production.
```
# Set X to whatever release
RLVER=X bash sync-to-prod.delete.sh
bash sync-file-list-parallel.sh
```
During this phase, staging is rsynced with production, the file list is updated, and the full time list is also updated to allow mirrors to know that the repositories have been updated and that they can sync.
**Note**: If multiple releases are being updated, it is important to run the syncs to completion before running the file list parallel script.
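As a sketch of that ordering, where the script choice per release mirrors the staging step above (an assumption, not a fixed procedure):
```
# Finish each production sync to completion first...
RLVER=8 bash sync-to-prod.sh
RLVER=9 bash sync-to-prod.delete.sh
# ...then regenerate the file lists once
bash sync-file-list-parallel.sh
```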

View File

@ -1,151 +0,0 @@
---
title: 'SOP: Compose and Repo Sync for Rocky Linux 8'
---
This SOP covers how the Rocky Linux Release Engineering Team handles composes and repository syncs for Rocky Linux 8. It contains information on the scripts that are utilized and in what order, depending on the use case.
Please see the other SOP for Rocky Linux 9+, which is managed via empanadas and peridot.
## Contact Information
| | |
| - | - |
| **Owner** | Release Engineering Team |
| **Email Contact** | releng@rockylinux.org |
| **Email Contact** | infrastructure@rockylinux.org |
| **Mattermost Contacts** | `@label` `@mustafa` `@neil` `@tgo` |
| **Mattermost Channels** | `~Development` |
## Related Git Repositories
There are several git repositories used in the overall composition of a repository or a set of repositories.
[Pungi](https://git.rockylinux.org/rocky/pungi-rocky) - This repository contains all the necessary pungi configuration files for composes that come from koji. Pungi interacts with koji to build the composes.
[Comps](https://git.rockylinux.org/rocky/comps) - This repository contains all the necessary comps (which are groups and other data) for a given major version. Pungi uses this information to properly build the repositories.
[Toolkit](https://github.com/rocky-linux/sig-core-toolkit) - This repository contains various scripts and utilities used by Release Engineering, such as syncing composes, functionality testing, and mirror maintenance.
## Composing Repositories
For every stable script, there is an equivalent beta or lookahead script available.
### Mount Structure
There is a designated system that takes care of composing repositories. These systems contain the necessary EFS/NFS mounts for the staging and production repositories as well as composes.
* `/mnt/koji` -> Koji files store
* `/mnt/compose` -> Compose data
* `/mnt/repos-staging` -> Staging
* `/mnt/repos-production` -> Production
### Pungi
Each repository or set of repositories is controlled by various pungi configurations. For example, `r8.conf` will control the absolute base of Rocky Linux 8, which imports other git repository data as well as accompanying json or other configuration files.
### Running a Compose
Inside the `pungi` git repository, the `scripts` folder contains the necessary scripts that are run to perform a compose. There are different types of composes:
* produce -> Generates a full compose, generally used for minor releases, which generate new ISO's
* update -> Generates a smaller compose, generally used for updates within a minor release cycle - ISO's are not generated
Each script is titled appropriately:
* `produce-X.sh` -> Generates a full compose for X major release, typically set to the current minor release according to `rX.conf`
* `produce-X-full.sh` -> Generates a full compose for X major release, including extras, plus, and devel in one go.
* `updates-X.sh` -> Generates a smaller compose for X major release, typically set to the current minor release according to `rX.conf`
* `updates-X-NAME.sh` -> Generates a compose for the specific compose, such as NFV, Rocky-devel, Extras, or Plus
* `updates-X-full.sh` -> Generates a full incremental compose for the X release, which includes extras, plus, and devel in one go. Does NOT make ISO's.
When these scripts are run, they generate an appropriately named directory under `/mnt/compose/X` with an accompanying symlink. For example, if an update to `Rocky` was made using `updates-8.sh`, the below would be created:
```
drwxr-xr-x. 5 root root 6144 Jul 21 17:44 Rocky-8-updates-20210721.1
lrwxrwxrwx. 1 root root 26 Jul 21 18:26 latest-Rocky-8 -> Rocky-8-updates-20210721.1
```
This setup also allows pungi to reuse previous package set data to reduce the time it takes to build a compose. Typically during a new minor release, all composes should be run so they can be properly combined. Example of a typical order if releasing 8.X:
```
produce-8.sh
updates-8-devel.sh
updates-8-extras.sh
# ! OR !
produce-8-full.sh
```
## Syncing Composes
Syncing utilizes the sync scripts provided in the release engineering toolkit.
When the scripts are run, they are usually run for a specific purpose. They are also run in a certain order to ensure integrity and consistency of a release.
The below are common vars files. common_X will override what's in common. Typically these set what repositories exist and how they are named or look at the top level. These also set the current major.minor release as necessary.
```
.
├── common
├── common_8
├── common_9
```
These are for the releases in general. What they do is noted below.
```
├── gen-torrents.sh -> Generates torrents for images
├── minor-release-sync-to-staging.sh -> Syncs a minor release to staging
├── sign-repos-only.sh -> Signs the repomd (only)
├── sync-to-prod.sh -> Syncs staging to production
├── sync-to-staging.sh -> Syncs a provided compose to staging
├── sync-to-staging-sig.sh -> Syncs a sig provided compose to staging
```
Generally, you will only run `minor-release-sync-to-staging.sh` when a full minor release is being produced. So for example, if 8.5 has been built out, you would run that after a compose. `gen-torrents.sh` would be run shortly after.
When doing updates, the order of operations (preferably) would be:
```
* sync-to-staging.sh
* sync-to-staging-sig.sh -> Only if sigs are updated
* sync-to-prod.sh -> After the initial testing, it is sent to prod.
```
An example of order:
```
# The below syncs to staging
RLVER=8 bash sync-to-staging.sh Extras
RLVER=8 bash sync-to-staging.sh Rocky-devel
RLVER=8 bash sync-to-staging.sh Rocky
```
Once the syncs are done, staging must be tested and vetted before being sent to production. During this stage, the `updateinfo.xml` is also applied where necessary to the repositories to provide errata. Once staging is completed, it is synced to production.
```
pushd /mnt/repos-staging/mirror/pub/rocky/8.X
python3.9 /usr/local/bin/apollo_tree -p $(pwd) -n 'Rocky Linux 8 $arch' -i Live -i Minimal -i devel -i extras -i images -i isos -i live -i metadata -i Devel -i plus -i nfv
popd
RLVER=8 bash sign-repos-only.sh
RLVER=8 bash sync-to-prod.sh
bash sync-file-list-parallel.sh
```
During this phase, staging is rsynced with production, the file list is updated, and the full time list is also updated to allow mirrors to know that the repositories have been updated and that they can sync.
**Note**: If multiple releases are being updated, it is important to run the syncs to completion before running the file list parallel script.
## Quicker Composes
On the designated compose box, there is a script that can do all of the incremental steps.
```
cd /root/cron
bash stable-updates
```
The same goes for a full production.
```
bash stable
```

View File

@ -1,73 +0,0 @@
---
title: 'SOP: Compose and Repo Sync for Rocky Linux Special Interest Groups'
---
This SOP covers how the Rocky Linux Release Engineering Team handles composes and repository syncs for Special Interest Groups.
## Contact Information
| | |
| - | - |
| **Owner** | Release Engineering Team |
| **Email Contact** | releng@rockylinux.org |
| **Email Contact** | infrastructure@rockylinux.org |
| **Mattermost Contacts** | `@label` `@mustafa` `@neil` `@tgo` |
| **Mattermost Channels** | `~Development` |
## Composing Repositories
### Mount Structure
There is a designated system that takes care of composing repositories. These systems contain the necessary EFS/NFS mounts for the staging and production repositories as well as composes.
* `/mnt/compose` -> Compose data
* `/mnt/repos-staging` -> Staging
* `/mnt/repos-production` -> Production
### Empanadas
Each repository or set of repositories is controlled by various comps and pungi configurations that are translated into peridot. Empanadas is used to run a reposync from peridot's yumrepofs repositories, generate ISO's, and create a pungi compose look-alike. Because of this, the comps and pungi-rocky configuration is not referenced with empanadas.
### Running a Compose
First, the toolkit must be cloned. In the `iso/empanadas` directory, run `poetry install`. You'll then have access to the various commands needed:
* `sync_sig`
To perform a compose of a SIG, it must be defined in the configuration. As an example, here is how to compose the `core` SIG.
```
# This creates a brand new directory under /mnt/compose/X and symlinks it to latest-SIG-Y-X
~/.local/bin/poetry run sync_sig --release 9 --sig core --hashed --clean-old-packages --full-run
# This assumes the directories already exist and will update in place.
~/.local/bin/poetry run sync_sig --release 9 --sig core --hashed --clean-old-packages
```
## Syncing Composes
Syncing utilizes the sync scripts provided in the release engineering toolkit.
When the scripts are run, they are usually run with a specific purpose, as each major version may be different.
For SIG's, the only scripts you'll need to know about are `sync-to-staging-sig.sh` and `sync-to-prod-sig.sh`. **Both scripts will delete packages and data that are no longer in the compose.**
```
# The below syncs the core 8 repos to staging
RLVER=8 bash sync-to-staging-sig.sh core
# The below syncs the core 9 repos to staging
RLVER=9 bash sync-to-staging-sig.sh core
# The below syncs everything in staging for 8 core to prod
RLVER=8 bash sync-to-prod-sig.sh core
# The below syncs everything in staging for 9 core to prod
RLVER=9 bash sync-to-prod-sig.sh core
```
Once staging is completed and reviewed, it is synced to production.
```
bash sync-file-list-parallel.sh
```
During this phase, staging is rsynced with production, the file list is updated, and the full time list is also updated to allow mirrors to know that the repositories have been updated and that they can sync.

View File

@ -1,154 +0,0 @@
---
title: Mirror Manager Maintenance
---
This SOP contains most if not all the information needed for SIG/Core to
maintain and operate Mirror Manager for Rocky Linux.
## Contact Information
| | |
| - | - |
| **Owner** | SIG/Core (Release Engineering & Infrastructure) |
| **Email Contact** | infrastructure@rockylinux.org |
| **Email Contact** | releng@rockylinux.org |
| **Mattermost Contacts** | `@label` `@neil` `@tgo` |
| **Mattermost Channels** | `~Infrastructure` |
## Introduction
So you made a bad decision and now have to do things to Mirror Manager. Good
luck.
## Pieces
| **Item** | Runs on... | Software |
|-------------------|------------------|---------------------------------------------------|
| Mirrorlist Server | mirrormanager001 | https://github.com/adrianreber/mirrorlist-server/ |
| Mirror Manager 2 | mirrormanager001 | https://github.com/fedora-infra/mirrormanager2 |
### Mirrorlist Server
This runs two (2) instances. Apache/httpd is configured to send `/mirrorlist`
to one and `/debuglist` to the other.
* Every fifteen (15) minutes: Mirrorlist cache is regenerated
* This queries the database for active mirrors and other information and writes a protobuf. The mirrorlist-server reads the protobuf and responds accordingly.
* Every twenty (20) minutes: Service hosting `/mirrorlist` is restarted
* Every twenty-one (21) minutes: Service hosting `/debuglist` is restarted
Note that the timing for the restart of the mirrorlist instances is arbitrary.
### Mirror Manager 2
This is a uwsgi service fronted by an apache/httpd instance. This is responsible
for everything else that is not `/mirrorlist` or `/debuglist`. This allows the
mirror managers to, well, manage their mirrors.
### CDN
Fastly sits in front of mirror manager. VPN is required to access the `/admin` endpoints.
If the backend of the CDN is down, it will attempt to guess what the user wanted to access and return a result on the dl.rockylinux.org website. For example, a request for AppStream-8 and x86_64 will result in an `AppStream/x86_64/os` directory on dl.rockylinux.org. Note that this isn't perfect, but it helps during potential downtime or patching.
```
Fastly -> www firewall -> mirrormanager server
```
In reality, the flow is a lot more complex, and a diagram should be created to map it out in a more user-friendly manner (@TODO)
```
User -> Fastly -> AWS NLB over TLS, passthru -> www firewall cluster (decrypt TLS) -> mirrormanager server (Rocky CA TLS)
```
## Tasks
Below is a list of possible tasks to take on with mirror manager, depending on the scenario.
### New Release
Before starting the following steps, these items must be completed:
* Production rsync endpoints should have all brand new content
* New content root should be locked down to 750 (without this, mirror manager cannot view it)
* Disable mirrormanager user cronjobs
1. Update the database with the new content. This is run on a schedule normally (see previous section) but can be done manually.
a. As the mirror manager user, run the following:
```
/opt/mirrormanager/scan-primary-mirror-0.4.2/target/debug/scan-primary-mirror --debug --config $HOME/scan-primary-mirror.toml --category 'Rocky Linux'
/opt/mirrormanager/scan-primary-mirror-0.4.2/target/debug/scan-primary-mirror --debug --config $HOME/scan-primary-mirror.toml --category 'Rocky Linux SIGs'
```
2. Update the redirects for `$reponame-$releasever`
a. Use psql to mirrormanager server: `psql -U mirrormanager -W -h mirrormanager_db_host mirrormanager_db`
b. Confirm that all three columns are filled and that the second and third columns are identical:
```
select rr.from_repo AS "From Repo", rr.to_repo AS "To Repo", r.prefix AS "Target Repo" FROM repository_redirect AS rr LEFT JOIN repository AS r ON rr.to_repo = r.prefix GROUP BY r.prefix, rr.to_repo, rr.from_repo ORDER BY r.prefix ASC;
```
c. Change the `majorversion` redirects to point to the new point release, for example:
```
update repository_redirect set to_repo = regexp_replace(to_repo, '9\.2', '9.3') where from_repo ~ '(\w+)-9-(debug|source)';
```
d. Insert new redirects for the major version expected by the installer
```
insert into repository_redirect (from_repo,to_repo) select REGEXP_REPLACE(rr.from_repo,'9\.2','9.3'),REGEXP_REPLACE(rr.to_repo,'9\.2','9.3') FROM repository_redirect AS rr WHERE from_repo ~ '(\w+)-9.2';
```
3. Generate the mirrorlist cache and restart the debuglist and verify.
Once the bitflip is initiated, restart mirrorlist and reenable all cronjobs.
### Out-of-date Mirrors
1. Get current shasum of repomd.xml. For example: `shasum=$(curl https://dl.rockylinux.org/pub/rocky/9.3/BaseOS/x86_64/os/repodata/repomd.xml | sha256sum)`
2. Compare against latest propagation log:
```
tail -latr /var/log/mirrormanager/propagation/rocky-9.3-BaseOS-x86_64_propagation.log.*
export VER=9.3
awk -v shasum=$(curl -s https://dl.rockylinux.org/pub/rocky/$VER/BaseOS/x86_64/os/repodata/repomd.xml | sha256sum | awk '{print $1}') -F'::' '{split($0,data,":")} {if ($4 != shasum) {print data[5], data[6], $2, $7}}' < $(find /var/log/mirrormanager/propagation/ -name "rocky-${VER}-BaseOS-x86_64_propagation.log*" -mtime -1 | tail -1)
```
This will generate a table. You can take the IDs in the first column and use the database to disable them by ID (table name: hosts) or go to https://mirrors.rockylinux.org/mirrormanager/host/ID and uncheck 'User active'.
Users can change user active, *but* they cannot change admin active. It is better to flip user active in this case.
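A minimal sketch of the database route, reusing the psql connection shown earlier; the table and column names here ("hosts", "user_active") are assumptions based on the description above, so verify them against the actual schema first.
```
# Hypothetical: flip "user active" off for the mirror IDs found above
psql -U mirrormanager -W -h mirrormanager_db_host mirrormanager_db \
  -c "UPDATE hosts SET user_active = false WHERE id IN (164, 173, 92);"
```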
Admins can also view https://mirrors.rockylinux.org/mirrormanager/admin/all_sites if necessary.
Example of table columns:
!!! Note
These mirrors are listed solely as an example and not to call anyone out; every mirror shows up here at some point due to natural variations in how mirrors sync.
```
[mirrormanager@ord1-prod-mirrormanager001 propagation]$ awk -v shasum=$(curl -s https://dl.rockylinux.org/pub/rocky/9.3/BaseOS/x86_64/os/repodata/repomd.xml | sha256sum | awk '{print $1}') -F'::' '{split($0,data,":")} {if ($4 != shasum) {print data[5], data[6], $2, $7}}' < rocky-9.3-BaseOS-x86_64_propagation.log.1660611632 | column -t
164 mirror.host.ag http://mirror.host.ag/rocky/9.3/BaseOS/x86_64/os/repodata/repomd.xml 404
173 rocky.centos-repo.net http://rocky.centos-repo.net/9.3/BaseOS/x86_64/os/repodata/repomd.xml 403
92 rocky.mirror.co.ge http://rocky.mirror.co.ge/9.3/BaseOS/x86_64/os/repodata/repomd.xml 404
289 mirror.vsys.host http://mirror.vsys.host/rockylinux/9.3/BaseOS/x86_64/os/repodata/repomd.xml 404
269 mirrors.rackbud.com http://mirrors.rackbud.com/rocky/9.3/BaseOS/x86_64/os/repodata/repomd.xml 200
295 mirror.ps.kz http://mirror.ps.kz/rocky/9.3/BaseOS/x86_64/os/repodata/repomd.xml 200
114 mirror.liteserver.nl http://rockylinux.mirror.liteserver.nl/9.3/BaseOS/x86_64/os/repodata/repomd.xml 200
275 mirror.upsi.edu.my http://mirror.upsi.edu.my/rocky/9.3/BaseOS/x86_64/os/repodata/repomd.xml 200
190 mirror.kku.ac.th http://mirror.kku.ac.th/rocky-linux/9.3/BaseOS/x86_64/os/repodata/repomd.xml 404
292 mirrors.cat.pdx.edu http://mirrors.cat.pdx.edu/rocky/9.3/BaseOS/x86_64/os/repodata/repomd.xml 200
370 mirrors.gbnetwork.com http://mirrors.gbnetwork.com/rocky/9.3/BaseOS/x86_64/os/repodata/repomd.xml 404
308 mirror.ihost.md http://mirror.ihost.md/rockylinux/9.3/BaseOS/x86_64/os/repodata/repomd.xml 404
87 mirror.freedif.org http://mirror.freedif.org/Rocky/9.3/BaseOS/x86_64/os/repodata/repomd.xml 404
194 mirrors.bestthaihost.com http://mirrors.bestthaihost.com/rocky/9.3/BaseOS/x86_64/os/repodata/repomd.xml 404
30 mirror.admax.se http://mirror.admax.se/rocky/9.3/BaseOS/x86_64/os/repodata/repomd.xml 200
195 mirror.uepg.br http://mirror.uepg.br/rocky/9.3/BaseOS/x86_64/os/repodata/repomd.xml 404
247 mirrors.ipserverone.com http://mirrors.ipserverone.com/rocky/9.3/BaseOS/x86_64/os/repodata/repomd.xml 404
```

View File

@ -1,139 +0,0 @@
---
title: Rocky Release Procedures for SIG/Core (RelEng/Infrastructure)
---
This SOP contains all the steps required by SIG/Core (a mix of Release Engineering and Infrastructure) to perform releases of all Rocky Linux versions. This work is done in collaboration across the entire group of engineers.
## Contact Information
| | |
| - | - |
| **Owner** | SIG/Core (Release Engineering & Infrastructure) |
| **Email Contact** | infrastructure@rockylinux.org |
| **Email Contact** | releng@rockylinux.org |
| **Mattermost Contacts** | `@label` `@neil` `@tgo` `@skip77` `@mustafa` `@sherif` `@pgreco` |
| **Mattermost Channels** | `~Infrastructure` |
## Preparation
## Notes about Release Day
At a minimum of two (2) days before release, the following should be true:
1. Torrents should be set up. All files can be synced with the seed box(es) but
not yet published. The data should be verified using sha256sum and compared to
the CHECKSUM files provided with the files.
2. Website should be ready (typically with an open PR in github). The content
should be verified that the design and content are correct and finalized.
3. Enough mirrors should be set up. This essentially means that all content for
a release should be synced to our primary mirror with the executable bit turned
off, and the content should also be hard linked. In theory, mirror manager can
be queried to verify if mirrors are or appear to be in sync.
## Notes about Patch Days
At a minimum of one (1) to two (2) days before patch day, the following should be true:
1. Updates should be completed in the build system, and verified in staging.
2. Updates should be sent to production and file lists updated to allow mirrors
to sync.
## Notes Prior to Release Day
Ensure the SIG/Core Checklist is read thoroughly and executed as listed.
## Release Day
### Priorities
During release day, these should be verified/completed in order:
1. Website - The primary website and user landing page at rockylinux.org should allow the user to efficiently click through to a download link of an ISO, image, or torrent. It must be kept up.
2. Torrent - The seed box(es) should be primed and ready to go for users
downloading via torrent.
3. Release Notes & Documentation - The release notes are often on the same
website as the documentation. The main website and where applicable in the docs
should refer to the Release Notes of Rocky Linux.
4. Wiki - If applicable, the necessary changes and resources should be available for a release. In particular, if a major release has new repos or changed repo names, this should be documented.
5. Everything else!
## Resources
## SIG/Core Checklist
### Beta
* Compose Completed
* Repoclosure must be checked and pass
* Lorax Run
* ISO's are built
* Cloud Images built
* Live Images built
* Compose Synced to Staging
* AWS/Azure Images in Marketplace
* Vagrant Images
* Container Images
* Mirror Manager
* Ready to Migrate from previous beta release (rltype=beta)
* Boot image install migration from previous beta release
* Pass image to Testing Team for final validation
### Release Candidate
* Compose Completed
* Repoclosure must be checked and pass
* Lorax Run
* ISO's are built
* Cloud Images built
* Live Images built
* Compose Synced to Staging
* AWS/Azure Images in Marketplace
* Vagrant Images
* Container Images
* Mirror Manager
* Ready to Migrate from previous release
* Boot image install migration from previous release
* Pass image to Testing Team for validation
### Final
* Compose Completed
* Repoclosure must be checked and pass
* Lorax Run
* ISO's are built
* Cloud Images built
* Live Images built
* Compose Synced to Staging
* AWS/Azure Images in Marketplace
* Vagrant Images
* Container Images
* Mirror Manager
* Ready to Migrate from previous release
* Boot image install migration from previous release
* Pass image to Testing Team for final validation
* Sync to Production
* Sync to Europe Mirror if applicable
* Hardlink Run
* Bitflip after 24-48 Hours
{% include "resources_bottom.md" %}

View File

@ -1,125 +0,0 @@
---
title: Generalized Prep Checklist for Upcoming Releases
---
This SOP contains general checklists required by SIG/Core to prepare and plan
for the upcoming release. This work, in general, is required to be done on a
routine basis, even months out before the next major or minor release, as it
requires monitoring of upstream's (CentOS Stream) work to ensure Rocky Linux
will remain ready and compatible with Red Hat Enterprise Linux.
## Contact Information
| | |
| - | - |
| **Owner** | SIG/Core (Release Engineering & Infrastructure) |
| **Email Contact** | infrastructure@rockylinux.org |
| **Email Contact** | releng@rockylinux.org |
| **Mattermost Contacts** | `@label` `@neil` `@tgo` `@skip77` `@mustafa` `@sherif` `@pgreco` |
| **Mattermost Channels** | `~Infrastructure` |
## General Upstream Monitoring
It is expected to monitor the following repositories upstream, as these will
indicate what is coming up for a given major or point release. These
repositories are found at the Red Hat gitlab.
* centos-release
* centos-logos
* pungi-centos
* comps
* module-defaults
These repositories can be monitored by setting the bell (notification) icon to "all activity".
Upon changes to the upstream repositories, a SIG/Core member should analyze the changes and apply them to the lookahead branches:
* rocky-release
* Manual changes required
* rocky-logos
* Manual changes required
* pungi-rocky
* Run `sync-from-upstream`
* peridot-rocky
* Configurations are generated using peridot tools
* comps
* Run `sync-from-upstream`
* rocky-module-defaults
* Run `sync-from-upstream`
## General Downward Merging
Repositories that generally track for LookAhead and Beta releases will flow
downward to the stable branch. For example:
```
* rXs / rXlh
|
|----> rX-beta
|
|----> rX
```
This applies to any specific Rocky repo, such as comps, pungi, peridot-config, and so on. As it is expected that some repos will deviate in commit history, it is OK to force push, under the assumption that changes made in the lower branch exist in the upper branch. That way you can avoid changes or functionality being reverted by accident.
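A hypothetical sketch of that downward flow for a 9.x repo, assuming the branch names from the diagram above (r9lh and r9-beta) and that anything unique to r9-beta already exists in r9lh:
```
git fetch origin
git checkout r9-beta
# Assumes r9lh already contains everything that matters from r9-beta
git reset --hard origin/r9lh
git push --force-with-lease origin r9-beta
```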
## General Package Patching
There are packages that are typically patched for the purpose of debranding. A list of patched packages is typically maintained in a metadata repository. The obvious ones are listed below and should be monitored and maintained properly:
* abrt
* anaconda
* anaconda-user-help
* chrony
* cockpit
* dhcp
* dnf
* firefox
* fwupd
* gcc
* gnome-session
* gnome-settings-daemon
* grub2
* initial-setup
* kernel
* kernel-rt
* libdnf
* libreoffice
* libreport
* lorax-templates-rhel
* nginx
* opa-ff
* opa-fm
* openldap
* openscap
* osbuild
* osbuild-composer
* PackageKit
* pesign
* python-pip
* redhat-rpm-config
* scap-security-guide
* shim
* shim-unsigned-x64
* shim-unsigned-aarch64
* subscription-manager
* systemd
* thunderbird

View File

@ -1,20 +0,0 @@
---
title: What We Do
---
Release Engineering (SIG/Core) was brought together as a combination of varying expertise (development and infrastructure) to fill in gaps of knowledge, but also to ensure that the primary goal of having a stable release of Rocky Linux is reached.
Some of the things we do in pursuit of our mission goals:
* Continuous preparation for upcoming changes from upstream (Fedora and CentOS Stream)
* Distribution release and maintenance
* Design and collaboration for the Peridot build system
* Design and development work to integrate all components together
* Maintenance of the infrastructure used to build and maintain Rocky Linux (such as ansible roles and playbooks)
* Working with the testing team with images and a platform to test
* Providing resources for Special Interest Groups
* Providing assistance and resources for users within the community to meet their goals
"Why the name SIG/Core?"
While not an actual [Special Interest Group](https://wiki.rockylinux.org/special_interest_groups/), the reality is that Release Engineering is ultimately the "core" of Rocky Linux's production. The idea of "SIG/Core" stemmed from the thought that without this group, Rocky Linux would not exist as it is now, so we are "core" to its existence. The other idea was that SIG/Core would eventually branch out elsewhere. Where this will go is uncertain.

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

1356
documentation/index.html Normal file

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

Some files were not shown because too many files have changed in this diff