diff --git a/404.html b/404.html index d04fb13..0a1118e 100644 --- a/404.html +++ b/404.html @@ -13,7 +13,7 @@ - + @@ -21,10 +21,10 @@ - + - + @@ -105,25 +105,31 @@ -
- + + - + + + + + + + - + + + + + + + - - - - - - - -
+ + @@ -682,7 +688,7 @@ - + diff --git a/assets/javascripts/bundle.51198bba.min.js b/assets/javascripts/bundle.51198bba.min.js deleted file mode 100644 index 31bd041..0000000 --- a/assets/javascripts/bundle.51198bba.min.js +++ /dev/null @@ -1,29 +0,0 @@ -"use strict";(()=>{var Ri=Object.create;var gr=Object.defineProperty;var ki=Object.getOwnPropertyDescriptor;var Hi=Object.getOwnPropertyNames,Ht=Object.getOwnPropertySymbols,Pi=Object.getPrototypeOf,yr=Object.prototype.hasOwnProperty,on=Object.prototype.propertyIsEnumerable;var nn=(e,t,r)=>t in e?gr(e,t,{enumerable:!0,configurable:!0,writable:!0,value:r}):e[t]=r,P=(e,t)=>{for(var r in t||(t={}))yr.call(t,r)&&nn(e,r,t[r]);if(Ht)for(var r of Ht(t))on.call(t,r)&&nn(e,r,t[r]);return e};var an=(e,t)=>{var r={};for(var n in e)yr.call(e,n)&&t.indexOf(n)<0&&(r[n]=e[n]);if(e!=null&&Ht)for(var n of Ht(e))t.indexOf(n)<0&&on.call(e,n)&&(r[n]=e[n]);return r};var Pt=(e,t)=>()=>(t||e((t={exports:{}}).exports,t),t.exports);var $i=(e,t,r,n)=>{if(t&&typeof t=="object"||typeof t=="function")for(let o of Hi(t))!yr.call(e,o)&&o!==r&&gr(e,o,{get:()=>t[o],enumerable:!(n=ki(t,o))||n.enumerable});return e};var yt=(e,t,r)=>(r=e!=null?Ri(Pi(e)):{},$i(t||!e||!e.__esModule?gr(r,"default",{value:e,enumerable:!0}):r,e));var cn=Pt((xr,sn)=>{(function(e,t){typeof xr=="object"&&typeof sn!="undefined"?t():typeof define=="function"&&define.amd?define(t):t()})(xr,function(){"use strict";function e(r){var n=!0,o=!1,i=null,s={text:!0,search:!0,url:!0,tel:!0,email:!0,password:!0,number:!0,date:!0,month:!0,week:!0,time:!0,datetime:!0,"datetime-local":!0};function a(T){return!!(T&&T!==document&&T.nodeName!=="HTML"&&T.nodeName!=="BODY"&&"classList"in T&&"contains"in T.classList)}function c(T){var Qe=T.type,De=T.tagName;return!!(De==="INPUT"&&s[Qe]&&!T.readOnly||De==="TEXTAREA"&&!T.readOnly||T.isContentEditable)}function f(T){T.classList.contains("focus-visible")||(T.classList.add("focus-visible"),T.setAttribute("data-focus-visible-added",""))}function u(T){T.hasAttribute("data-focus-visible-added")&&(T.classList.remove("focus-visible"),T.removeAttribute("data-focus-visible-added"))}function p(T){T.metaKey||T.altKey||T.ctrlKey||(a(r.activeElement)&&f(r.activeElement),n=!0)}function m(T){n=!1}function d(T){a(T.target)&&(n||c(T.target))&&f(T.target)}function h(T){a(T.target)&&(T.target.classList.contains("focus-visible")||T.target.hasAttribute("data-focus-visible-added"))&&(o=!0,window.clearTimeout(i),i=window.setTimeout(function(){o=!1},100),u(T.target))}function v(T){document.visibilityState==="hidden"&&(o&&(n=!0),G())}function G(){document.addEventListener("mousemove",N),document.addEventListener("mousedown",N),document.addEventListener("mouseup",N),document.addEventListener("pointermove",N),document.addEventListener("pointerdown",N),document.addEventListener("pointerup",N),document.addEventListener("touchmove",N),document.addEventListener("touchstart",N),document.addEventListener("touchend",N)}function oe(){document.removeEventListener("mousemove",N),document.removeEventListener("mousedown",N),document.removeEventListener("mouseup",N),document.removeEventListener("pointermove",N),document.removeEventListener("pointerdown",N),document.removeEventListener("pointerup",N),document.removeEventListener("touchmove",N),document.removeEventListener("touchstart",N),document.removeEventListener("touchend",N)}function 
N(T){T.target.nodeName&&T.target.nodeName.toLowerCase()==="html"||(n=!1,oe())}document.addEventListener("keydown",p,!0),document.addEventListener("mousedown",m,!0),document.addEventListener("pointerdown",m,!0),document.addEventListener("touchstart",m,!0),document.addEventListener("visibilitychange",v,!0),G(),r.addEventListener("focus",d,!0),r.addEventListener("blur",h,!0),r.nodeType===Node.DOCUMENT_FRAGMENT_NODE&&r.host?r.host.setAttribute("data-js-focus-visible",""):r.nodeType===Node.DOCUMENT_NODE&&(document.documentElement.classList.add("js-focus-visible"),document.documentElement.setAttribute("data-js-focus-visible",""))}if(typeof window!="undefined"&&typeof document!="undefined"){window.applyFocusVisiblePolyfill=e;var t;try{t=new CustomEvent("focus-visible-polyfill-ready")}catch(r){t=document.createEvent("CustomEvent"),t.initCustomEvent("focus-visible-polyfill-ready",!1,!1,{})}window.dispatchEvent(t)}typeof document!="undefined"&&e(document)})});var fn=Pt(Er=>{(function(e){var t=function(){try{return!!Symbol.iterator}catch(f){return!1}},r=t(),n=function(f){var u={next:function(){var p=f.shift();return{done:p===void 0,value:p}}};return r&&(u[Symbol.iterator]=function(){return u}),u},o=function(f){return encodeURIComponent(f).replace(/%20/g,"+")},i=function(f){return decodeURIComponent(String(f).replace(/\+/g," "))},s=function(){var f=function(p){Object.defineProperty(this,"_entries",{writable:!0,value:{}});var m=typeof p;if(m!=="undefined")if(m==="string")p!==""&&this._fromString(p);else if(p instanceof f){var d=this;p.forEach(function(oe,N){d.append(N,oe)})}else if(p!==null&&m==="object")if(Object.prototype.toString.call(p)==="[object Array]")for(var h=0;hd[0]?1:0}),f._entries&&(f._entries={});for(var p=0;p1?i(d[1]):"")}})})(typeof global!="undefined"?global:typeof window!="undefined"?window:typeof self!="undefined"?self:Er);(function(e){var t=function(){try{var o=new e.URL("b","http://a");return o.pathname="c d",o.href==="http://a/c%20d"&&o.searchParams}catch(i){return!1}},r=function(){var o=e.URL,i=function(c,f){typeof c!="string"&&(c=String(c)),f&&typeof f!="string"&&(f=String(f));var u=document,p;if(f&&(e.location===void 0||f!==e.location.href)){f=f.toLowerCase(),u=document.implementation.createHTMLDocument(""),p=u.createElement("base"),p.href=f,u.head.appendChild(p);try{if(p.href.indexOf(f)!==0)throw new Error(p.href)}catch(T){throw new Error("URL unable to set base "+f+" due to "+T)}}var m=u.createElement("a");m.href=c,p&&(u.body.appendChild(m),m.href=m.href);var d=u.createElement("input");if(d.type="url",d.value=c,m.protocol===":"||!/:/.test(m.href)||!d.checkValidity()&&!f)throw new TypeError("Invalid URL");Object.defineProperty(this,"_anchorElement",{value:m});var h=new e.URLSearchParams(this.search),v=!0,G=!0,oe=this;["append","delete","set"].forEach(function(T){var Qe=h[T];h[T]=function(){Qe.apply(h,arguments),v&&(G=!1,oe.search=h.toString(),G=!0)}}),Object.defineProperty(this,"searchParams",{value:h,enumerable:!0});var N=void 0;Object.defineProperty(this,"_updateSearchParams",{enumerable:!1,configurable:!1,writable:!1,value:function(){this.search!==N&&(N=this.search,G&&(v=!1,this.searchParams._fromString(this.search),v=!0))}})},s=i.prototype,a=function(c){Object.defineProperty(s,c,{get:function(){return this._anchorElement[c]},set:function(f){this._anchorElement[c]=f},enumerable:!0})};["hash","host","hostname","port","protocol"].forEach(function(c){a(c)}),Object.defineProperty(s,"search",{get:function(){return 
this._anchorElement.search},set:function(c){this._anchorElement.search=c,this._updateSearchParams()},enumerable:!0}),Object.defineProperties(s,{toString:{get:function(){var c=this;return function(){return c.href}}},href:{get:function(){return this._anchorElement.href.replace(/\?$/,"")},set:function(c){this._anchorElement.href=c,this._updateSearchParams()},enumerable:!0},pathname:{get:function(){return this._anchorElement.pathname.replace(/(^\/?)/,"/")},set:function(c){this._anchorElement.pathname=c},enumerable:!0},origin:{get:function(){var c={"http:":80,"https:":443,"ftp:":21}[this._anchorElement.protocol],f=this._anchorElement.port!=c&&this._anchorElement.port!=="";return this._anchorElement.protocol+"//"+this._anchorElement.hostname+(f?":"+this._anchorElement.port:"")},enumerable:!0},password:{get:function(){return""},set:function(c){},enumerable:!0},username:{get:function(){return""},set:function(c){},enumerable:!0}}),i.createObjectURL=function(c){return o.createObjectURL.apply(o,arguments)},i.revokeObjectURL=function(c){return o.revokeObjectURL.apply(o,arguments)},e.URL=i};if(t()||r(),e.location!==void 0&&!("origin"in e.location)){var n=function(){return e.location.protocol+"//"+e.location.hostname+(e.location.port?":"+e.location.port:"")};try{Object.defineProperty(e.location,"origin",{get:n,enumerable:!0})}catch(o){setInterval(function(){e.location.origin=n()},100)}}})(typeof global!="undefined"?global:typeof window!="undefined"?window:typeof self!="undefined"?self:Er)});var Kr=Pt((Mt,qr)=>{/*! - * clipboard.js v2.0.11 - * https://clipboardjs.com/ - * - * Licensed MIT © Zeno Rocha - */(function(t,r){typeof Mt=="object"&&typeof qr=="object"?qr.exports=r():typeof define=="function"&&define.amd?define([],r):typeof Mt=="object"?Mt.ClipboardJS=r():t.ClipboardJS=r()})(Mt,function(){return function(){var e={686:function(n,o,i){"use strict";i.d(o,{default:function(){return Ci}});var s=i(279),a=i.n(s),c=i(370),f=i.n(c),u=i(817),p=i.n(u);function m(j){try{return document.execCommand(j)}catch(O){return!1}}var d=function(O){var E=p()(O);return m("cut"),E},h=d;function v(j){var O=document.documentElement.getAttribute("dir")==="rtl",E=document.createElement("textarea");E.style.fontSize="12pt",E.style.border="0",E.style.padding="0",E.style.margin="0",E.style.position="absolute",E.style[O?"right":"left"]="-9999px";var H=window.pageYOffset||document.documentElement.scrollTop;return E.style.top="".concat(H,"px"),E.setAttribute("readonly",""),E.value=j,E}var G=function(O,E){var H=v(O);E.container.appendChild(H);var I=p()(H);return m("copy"),H.remove(),I},oe=function(O){var E=arguments.length>1&&arguments[1]!==void 0?arguments[1]:{container:document.body},H="";return typeof O=="string"?H=G(O,E):O instanceof HTMLInputElement&&!["text","search","url","tel","password"].includes(O==null?void 0:O.type)?H=G(O.value,E):(H=p()(O),m("copy")),H},N=oe;function T(j){return typeof Symbol=="function"&&typeof Symbol.iterator=="symbol"?T=function(E){return typeof E}:T=function(E){return E&&typeof Symbol=="function"&&E.constructor===Symbol&&E!==Symbol.prototype?"symbol":typeof E},T(j)}var Qe=function(){var O=arguments.length>0&&arguments[0]!==void 0?arguments[0]:{},E=O.action,H=E===void 0?"copy":E,I=O.container,q=O.target,Me=O.text;if(H!=="copy"&&H!=="cut")throw new Error('Invalid "action" value, use either "copy" or "cut"');if(q!==void 0)if(q&&T(q)==="object"&&q.nodeType===1){if(H==="copy"&&q.hasAttribute("disabled"))throw new Error('Invalid "target" attribute. 
Please use "readonly" instead of "disabled" attribute');if(H==="cut"&&(q.hasAttribute("readonly")||q.hasAttribute("disabled")))throw new Error(`Invalid "target" attribute. You can't cut text from elements with "readonly" or "disabled" attributes`)}else throw new Error('Invalid "target" value, use a valid Element');if(Me)return N(Me,{container:I});if(q)return H==="cut"?h(q):N(q,{container:I})},De=Qe;function $e(j){return typeof Symbol=="function"&&typeof Symbol.iterator=="symbol"?$e=function(E){return typeof E}:$e=function(E){return E&&typeof Symbol=="function"&&E.constructor===Symbol&&E!==Symbol.prototype?"symbol":typeof E},$e(j)}function wi(j,O){if(!(j instanceof O))throw new TypeError("Cannot call a class as a function")}function rn(j,O){for(var E=0;E0&&arguments[0]!==void 0?arguments[0]:{};this.action=typeof I.action=="function"?I.action:this.defaultAction,this.target=typeof I.target=="function"?I.target:this.defaultTarget,this.text=typeof I.text=="function"?I.text:this.defaultText,this.container=$e(I.container)==="object"?I.container:document.body}},{key:"listenClick",value:function(I){var q=this;this.listener=f()(I,"click",function(Me){return q.onClick(Me)})}},{key:"onClick",value:function(I){var q=I.delegateTarget||I.currentTarget,Me=this.action(q)||"copy",kt=De({action:Me,container:this.container,target:this.target(q),text:this.text(q)});this.emit(kt?"success":"error",{action:Me,text:kt,trigger:q,clearSelection:function(){q&&q.focus(),window.getSelection().removeAllRanges()}})}},{key:"defaultAction",value:function(I){return vr("action",I)}},{key:"defaultTarget",value:function(I){var q=vr("target",I);if(q)return document.querySelector(q)}},{key:"defaultText",value:function(I){return vr("text",I)}},{key:"destroy",value:function(){this.listener.destroy()}}],[{key:"copy",value:function(I){var q=arguments.length>1&&arguments[1]!==void 0?arguments[1]:{container:document.body};return N(I,q)}},{key:"cut",value:function(I){return h(I)}},{key:"isSupported",value:function(){var I=arguments.length>0&&arguments[0]!==void 0?arguments[0]:["copy","cut"],q=typeof I=="string"?[I]:I,Me=!!document.queryCommandSupported;return q.forEach(function(kt){Me=Me&&!!document.queryCommandSupported(kt)}),Me}}]),E}(a()),Ci=Ai},828:function(n){var o=9;if(typeof Element!="undefined"&&!Element.prototype.matches){var i=Element.prototype;i.matches=i.matchesSelector||i.mozMatchesSelector||i.msMatchesSelector||i.oMatchesSelector||i.webkitMatchesSelector}function s(a,c){for(;a&&a.nodeType!==o;){if(typeof a.matches=="function"&&a.matches(c))return a;a=a.parentNode}}n.exports=s},438:function(n,o,i){var s=i(828);function a(u,p,m,d,h){var v=f.apply(this,arguments);return u.addEventListener(m,v,h),{destroy:function(){u.removeEventListener(m,v,h)}}}function c(u,p,m,d,h){return typeof u.addEventListener=="function"?a.apply(null,arguments):typeof m=="function"?a.bind(null,document).apply(null,arguments):(typeof u=="string"&&(u=document.querySelectorAll(u)),Array.prototype.map.call(u,function(v){return a(v,p,m,d,h)}))}function f(u,p,m,d){return function(h){h.delegateTarget=s(h.target,p),h.delegateTarget&&d.call(u,h)}}n.exports=c},879:function(n,o){o.node=function(i){return i!==void 0&&i instanceof HTMLElement&&i.nodeType===1},o.nodeList=function(i){var s=Object.prototype.toString.call(i);return i!==void 0&&(s==="[object NodeList]"||s==="[object HTMLCollection]")&&"length"in i&&(i.length===0||o.node(i[0]))},o.string=function(i){return typeof i=="string"||i instanceof String},o.fn=function(i){var 
s=Object.prototype.toString.call(i);return s==="[object Function]"}},370:function(n,o,i){var s=i(879),a=i(438);function c(m,d,h){if(!m&&!d&&!h)throw new Error("Missing required arguments");if(!s.string(d))throw new TypeError("Second argument must be a String");if(!s.fn(h))throw new TypeError("Third argument must be a Function");if(s.node(m))return f(m,d,h);if(s.nodeList(m))return u(m,d,h);if(s.string(m))return p(m,d,h);throw new TypeError("First argument must be a String, HTMLElement, HTMLCollection, or NodeList")}function f(m,d,h){return m.addEventListener(d,h),{destroy:function(){m.removeEventListener(d,h)}}}function u(m,d,h){return Array.prototype.forEach.call(m,function(v){v.addEventListener(d,h)}),{destroy:function(){Array.prototype.forEach.call(m,function(v){v.removeEventListener(d,h)})}}}function p(m,d,h){return a(document.body,m,d,h)}n.exports=c},817:function(n){function o(i){var s;if(i.nodeName==="SELECT")i.focus(),s=i.value;else if(i.nodeName==="INPUT"||i.nodeName==="TEXTAREA"){var a=i.hasAttribute("readonly");a||i.setAttribute("readonly",""),i.select(),i.setSelectionRange(0,i.value.length),a||i.removeAttribute("readonly"),s=i.value}else{i.hasAttribute("contenteditable")&&i.focus();var c=window.getSelection(),f=document.createRange();f.selectNodeContents(i),c.removeAllRanges(),c.addRange(f),s=c.toString()}return s}n.exports=o},279:function(n){function o(){}o.prototype={on:function(i,s,a){var c=this.e||(this.e={});return(c[i]||(c[i]=[])).push({fn:s,ctx:a}),this},once:function(i,s,a){var c=this;function f(){c.off(i,f),s.apply(a,arguments)}return f._=s,this.on(i,f,a)},emit:function(i){var s=[].slice.call(arguments,1),a=((this.e||(this.e={}))[i]||[]).slice(),c=0,f=a.length;for(c;c{"use strict";/*! - * escape-html - * Copyright(c) 2012-2013 TJ Holowaychuk - * Copyright(c) 2015 Andreas Lubbe - * Copyright(c) 2015 Tiancheng "Timothy" Gu - * MIT Licensed - */var ns=/["'&<>]/;Go.exports=os;function os(e){var t=""+e,r=ns.exec(t);if(!r)return t;var n,o="",i=0,s=0;for(i=r.index;i0&&i[i.length-1])&&(f[0]===6||f[0]===2)){r=0;continue}if(f[0]===3&&(!i||f[1]>i[0]&&f[1]=e.length&&(e=void 0),{value:e&&e[n++],done:!e}}};throw new TypeError(t?"Object is not iterable.":"Symbol.iterator is not defined.")}function W(e,t){var r=typeof Symbol=="function"&&e[Symbol.iterator];if(!r)return e;var n=r.call(e),o,i=[],s;try{for(;(t===void 0||t-- >0)&&!(o=n.next()).done;)i.push(o.value)}catch(a){s={error:a}}finally{try{o&&!o.done&&(r=n.return)&&r.call(n)}finally{if(s)throw s.error}}return i}function D(e,t,r){if(r||arguments.length===2)for(var n=0,o=t.length,i;n1||a(m,d)})})}function a(m,d){try{c(n[m](d))}catch(h){p(i[0][3],h)}}function c(m){m.value instanceof et?Promise.resolve(m.value.v).then(f,u):p(i[0][2],m)}function f(m){a("next",m)}function u(m){a("throw",m)}function p(m,d){m(d),i.shift(),i.length&&a(i[0][0],i[0][1])}}function ln(e){if(!Symbol.asyncIterator)throw new TypeError("Symbol.asyncIterator is not defined.");var t=e[Symbol.asyncIterator],r;return t?t.call(e):(e=typeof Ee=="function"?Ee(e):e[Symbol.iterator](),r={},n("next"),n("throw"),n("return"),r[Symbol.asyncIterator]=function(){return this},r);function n(i){r[i]=e[i]&&function(s){return new Promise(function(a,c){s=e[i](s),o(a,c,s.done,s.value)})}}function o(i,s,a,c){Promise.resolve(c).then(function(f){i({value:f,done:a})},s)}}function C(e){return typeof e=="function"}function at(e){var t=function(n){Error.call(n),n.stack=new Error().stack},r=e(t);return r.prototype=Object.create(Error.prototype),r.prototype.constructor=r,r}var 
It=at(function(e){return function(r){e(this),this.message=r?r.length+` errors occurred during unsubscription: -`+r.map(function(n,o){return o+1+") "+n.toString()}).join(` - `):"",this.name="UnsubscriptionError",this.errors=r}});function Ve(e,t){if(e){var r=e.indexOf(t);0<=r&&e.splice(r,1)}}var Ie=function(){function e(t){this.initialTeardown=t,this.closed=!1,this._parentage=null,this._finalizers=null}return e.prototype.unsubscribe=function(){var t,r,n,o,i;if(!this.closed){this.closed=!0;var s=this._parentage;if(s)if(this._parentage=null,Array.isArray(s))try{for(var a=Ee(s),c=a.next();!c.done;c=a.next()){var f=c.value;f.remove(this)}}catch(v){t={error:v}}finally{try{c&&!c.done&&(r=a.return)&&r.call(a)}finally{if(t)throw t.error}}else s.remove(this);var u=this.initialTeardown;if(C(u))try{u()}catch(v){i=v instanceof It?v.errors:[v]}var p=this._finalizers;if(p){this._finalizers=null;try{for(var m=Ee(p),d=m.next();!d.done;d=m.next()){var h=d.value;try{mn(h)}catch(v){i=i!=null?i:[],v instanceof It?i=D(D([],W(i)),W(v.errors)):i.push(v)}}}catch(v){n={error:v}}finally{try{d&&!d.done&&(o=m.return)&&o.call(m)}finally{if(n)throw n.error}}}if(i)throw new It(i)}},e.prototype.add=function(t){var r;if(t&&t!==this)if(this.closed)mn(t);else{if(t instanceof e){if(t.closed||t._hasParent(this))return;t._addParent(this)}(this._finalizers=(r=this._finalizers)!==null&&r!==void 0?r:[]).push(t)}},e.prototype._hasParent=function(t){var r=this._parentage;return r===t||Array.isArray(r)&&r.includes(t)},e.prototype._addParent=function(t){var r=this._parentage;this._parentage=Array.isArray(r)?(r.push(t),r):r?[r,t]:t},e.prototype._removeParent=function(t){var r=this._parentage;r===t?this._parentage=null:Array.isArray(r)&&Ve(r,t)},e.prototype.remove=function(t){var r=this._finalizers;r&&Ve(r,t),t instanceof e&&t._removeParent(this)},e.EMPTY=function(){var t=new e;return t.closed=!0,t}(),e}();var Sr=Ie.EMPTY;function jt(e){return e instanceof Ie||e&&"closed"in e&&C(e.remove)&&C(e.add)&&C(e.unsubscribe)}function mn(e){C(e)?e():e.unsubscribe()}var Le={onUnhandledError:null,onStoppedNotification:null,Promise:void 0,useDeprecatedSynchronousErrorHandling:!1,useDeprecatedNextContext:!1};var st={setTimeout:function(e,t){for(var r=[],n=2;n0},enumerable:!1,configurable:!0}),t.prototype._trySubscribe=function(r){return this._throwIfClosed(),e.prototype._trySubscribe.call(this,r)},t.prototype._subscribe=function(r){return this._throwIfClosed(),this._checkFinalizedStatuses(r),this._innerSubscribe(r)},t.prototype._innerSubscribe=function(r){var n=this,o=this,i=o.hasError,s=o.isStopped,a=o.observers;return i||s?Sr:(this.currentObservers=null,a.push(r),new Ie(function(){n.currentObservers=null,Ve(a,r)}))},t.prototype._checkFinalizedStatuses=function(r){var n=this,o=n.hasError,i=n.thrownError,s=n.isStopped;o?r.error(i):s&&r.complete()},t.prototype.asObservable=function(){var r=new F;return r.source=this,r},t.create=function(r,n){return new En(r,n)},t}(F);var En=function(e){ie(t,e);function t(r,n){var o=e.call(this)||this;return o.destination=r,o.source=n,o}return t.prototype.next=function(r){var n,o;(o=(n=this.destination)===null||n===void 0?void 0:n.next)===null||o===void 0||o.call(n,r)},t.prototype.error=function(r){var n,o;(o=(n=this.destination)===null||n===void 0?void 0:n.error)===null||o===void 0||o.call(n,r)},t.prototype.complete=function(){var r,n;(n=(r=this.destination)===null||r===void 0?void 0:r.complete)===null||n===void 0||n.call(r)},t.prototype._subscribe=function(r){var n,o;return(o=(n=this.source)===null||n===void 0?void 
0:n.subscribe(r))!==null&&o!==void 0?o:Sr},t}(x);var Et={now:function(){return(Et.delegate||Date).now()},delegate:void 0};var wt=function(e){ie(t,e);function t(r,n,o){r===void 0&&(r=1/0),n===void 0&&(n=1/0),o===void 0&&(o=Et);var i=e.call(this)||this;return i._bufferSize=r,i._windowTime=n,i._timestampProvider=o,i._buffer=[],i._infiniteTimeWindow=!0,i._infiniteTimeWindow=n===1/0,i._bufferSize=Math.max(1,r),i._windowTime=Math.max(1,n),i}return t.prototype.next=function(r){var n=this,o=n.isStopped,i=n._buffer,s=n._infiniteTimeWindow,a=n._timestampProvider,c=n._windowTime;o||(i.push(r),!s&&i.push(a.now()+c)),this._trimBuffer(),e.prototype.next.call(this,r)},t.prototype._subscribe=function(r){this._throwIfClosed(),this._trimBuffer();for(var n=this._innerSubscribe(r),o=this,i=o._infiniteTimeWindow,s=o._buffer,a=s.slice(),c=0;c0?e.prototype.requestAsyncId.call(this,r,n,o):(r.actions.push(this),r._scheduled||(r._scheduled=ut.requestAnimationFrame(function(){return r.flush(void 0)})))},t.prototype.recycleAsyncId=function(r,n,o){var i;if(o===void 0&&(o=0),o!=null?o>0:this.delay>0)return e.prototype.recycleAsyncId.call(this,r,n,o);var s=r.actions;n!=null&&((i=s[s.length-1])===null||i===void 0?void 0:i.id)!==n&&(ut.cancelAnimationFrame(n),r._scheduled=void 0)},t}(Wt);var Tn=function(e){ie(t,e);function t(){return e!==null&&e.apply(this,arguments)||this}return t.prototype.flush=function(r){this._active=!0;var n=this._scheduled;this._scheduled=void 0;var o=this.actions,i;r=r||o.shift();do if(i=r.execute(r.state,r.delay))break;while((r=o[0])&&r.id===n&&o.shift());if(this._active=!1,i){for(;(r=o[0])&&r.id===n&&o.shift();)r.unsubscribe();throw i}},t}(Dt);var Te=new Tn(Sn);var _=new F(function(e){return e.complete()});function Vt(e){return e&&C(e.schedule)}function Cr(e){return e[e.length-1]}function Ye(e){return C(Cr(e))?e.pop():void 0}function Oe(e){return Vt(Cr(e))?e.pop():void 0}function zt(e,t){return typeof Cr(e)=="number"?e.pop():t}var pt=function(e){return e&&typeof e.length=="number"&&typeof e!="function"};function Nt(e){return C(e==null?void 0:e.then)}function qt(e){return C(e[ft])}function Kt(e){return Symbol.asyncIterator&&C(e==null?void 0:e[Symbol.asyncIterator])}function Qt(e){return new TypeError("You provided "+(e!==null&&typeof e=="object"?"an invalid object":"'"+e+"'")+" where a stream was expected. 
You can provide an Observable, Promise, ReadableStream, Array, AsyncIterable, or Iterable.")}function Ni(){return typeof Symbol!="function"||!Symbol.iterator?"@@iterator":Symbol.iterator}var Yt=Ni();function Gt(e){return C(e==null?void 0:e[Yt])}function Bt(e){return pn(this,arguments,function(){var r,n,o,i;return $t(this,function(s){switch(s.label){case 0:r=e.getReader(),s.label=1;case 1:s.trys.push([1,,9,10]),s.label=2;case 2:return[4,et(r.read())];case 3:return n=s.sent(),o=n.value,i=n.done,i?[4,et(void 0)]:[3,5];case 4:return[2,s.sent()];case 5:return[4,et(o)];case 6:return[4,s.sent()];case 7:return s.sent(),[3,2];case 8:return[3,10];case 9:return r.releaseLock(),[7];case 10:return[2]}})})}function Jt(e){return C(e==null?void 0:e.getReader)}function U(e){if(e instanceof F)return e;if(e!=null){if(qt(e))return qi(e);if(pt(e))return Ki(e);if(Nt(e))return Qi(e);if(Kt(e))return On(e);if(Gt(e))return Yi(e);if(Jt(e))return Gi(e)}throw Qt(e)}function qi(e){return new F(function(t){var r=e[ft]();if(C(r.subscribe))return r.subscribe(t);throw new TypeError("Provided object does not correctly implement Symbol.observable")})}function Ki(e){return new F(function(t){for(var r=0;r=2;return function(n){return n.pipe(e?A(function(o,i){return e(o,i,n)}):de,ge(1),r?He(t):Vn(function(){return new Zt}))}}function zn(){for(var e=[],t=0;t=2,!0))}function pe(e){e===void 0&&(e={});var t=e.connector,r=t===void 0?function(){return new x}:t,n=e.resetOnError,o=n===void 0?!0:n,i=e.resetOnComplete,s=i===void 0?!0:i,a=e.resetOnRefCountZero,c=a===void 0?!0:a;return function(f){var u,p,m,d=0,h=!1,v=!1,G=function(){p==null||p.unsubscribe(),p=void 0},oe=function(){G(),u=m=void 0,h=v=!1},N=function(){var T=u;oe(),T==null||T.unsubscribe()};return y(function(T,Qe){d++,!v&&!h&&G();var De=m=m!=null?m:r();Qe.add(function(){d--,d===0&&!v&&!h&&(p=$r(N,c))}),De.subscribe(Qe),!u&&d>0&&(u=new rt({next:function($e){return De.next($e)},error:function($e){v=!0,G(),p=$r(oe,o,$e),De.error($e)},complete:function(){h=!0,G(),p=$r(oe,s),De.complete()}}),U(T).subscribe(u))})(f)}}function $r(e,t){for(var r=[],n=2;ne.next(document)),e}function K(e,t=document){return Array.from(t.querySelectorAll(e))}function z(e,t=document){let r=ce(e,t);if(typeof r=="undefined")throw new ReferenceError(`Missing element: expected "${e}" to be present`);return r}function ce(e,t=document){return t.querySelector(e)||void 0}function _e(){return document.activeElement instanceof HTMLElement&&document.activeElement||void 0}function tr(e){return L(b(document.body,"focusin"),b(document.body,"focusout")).pipe(ke(1),l(()=>{let t=_e();return typeof t!="undefined"?e.contains(t):!1}),V(e===_e()),B())}function Xe(e){return{x:e.offsetLeft,y:e.offsetTop}}function Qn(e){return L(b(window,"load"),b(window,"resize")).pipe(Ce(0,Te),l(()=>Xe(e)),V(Xe(e)))}function rr(e){return{x:e.scrollLeft,y:e.scrollTop}}function dt(e){return L(b(e,"scroll"),b(window,"resize")).pipe(Ce(0,Te),l(()=>rr(e)),V(rr(e)))}var Gn=function(){if(typeof Map!="undefined")return Map;function e(t,r){var n=-1;return t.some(function(o,i){return o[0]===r?(n=i,!0):!1}),n}return function(){function t(){this.__entries__=[]}return Object.defineProperty(t.prototype,"size",{get:function(){return this.__entries__.length},enumerable:!0,configurable:!0}),t.prototype.get=function(r){var n=e(this.__entries__,r),o=this.__entries__[n];return o&&o[1]},t.prototype.set=function(r,n){var o=e(this.__entries__,r);~o?this.__entries__[o][1]=n:this.__entries__.push([r,n])},t.prototype.delete=function(r){var 
n=this.__entries__,o=e(n,r);~o&&n.splice(o,1)},t.prototype.has=function(r){return!!~e(this.__entries__,r)},t.prototype.clear=function(){this.__entries__.splice(0)},t.prototype.forEach=function(r,n){n===void 0&&(n=null);for(var o=0,i=this.__entries__;o0},e.prototype.connect_=function(){!Dr||this.connected_||(document.addEventListener("transitionend",this.onTransitionEnd_),window.addEventListener("resize",this.refresh),ga?(this.mutationsObserver_=new MutationObserver(this.refresh),this.mutationsObserver_.observe(document,{attributes:!0,childList:!0,characterData:!0,subtree:!0})):(document.addEventListener("DOMSubtreeModified",this.refresh),this.mutationEventsAdded_=!0),this.connected_=!0)},e.prototype.disconnect_=function(){!Dr||!this.connected_||(document.removeEventListener("transitionend",this.onTransitionEnd_),window.removeEventListener("resize",this.refresh),this.mutationsObserver_&&this.mutationsObserver_.disconnect(),this.mutationEventsAdded_&&document.removeEventListener("DOMSubtreeModified",this.refresh),this.mutationsObserver_=null,this.mutationEventsAdded_=!1,this.connected_=!1)},e.prototype.onTransitionEnd_=function(t){var r=t.propertyName,n=r===void 0?"":r,o=va.some(function(i){return!!~n.indexOf(i)});o&&this.refresh()},e.getInstance=function(){return this.instance_||(this.instance_=new e),this.instance_},e.instance_=null,e}(),Bn=function(e,t){for(var r=0,n=Object.keys(t);r0},e}(),Xn=typeof WeakMap!="undefined"?new WeakMap:new Gn,Zn=function(){function e(t){if(!(this instanceof e))throw new TypeError("Cannot call a class as a function.");if(!arguments.length)throw new TypeError("1 argument required, but only 0 present.");var r=ya.getInstance(),n=new Aa(t,r,this);Xn.set(this,n)}return e}();["observe","unobserve","disconnect"].forEach(function(e){Zn.prototype[e]=function(){var t;return(t=Xn.get(this))[e].apply(t,arguments)}});var Ca=function(){return typeof nr.ResizeObserver!="undefined"?nr.ResizeObserver:Zn}(),eo=Ca;var to=new x,Ra=$(()=>k(new eo(e=>{for(let t of e)to.next(t)}))).pipe(g(e=>L(ze,k(e)).pipe(R(()=>e.disconnect()))),J(1));function he(e){return{width:e.offsetWidth,height:e.offsetHeight}}function ye(e){return Ra.pipe(S(t=>t.observe(e)),g(t=>to.pipe(A(({target:r})=>r===e),R(()=>t.unobserve(e)),l(()=>he(e)))),V(he(e)))}function bt(e){return{width:e.scrollWidth,height:e.scrollHeight}}function ar(e){let t=e.parentElement;for(;t&&(e.scrollWidth<=t.scrollWidth&&e.scrollHeight<=t.scrollHeight);)t=(e=t).parentElement;return t?e:void 0}var ro=new x,ka=$(()=>k(new IntersectionObserver(e=>{for(let t of e)ro.next(t)},{threshold:0}))).pipe(g(e=>L(ze,k(e)).pipe(R(()=>e.disconnect()))),J(1));function sr(e){return ka.pipe(S(t=>t.observe(e)),g(t=>ro.pipe(A(({target:r})=>r===e),R(()=>t.unobserve(e)),l(({isIntersecting:r})=>r))))}function no(e,t=16){return dt(e).pipe(l(({y:r})=>{let n=he(e),o=bt(e);return r>=o.height-n.height-t}),B())}var cr={drawer:z("[data-md-toggle=drawer]"),search:z("[data-md-toggle=search]")};function oo(e){return cr[e].checked}function Ke(e,t){cr[e].checked!==t&&cr[e].click()}function Ue(e){let t=cr[e];return b(t,"change").pipe(l(()=>t.checked),V(t.checked))}function Ha(e,t){switch(e.constructor){case HTMLInputElement:return e.type==="radio"?/^Arrow/.test(t):!0;case HTMLSelectElement:case HTMLTextAreaElement:return!0;default:return e.isContentEditable}}function Pa(){return L(b(window,"compositionstart").pipe(l(()=>!0)),b(window,"compositionend").pipe(l(()=>!1))).pipe(V(!1))}function io(){let 
e=b(window,"keydown").pipe(A(t=>!(t.metaKey||t.ctrlKey)),l(t=>({mode:oo("search")?"search":"global",type:t.key,claim(){t.preventDefault(),t.stopPropagation()}})),A(({mode:t,type:r})=>{if(t==="global"){let n=_e();if(typeof n!="undefined")return!Ha(n,r)}return!0}),pe());return Pa().pipe(g(t=>t?_:e))}function le(){return new URL(location.href)}function ot(e){location.href=e.href}function ao(){return new x}function so(e,t){if(typeof t=="string"||typeof t=="number")e.innerHTML+=t.toString();else if(t instanceof Node)e.appendChild(t);else if(Array.isArray(t))for(let r of t)so(e,r)}function M(e,t,...r){let n=document.createElement(e);if(t)for(let o of Object.keys(t))typeof t[o]!="undefined"&&(typeof t[o]!="boolean"?n.setAttribute(o,t[o]):n.setAttribute(o,""));for(let o of r)so(n,o);return n}function fr(e){if(e>999){let t=+((e-950)%1e3>99);return`${((e+1e-6)/1e3).toFixed(t)}k`}else return e.toString()}function co(){return location.hash.substring(1)}function Vr(e){let t=M("a",{href:e});t.addEventListener("click",r=>r.stopPropagation()),t.click()}function $a(e){return L(b(window,"hashchange"),e).pipe(l(co),V(co()),A(t=>t.length>0),J(1))}function fo(e){return $a(e).pipe(l(t=>ce(`[id="${t}"]`)),A(t=>typeof t!="undefined"))}function zr(e){let t=matchMedia(e);return er(r=>t.addListener(()=>r(t.matches))).pipe(V(t.matches))}function uo(){let e=matchMedia("print");return L(b(window,"beforeprint").pipe(l(()=>!0)),b(window,"afterprint").pipe(l(()=>!1))).pipe(V(e.matches))}function Nr(e,t){return e.pipe(g(r=>r?t():_))}function ur(e,t={credentials:"same-origin"}){return ue(fetch(`${e}`,t)).pipe(fe(()=>_),g(r=>r.status!==200?Tt(()=>new Error(r.statusText)):k(r)))}function We(e,t){return ur(e,t).pipe(g(r=>r.json()),J(1))}function po(e,t){let r=new DOMParser;return ur(e,t).pipe(g(n=>n.text()),l(n=>r.parseFromString(n,"text/xml")),J(1))}function pr(e){let t=M("script",{src:e});return $(()=>(document.head.appendChild(t),L(b(t,"load"),b(t,"error").pipe(g(()=>Tt(()=>new ReferenceError(`Invalid script: ${e}`))))).pipe(l(()=>{}),R(()=>document.head.removeChild(t)),ge(1))))}function lo(){return{x:Math.max(0,scrollX),y:Math.max(0,scrollY)}}function mo(){return L(b(window,"scroll",{passive:!0}),b(window,"resize",{passive:!0})).pipe(l(lo),V(lo()))}function ho(){return{width:innerWidth,height:innerHeight}}function bo(){return b(window,"resize",{passive:!0}).pipe(l(ho),V(ho()))}function vo(){return Q([mo(),bo()]).pipe(l(([e,t])=>({offset:e,size:t})),J(1))}function lr(e,{viewport$:t,header$:r}){let n=t.pipe(Z("size")),o=Q([n,r]).pipe(l(()=>Xe(e)));return Q([r,t,o]).pipe(l(([{height:i},{offset:s,size:a},{x:c,y:f}])=>({offset:{x:s.x-c,y:s.y-f+i},size:a})))}(()=>{function e(n,o){parent.postMessage(n,o||"*")}function t(...n){return n.reduce((o,i)=>o.then(()=>new Promise(s=>{let a=document.createElement("script");a.src=i,a.onload=s,document.body.appendChild(a)})),Promise.resolve())}var r=class extends EventTarget{constructor(n){super(),this.url=n,this.m=i=>{i.source===this.w&&(this.dispatchEvent(new MessageEvent("message",{data:i.data})),this.onmessage&&this.onmessage(i))},this.e=(i,s,a,c,f)=>{if(s===`${this.url}`){let u=new ErrorEvent("error",{message:i,filename:s,lineno:a,colno:c,error:f});this.dispatchEvent(u),this.onerror&&this.onerror(u)}};let o=document.createElement("iframe");o.hidden=!0,document.body.appendChild(this.iframe=o),this.w.document.open(),this.w.document.write(` - + diff --git a/documentation/compose/koji/index.html b/documentation/compose/koji/index.html index fd58547..0831c27 100644 --- 
a/documentation/compose/koji/index.html +++ b/documentation/compose/koji/index.html @@ -19,7 +19,7 @@ - + @@ -27,10 +27,10 @@ - + - + @@ -111,25 +111,31 @@ -
- + + - + + + + + + + - + + + + + + + - - - - - - - -
+ + @@ -723,7 +729,7 @@ - + diff --git a/documentation/empanadas/index.html b/documentation/empanadas/index.html index 7163174..e4e07c5 100644 --- a/documentation/empanadas/index.html +++ b/documentation/empanadas/index.html @@ -19,7 +19,7 @@ - + @@ -27,10 +27,10 @@ - + - + @@ -116,25 +116,31 @@ -
- + + - + + + + + + + - + + + + + + + - - - - - - - -
+ + @@ -986,7 +992,7 @@ options.

- + diff --git a/documentation/index.html b/documentation/index.html index 94b881f..543ec24 100644 --- a/documentation/index.html +++ b/documentation/index.html @@ -19,7 +19,7 @@ - + @@ -27,10 +27,10 @@ - + - + @@ -116,25 +116,31 @@ -
- + + - + + + + + + + - + + + + + + + - - - - - - - -
+ + @@ -845,7 +851,7 @@ all the repositories down.

- + diff --git a/documentation/peridot/index.html b/documentation/peridot/index.html index f33199b..720406c 100644 --- a/documentation/peridot/index.html +++ b/documentation/peridot/index.html @@ -19,7 +19,7 @@ - + @@ -27,10 +27,10 @@ - + - + @@ -111,25 +111,31 @@ -
- + + - + + + + + + + - + + + + + + + - - - - - - - -
+ + @@ -722,7 +728,7 @@ - + diff --git a/documentation/rebuild/index.html b/documentation/rebuild/index.html index 35c3e3f..f1d8ca2 100644 --- a/documentation/rebuild/index.html +++ b/documentation/rebuild/index.html @@ -19,7 +19,7 @@ - + @@ -27,10 +27,10 @@ - + - + @@ -111,25 +111,31 @@ -
- + + - + + + + + + + - + + + + + + + - - - - - - - -
+ + @@ -719,7 +725,7 @@ - + diff --git a/documentation/references/empanadas_common/index.html b/documentation/references/empanadas_common/index.html index d5dd66b..28c3ca4 100644 --- a/documentation/references/empanadas_common/index.html +++ b/documentation/references/empanadas_common/index.html @@ -19,7 +19,7 @@ - + @@ -27,10 +27,10 @@ - + - + @@ -116,25 +116,31 @@ -
- + + - + + + + + + + - + + + + + + + - - - - - - - -
+ + @@ -1047,7 +1053,7 @@ ALLOWED_TYPE_VARIANTS = { - + diff --git a/documentation/references/empanadas_config/index.html b/documentation/references/empanadas_config/index.html index 29c4f23..f40a768 100644 --- a/documentation/references/empanadas_config/index.html +++ b/documentation/references/empanadas_config/index.html @@ -19,7 +19,7 @@ - + @@ -27,10 +27,10 @@ - + - + @@ -116,25 +116,31 @@ -
- + + - + + + + + + + - + + + + + + + - - - - - - - -
+ + @@ -1567,7 +1573,7 @@ ISO images if applicable.

- + diff --git a/documentation/references/empanadas_sig_config/index.html b/documentation/references/empanadas_sig_config/index.html index 8c4b0eb..ddf25ac 100644 --- a/documentation/references/empanadas_sig_config/index.html +++ b/documentation/references/empanadas_sig_config/index.html @@ -19,7 +19,7 @@ - + @@ -27,10 +27,10 @@ - + - + @@ -116,25 +116,31 @@ -
- + + - + + + + + + + - + + + + + + + - - - - - - - -
+ + @@ -771,7 +777,7 @@ sync to work.

- + diff --git a/documentation/references/index.html b/documentation/references/index.html index 715a90f..642135c 100644 --- a/documentation/references/index.html +++ b/documentation/references/index.html @@ -19,7 +19,7 @@ - + @@ -27,10 +27,10 @@ - + - + @@ -111,25 +111,31 @@ -
- + + - + + + + + + + - + + + + + + + - - - - - - - -
+ + @@ -723,7 +729,7 @@ - + diff --git a/include/resources_bottom/index.html b/include/resources_bottom/index.html index 5d12abe..e7fcaf7 100644 --- a/include/resources_bottom/index.html +++ b/include/resources_bottom/index.html @@ -19,7 +19,7 @@ - + @@ -27,10 +27,10 @@ - + - + @@ -111,25 +111,31 @@ -
- + + - + + + + + + + - + + + + + + + - - - - - - - -
+ + @@ -802,7 +808,7 @@ - + diff --git a/index.html b/index.html index b00bb0f..1d4ad65 100644 --- a/index.html +++ b/index.html @@ -17,7 +17,7 @@ - + @@ -25,10 +25,10 @@ - + - + @@ -114,25 +114,31 @@ -
- + + - + + + + + + + - + + + + + + + - - - - - - - -
+ + @@ -827,7 +833,7 @@ - + diff --git a/members/index.html b/members/index.html index 152f3e0..584f0a3 100644 --- a/members/index.html +++ b/members/index.html @@ -19,7 +19,7 @@ - + @@ -27,10 +27,10 @@ - + - + @@ -111,25 +111,31 @@ -
- + + - + + + + + + + - + + + + + + + - - - - - - - -
+ + @@ -781,7 +787,7 @@ - + diff --git a/search/search_index.json b/search/search_index.json index 8bfdb4d..4a9d5d9 100644 --- a/search/search_index.json +++ b/search/search_index.json @@ -1 +1 @@ -{"config":{"lang":["en"],"separator":"[\\s\\-]+","pipeline":["stopWordFilter"]},"docs":[{"location":"","title":"SIG/Core Wiki","text":""},{"location":"#about","title":"About","text":"

The Rocky Linux Core Special Interest Group (SIG/Core) dedicates itself to the development, building, management, production, and release of Rocky Linux for the Enterprise Linux community and the many users around the world. This group is a mixture of core Rocky Linux development and infrastructure members, and its members also belong to other groups within the Rocky Linux community (such as SIG/AltArch) as well as the Enterprise Linux community as a whole.

"},{"location":"#mission","title":"Mission","text":"

SIG/Core strives to ensure a stable distribution is developed, built, tested, and provided to the community from the RESF as a compatible derivative of Red Hat Enterprise Linux. To achieve this goal, some of the things we do are:

  • Ensuring a quality and fully compatible release product
  • Developing and iterating on the build systems and architecture
  • Developing all code in the open
  • Setting the technical direction for the build system architecture
  • Release of beta and final products to the end users and mirrors
  • Release of timely updates to the end users and mirrors

See the What We Do page for a more detailed explanation of our activities.

"},{"location":"#getting-in-touch-contributing","title":"Getting In Touch / Contributing","text":"

There are various ways to get in touch with SIG/Core and provide help, assistance, or even just ideas that can benefit us or the entire community.

  • Chat

    • Mattermost: ~development on Mattermost
    • IRC: #rockylinux and #rockylinux-devel on libera.chat
    • Matrix: Rocky Linux General / Support and Rocky Linux Development
  • RESF SIG/Core Issue Tracker

  • Mail List

For a list of our members, see the Members page.

"},{"location":"#resources-and-rocky-linux-policies","title":"Resources and Rocky Linux Policies","text":"
  • RESF Git Service
  • Rocky Linux GitHub
  • Rocky Linux GitLab
  • Rocky Linux Image Guide
  • Rocky Linux Repository Guide
  • Rocky Linux Release Version Guide/Policy
  • Special Interest Groups.
"},{"location":"#general-packaging-resources","title":"General Packaging Resources","text":"
  • RPM Packaging Guide
  • Fedora Packaging Guidelines
  • Basic Packaging Tutorial
"},{"location":"members/","title":"Members","text":"

SIG/Core is a mix of Release Engineering and Infrastructure members who ensure a high-quality release of Rocky Linux as well as the uptime of the services provided to the community. The current members of this group are listed below.

  • Identity Management & Release Engineering Co-Lead: Louis Abel, label@rockylinux.org, Mattermost @nazunalika, IRC Sokel/label/Sombra
  • Release Engineering Co-Lead: Mustafa Gezen, mustafa@rockylinux.org, Mattermost @mustafa, IRC mstg
  • Release Engineering and Development: Skip Grube, skip@rockylinux.org, Mattermost @skip77
  • Release Engineering and Development: Sherif Nagy, sherif@rockylinux.org, Mattermost @sherif
  • Release Engineering and Development: Pablo Greco, pgreco@rockylinux.org, Mattermost @pgreco, IRC pgreco
  • Infrastructure Lead: Neil Hanlon, neil@resf.org, Mattermost @neil, IRC neil
  • Infrastructure Lead: Taylor Goodwill, tg@resf.org, Mattermost @tgo, IRC tg
"},{"location":"what_we_do/","title":"What We Do","text":"

SIG/Core (or Release Engineering) was brought together as a combination of varying expertise (development and infrastructure) to fill in gaps of knowledge and to ensure that the primary goal of a stable release of Rocky Linux is reached.

Some of the things we do in pursuit of our mission goals:

  • Continuous preparation for upcoming changes from upstream (Fedora and CentOS Stream)
  • Distribution release and maintenance
  • Design and development work to integrate all components together
  • Maintenance of the infrastructure used to build and maintain Rocky Linux (such as ansible roles and playbooks)
  • Working with the testing team with images and a platform to test
  • Providing resources for Special Interest Groups
  • Providing assistance and resources for users within the community
"},{"location":"documentation/","title":"Release General Overview","text":"

This section goes over, at a high level, how we compose releases for Rocky Linux. As most of our tools are home grown, we have made sure that they are open source and available in our git services.

This page outlines the steps we generally take, and we hope that other projects that wish to use our tools can apply them in the same way, whether they want to be an Enterprise Linux derivative or another project entirely.

"},{"location":"documentation/#build-system-and-tools","title":"Build System and Tools","text":"

The tools in use for the distribution are in the table below.

  • srpmproc: maintained by SIG/Core at RESF (code: GitHub)
  • empanadas: maintained by SIG/Core at RESF (code: sig-core-toolkit)
  • Peridot: maintained by SIG/Core at RESF (code: GitHub)
  • MirrorManager 2: maintained by the Fedora Project (code: MirrorManager2)

For Rocky Linux to be built, we use Peridot as the build system and empanadas to "compose" the distribution. As we do not use Koji for Rocky Linux beyond version 9, pungi can no longer be used. Peridot instead takes pungi configuration data and comps and transforms them into a format it can understand. Empanadas then comes in to do the "compose" and sync all the repositories down.

"},{"location":"documentation/#full-compose-major-or-minor-releases","title":"Full Compose (major or minor releases)","text":"

Step by step, it looks like this:

  • Distribution is built and maintained in Peridot
  • Comps and pungi configuration are converted into the peridot format for the project
  • Repositories are created in yumrepofs based on the configuration provided
  • A repoclosure is run against the repositories from empanadas to ensure there are no critical issues
  • In Parallel:

    • Repositories are synced as a "full run" in empanadas
    • Lorax is run using empanadas in the peridot cluster
  • Lorax results are pulled down from an S3 bucket

  • DVD images are built for each architecture
  • Compose directory is synced to staging for verification
  • Staging is synced to production to allow mirror syncing
  • Bit flip on release day
"},{"location":"documentation/#general-updates","title":"General Updates","text":"

Step by step, it looks like this:

  • Distribution is maintained in Peridot
  • Updates are built, repos are then "hashed" in yumrepofs
  • Empanadas syncs updates as needed, either per repo or all repos at once
  • Updates are synced to staging to be verified
  • Staging is synced to production to allow mirror syncing
"},{"location":"documentation/empanadas/","title":"Empanadas","text":"

This page goes over empanadas, which is part of the SIG/Core toolkit. Empanadas assists SIG/Core in composing repositories, creating ISOs, creating images, and performing various other activities for Rocky Linux. It is also used for general testing and debugging of repositories and their metadata.

"},{"location":"documentation/empanadas/#contact-information","title":"Contact Information","text":"Owner SIG/Core (Release Engineering & Infrastructure) Email Contact releng@rockylinux.org Mattermost Contacts @label @neil Mattermost Channels ~Development"},{"location":"documentation/empanadas/#general-information","title":"General Information","text":"

empanadas is a python project using poetry, containing various built-in modules with the goal of emulating the Fedora Project's pungi to an extent. While it is not perfect, it achieves the basic goals of creating repositories, images, and ISOs for consumption by the end user. It also interacts with peridot, the build system used by the RESF to build the Rocky Linux distribution.

For syncs, it relies on podman to run the syncing in parallel. This was done because it is not possible to run multiple dnf transactions at once on a single system, and looping through one repository at a time is neither sustainable nor fast.
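To illustrate the approach, here is a minimal sketch of the idea, not empanadas' actual code: each repository gets its own container so no two dnf transactions share a host, and the containers run concurrently. The repository list, mount path, and container image below are placeholders borrowed from elsewhere on this page.

# Illustrative sketch only: one podman container per repository, run in
# parallel, so dnf transactions never collide on a single system.
import subprocess
from concurrent.futures import ThreadPoolExecutor

REPOS = ["BaseOS", "AppStream", "CRB"]  # hypothetical repository list

def sync_repo(repo: str) -> int:
    # The sync command itself is a placeholder; empanadas builds its own.
    cmd = [
        "podman", "run", "--rm",
        "-v", "/mnt/compose:/mnt/compose:z",  # compose_root from common.py
        "centos:stream9",                     # container image from common.py
        "/bin/bash", "-c",
        f"echo 'sync of {repo} would run here'",
    ]
    return subprocess.run(cmd, check=False).returncode

with ThreadPoolExecutor(max_workers=len(REPOS)) as pool:
    print(list(pool.map(sync_repo, REPOS)))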

"},{"location":"documentation/empanadas/#requirements","title":"Requirements","text":"
  • Poetry must be installed on the system
  • Podman must be installed on the system
  • fpart must be installed on the system (available in EPEL on EL systems)
  • Enough storage should be available if repositories are being synced
  • mock must be installed if building live images
  • System must be an Enterprise Linux system or Fedora with the %rhel macro set
"},{"location":"documentation/empanadas/#features","title":"Features","text":"

As of this writing, empanadas has the following abilities:

  • Repository syncing via dnf from a peridot instance or applicable repos
  • Per profile dnf repoclosure checking for all applicable repos
  • Per profile dnf repoclosure checking for peridot instance repositories
  • Basic ISO Building via lorax
  • Extra ISO Building via xorriso for DVD and minimal images
  • Live ISO Building using livemedia-creator and mock
  • Anaconda treeinfo builder
  • Cloud Image builder
"},{"location":"documentation/empanadas/#installing-empanadas","title":"Installing Empanadas","text":"

The below is how to install empanadas from the development branch on a Fedora system.

% dnf install git podman fpart poetry mock -y
% git clone https://git.resf.org/sig_core/toolkit.git -b devel
% cd toolkit/iso/empanadas
% poetry install
"},{"location":"documentation/empanadas/#configuring-empanadas","title":"Configuring Empanadas","text":"

How you use empanadas determines how your configuration should be set up.

  • empanadas/common.py
  • empanadas/config/*.yaml
  • empanadas/sig/*.yaml

These configuration files are delicate and control a wide variety of the moving parts of empanadas. As these configurations are fairly large, we recommend checking the reference guides for deeper details on configuring for base distribution or "SIG" content.

"},{"location":"documentation/empanadas/#using-empanadas","title":"Using Empanadas","text":"

The most common way to use empanadas is to sync repositories from a peridot instance. This is performed for each release and for each set of updates as they come from upstream. The examples below show how to use empanadas, as well as the common options.

Note that for each of these commands, it is fully expected you are running poetry run in the root of empanadas.

# Syncs all repositories for the "9" release
% poetry run sync_from_peridot --release 9 --clean-old-packages

# Syncs only the BaseOS repository without syncing sources
% poetry run sync_from_peridot --release 9 --clean-old-packages --repo BaseOS --ignore-source

# Syncs only AppStream for ppc64le
% poetry run sync_from_peridot --release 9 --clean-old-packages --repo AppStream --arch ppc64le
Resources: the following services are described below: Account Services, Git (RESF Git Service), Git (Rocky Linux GitHub), Git (Rocky Linux GitLab), Mail Lists, and Contacts.

URL: https://accounts.rockylinux.org

Purpose: Account Services maintains the accounts for almost all components of the Rocky ecosystem

Technology: Noggin used by Fedora Infrastructure

Contact: ~Infrastructure in Mattermost and #rockylinux-infra in Libera IRC

URL: https://git.resf.org

Purpose: General projects, code, and so on for the Rocky Enterprise Software Foundation.

Technology: Gitea

Contact: ~Infrastructure, ~Development in Mattermost and #rockylinux-infra, #rockylinux-devel in Libera IRC

URL: https://github.com/rocky-linux

Purpose: General purpose code, assets, and so on for Rocky Linux. Some content is mirrored to the RESF Git Service.

Technology: GitHub

Contact: ~Infrastructure, ~Development in Mattermost and #rockylinux-infra, #rockylinux-devel in Libera IRC

URL: https://git.rockylinux.org

Purpose: Packages and light code for the Rocky Linux distribution

Technology: GitLab

Contact: ~Infrastructure, ~Development in Mattermost and #rockylinux-infra, #rockylinux-devel in Libera IRC

URL: https://lists.resf.org

Purpose: Users can subscribe and interact with various mail lists for the Rocky ecosystem

Technology: Mailman 3 + Hyper Kitty

Contact: ~Infrastructure in Mattermost and #rockylinux-infra in Libera IRC

  • Louis Abel, label@rockylinux.org, Mattermost @nazunalika, IRC Sokel/label/Sombra
  • Mustafa Gezen, mustafa@rockylinux.org, Mattermost @mustafa, IRC mstg
  • Skip Grube, skip@rockylinux.org, Mattermost @skip77
  • Sherif Nagy, sherif@rockylinux.org, Mattermost @sherif
  • Pablo Greco, pgreco@rockylinux.org, Mattermost @pgreco, IRC pgreco
  • Neil Hanlon, neil@resf.org, Mattermost @neil, IRC neil
  • Taylor Goodwill, tg@resf.org, Mattermost @tgo, IRC tg
"},{"location":"documentation/peridot/","title":"Peridot Build System","text":"

This page goes over the Peridot Build System and how SIG/Core utilizes it.

More to come.

"},{"location":"documentation/rebuild/","title":"Rebuild Version Bump","text":"

In some cases, a package has to be rebuilt. A package may be rebuilt for these reasons:

  • Underlying libraries have been rebased
  • ABI changes that require a rebuild (mass rebuilds, though they are rare)
  • New architecture added to a project

This typically applies to packages being built from a given src subgroup. Packages pulled from upstream don't fall into this category in normal circumstances. In those cases, they receive .0.1 and so on as standalone rebuilds.
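As a hypothetical illustration of that suffix (the actual bumping is handled by the build tooling, not by this snippet), a standalone rebuild appends .0.1, then .0.2, and so on to the existing release:

# Hypothetical helper showing the ".0.1" standalone-rebuild suffix described
# above; this only illustrates the naming convention.
def standalone_release(release: str, attempt: int = 1) -> str:
    return f"{release}.0.{attempt}"

print(standalone_release("1.el9"))     # 1.el9.0.1
print(standalone_release("1.el9", 2))  # 1.el9.0.2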

"},{"location":"documentation/compose/","title":"Composing and Managing Releases","text":"

This section goes over the process of composing a release from a bunch of packages to repositories, to images. This section also goes over the basics of working with koji when necessary.

"},{"location":"documentation/compose/koji/","title":"Updates and Management in Koji, A Manual","text":"

More to come.

"},{"location":"documentation/references/","title":"References","text":"

Use this section to locate reference configuration items for the toolkit.

"},{"location":"documentation/references/empanadas_common/","title":"Empanadas common.py Configuration","text":"

The common.py configuration contains dictionaries and classes that dictate most of the functionality of empanadas.

"},{"location":"documentation/references/empanadas_common/#config-items","title":"Config Items","text":"

type: Dictionary

"},{"location":"documentation/references/empanadas_common/#configrlmacro","title":"config.rlmacro","text":"

type: String

required: True

description: Empanadas expects to run on an EL system. This is part of the general checkup. It should not be hardcoded; the value should come from the rpm python module.

"},{"location":"documentation/references/empanadas_common/#configdist","title":"config.dist","text":"

type: String

required: False

description: This was the original tag placed in mock configs. It combines el with the rpm python module expansion. It is no longer required, but the option remains available for future use.
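For reference, both rlmacro and dist are derived at runtime with the rpm python module, exactly as the reference example at the bottom of this page does:

import rpm

rlmacro = rpm.expandMacro('%rhel')  # e.g. "9" on an Enterprise Linux 9 host
dist = 'el' + rlmacro               # e.g. "el9"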

"},{"location":"documentation/references/empanadas_common/#configarch","title":"config.arch","text":"

type: String

required: True

description: The architecture of the current running system. This is checked against the supported architectures in general release configurations. This should not be hardcoded.

"},{"location":"documentation/references/empanadas_common/#configdate_stamp","title":"config.date_stamp","text":"

type: String

required: True

description: Date time stamp in the form of YYYYMMDD.HHMMSS. This should not be hardcoded.
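Both this item and config.arch above are generated at runtime rather than hardcoded; a minimal snippet matching the reference example at the bottom of this page:

import platform
import time

arch = platform.machine()                                      # e.g. "x86_64"
date_stamp = time.strftime("%Y%m%d.%H%M%S", time.localtime())  # YYYYMMDD.HHMMSS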

"},{"location":"documentation/references/empanadas_common/#configcompose_root","title":"config.compose_root","text":"

type: String

required: True

description: Root path of composes on the system running empanadas.

"},{"location":"documentation/references/empanadas_common/#configstaging_root","title":"config.staging_root","text":"

type: String

required: False

description: For future use. Root path of staging repository location where content will be synced to.

"},{"location":"documentation/references/empanadas_common/#configproduction_root","title":"config.production_root","text":"

type: String

required: False

description: For future use. Root path of production repository location where content will be synced to from staging.

"},{"location":"documentation/references/empanadas_common/#configcategory_stub","title":"config.category_stub","text":"

type: String

required: True

description: For future use. Stub path that is appended to staging_root and production_root.

example: mirror/pub/rocky

"},{"location":"documentation/references/empanadas_common/#configsig_category_stub","title":"config.sig_category_stub","text":"

type: String

required: True

description: For future use. Stub path that is appended to staging_root and production_root for SIG content.

example: mirror/pub/sig

"},{"location":"documentation/references/empanadas_common/#configrepo_base_url","title":"config.repo_base_url","text":"

type: String

required: True

description: Base URL where the repositories live. This typically points to a peridot instance and is supplemented by the project_id parameter in the release configuration.

Note that this does not have to be a peridot instance. The combination of this value and project_id can be sufficient for empanadas to perform its work.

"},{"location":"documentation/references/empanadas_common/#configmock_work_root","title":"config.mock_work_root","text":"

type: String

required: True

description: Hardcoded path to where ISO work is performed within a mock chroot. This is the default path created by mock and it is recommended not to change this.

example: /builddir

"},{"location":"documentation/references/empanadas_common/#configcontainer","title":"config.container","text":"

type: String

required: True

description: This is the container used to perform all operations in podman.

example: centos:stream9

"},{"location":"documentation/references/empanadas_common/#configdistname","title":"config.distname","text":"

type: String

required: True

description: Name of the distribution you are building or building for.

example: Rocky Linux

"},{"location":"documentation/references/empanadas_common/#configshortname","title":"config.shortname","text":"

type: String

required: True

description: Short name of the distribution you are building or building for.

example: Rocky

"},{"location":"documentation/references/empanadas_common/#configtranslators","title":"config.translators","text":"

type: Dictionary

required: True

description: Translates Linux architectures to golang architectures. Reserved for future use.
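
example: x86_64 maps to amd64, aarch64 to arm64, s390x to s390x, ppc64le to ppc64le, and i686 to 386 (see the reference example below)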

"},{"location":"documentation/references/empanadas_common/#configaws_region","title":"config.aws_region","text":"

type: String

required: False

description: Region you are working in with AWS or an on-prem cloud that supports this variable.

example: us-east-2

"},{"location":"documentation/references/empanadas_common/#configbucket","title":"config.bucket","text":"

type: String

required: False

description: Name of the S3-compatible bucket that is used to pull images from. Requires aws_region.
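
example: resf-empanadas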

"},{"location":"documentation/references/empanadas_common/#configbucket_url","title":"config.bucket_url","text":"

type: String

required: False

description: URL of the S3-compatible bucket that is used to pull images from.
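
example: https://resf-empanadas.s3.us-east-2.amazonaws.com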

"},{"location":"documentation/references/empanadas_common/#allowed_type_variants-items","title":"allowed_type_variants items","text":"

type: Dictionary

description: Key value pairs of cloud or image variants. The value is either None or a list type.

"},{"location":"documentation/references/empanadas_common/#reference-example","title":"Reference Example","text":"
config = {\n    \"rlmacro\": rpm.expandMacro('%rhel'),\n    \"dist\": 'el' + rpm.expandMacro('%rhel'),\n    \"arch\": platform.machine(),\n    \"date_stamp\": time.strftime(\"%Y%m%d.%H%M%S\", time.localtime()),\n    \"compose_root\": \"/mnt/compose\",\n    \"staging_root\": \"/mnt/repos-staging\",\n    \"production_root\": \"/mnt/repos-production\",\n    \"category_stub\": \"mirror/pub/rocky\",\n    \"sig_category_stub\": \"mirror/pub/sig\",\n    \"repo_base_url\": \"https://yumrepofs.build.resf.org/v1/projects\",\n    \"mock_work_root\": \"/builddir\",\n    \"container\": \"centos:stream9\",\n    \"distname\": \"Rocky Linux\",\n    \"shortname\": \"Rocky\",\n    \"translators\": {\n        \"x86_64\": \"amd64\",\n        \"aarch64\": \"arm64\",\n        \"ppc64le\": \"ppc64le\",\n        \"s390x\": \"s390x\",\n        \"i686\": \"386\"\n    },\n    \"aws_region\": \"us-east-2\",\n    \"bucket\": \"resf-empanadas\",\n    \"bucket_url\": \"https://resf-empanadas.s3.us-east-2.amazonaws.com\"\n}\n\nALLOWED_TYPE_VARIANTS = {\n        \"Azure\": None,\n        \"Container\": [\"Base\", \"Minimal\", \"UBI\"],\n        \"EC2\": None,\n        \"GenericCloud\": None,\n        \"Vagrant\": [\"Libvirt\", \"Vbox\"],\n        \"OCP\": None\n\n}\n
"},{"location":"documentation/references/empanadas_config/","title":"Empanadas config yaml Configuration","text":"

Each file in empanadas/config/ is a yaml file that contains configuration items for the distribution release version. The configuration heavily dictates the functionality and which features are directly supported by empanadas when run.

See the items below to see which options are mandatory and optional.

"},{"location":"documentation/references/empanadas_config/#config-items","title":"Config Items","text":""},{"location":"documentation/references/empanadas_config/#top-level","title":"Top Level","text":"

The Top Level is the name of the profile and starts the YAML dictionary for the release. It is alphanumeric and accepts punctuation within reason. Common examples:

  • 9
  • 9-beta
  • 8-lookahead
"},{"location":"documentation/references/empanadas_config/#fullname","title":"fullname","text":"

type: String

required: True

description: Needed for treeinfo and discinfo generation.

"},{"location":"documentation/references/empanadas_config/#revision","title":"revision","text":"

type: String

required: True

description: Full version of a release

"},{"location":"documentation/references/empanadas_config/#rclvl","title":"rclvl","text":"

type: String

required: True

description: Release Candidate or Beta descriptor. Sets names and versions with this descriptor if enabled.

"},{"location":"documentation/references/empanadas_config/#major","title":"major","text":"

type: String

required: True

description: Major version of a release

"},{"location":"documentation/references/empanadas_config/#minor","title":"minor","text":"

type: String

required: True

description: Minor version of a release

"},{"location":"documentation/references/empanadas_config/#profile","title":"profile","text":"

type: String

required: True

description: Matches the top level of the release. This should not differ from the top level assignment.

"},{"location":"documentation/references/empanadas_config/#disttag","title":"disttag","text":"

type: String

required: True

description: Sets the dist tag for mock configs.

"},{"location":"documentation/references/empanadas_config/#bugurl","title":"bugurl","text":"

type: String

required: True

description: A URL to the bug tracker for this release or distribution.

"},{"location":"documentation/references/empanadas_config/#checksum","title":"checksum","text":"

type: String

required: True

description: Checksum type. Used when generating checksum information for images.

"},{"location":"documentation/references/empanadas_config/#fedora_major","title":"fedora_major","text":"

type: String

required: False

description: For future use with icicle.

"},{"location":"documentation/references/empanadas_config/#allowed_arches","title":"allowed_arches","text":"

type: list

required: True

description: List of supported architectures for this release.

"},{"location":"documentation/references/empanadas_config/#provide_multilib","title":"provide_multilib","text":"

type: boolean

required: True

description: Sets whether the x86_64 architecture will be multilib. It is recommended that this is set to True.

"},{"location":"documentation/references/empanadas_config/#project_id","title":"project_id","text":"

type: String

required: True

description: Appended to the base repo URL in common.py. For peridot, it is the project id that is generated for the project you are pulling from. It can be set to anything else if need be for non-peridot use.
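
example: 55b17281-bc54-4929-8aca-a8a11d628738 (from the reference example below). With repo_base_url set to https://yumrepofs.build.resf.org/v1/projects, empanadas would presumably pull from a path under https://yumrepofs.build.resf.org/v1/projects/55b17281-bc54-4929-8aca-a8a11d628738/.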

"},{"location":"documentation/references/empanadas_config/#repo_symlinks","title":"repo_symlinks","text":"

type: dict

required: False

description: For future use. Sets symlinks to repositories for backwards compatibility. Key value pairs only.

"},{"location":"documentation/references/empanadas_config/#renames","title":"renames","text":"

type: dict

required: False

description: Renames a repository to the value set. For example, renaming all to devel. Set to {} if no renames are going to occur.

"},{"location":"documentation/references/empanadas_config/#all_repos","title":"all_repos","text":"

type: list

required: True

description: List of repositories that will be synced/managed by empanadas.

"},{"location":"documentation/references/empanadas_config/#structure","title":"structure","text":"

type: dict

required: True

description: Key value pairs of packages and repodata. These are appended appropriately during syncing and ISO actions. Setting these is mandatory.
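
example: packages: 'os/Packages' and repodata: 'os/repodata' (as in the reference example below)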

"},{"location":"documentation/references/empanadas_config/#iso_map","title":"iso_map","text":"

type: dictionary

required: True if building ISO's and operating with lorax.

description: Controls how lorax and extra ISO's are built.

If you are not building images, set this to {}.

"},{"location":"documentation/references/empanadas_config/#xorrisofs","title":"xorrisofs","text":"

type: boolean

required: True

description: Dictates whether xorrisofs is used to build images. Setting this to false uses genisoimage. It is recommended that xorrisofs is used.

"},{"location":"documentation/references/empanadas_config/#iso_level","title":"iso_level","text":"

type: boolean

required: True

description: Set to false if you are using xorrisofs. Can be set to true when using genisoimage.

"},{"location":"documentation/references/empanadas_config/#images","title":"images","text":"

type: dict

required: True

description: Dictates the ISO images that will be made or the treeinfo that will be generated.

Note: The primary repository (for example, BaseOS) will need to be listed to ensure the treeinfo data is correctly generated. disc should be set to False and isoskip should be set to True. See the example section for an example.
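
For instance, the BaseOS entry from the reference example below shows this treeinfo-only setup:

BaseOS:\n  disc: False\n  isoskip: True\n  variant: 'BaseOS'\n  repos:\n    - 'BaseOS'\n    - 'AppStream'\n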

"},{"location":"documentation/references/empanadas_config/#namedisc","title":"name.disc","text":"

type: boolean

required: True

description: This tells the ISO builder whether this image will be generated as an ISO.

"},{"location":"documentation/references/empanadas_config/#nameisoskip","title":"name.isoskip","text":"

type: boolean

required: False

description: This tells the ISO builder whether this image will be skipped, even if disc is set to True. Default is False.

"},{"location":"documentation/references/empanadas_config/#namevariant","title":"name.variant","text":"

type: string

required: True

description: Names the primary variant repository for the image. This is set in .treeinfo.

"},{"location":"documentation/references/empanadas_config/#namerepos","title":"name.repos","text":"

type: list

required: True

description: Names of the repositories included in the image. This is added to .treeinfo.

"},{"location":"documentation/references/empanadas_config/#namevolname","title":"name.volname","text":"

type: string

required: True

required value: dvd

description: This is required if building more than the DVD image. By default, the name dvd is hardcoded in the buildImage template.

"},{"location":"documentation/references/empanadas_config/#lorax","title":"lorax","text":"

type: dict

required: True if building lorax images.

description: Sets up lorax images and which repositories to use when building lorax images.

"},{"location":"documentation/references/empanadas_config/#loraxrepos","title":"lorax.repos","text":"

type: list

required: True

description: List of repos that are used to pull packages to build the lorax images.

"},{"location":"documentation/references/empanadas_config/#loraxvariant","title":"lorax.variant","text":"

type: string

required: True

description: Base repository for the release

"},{"location":"documentation/references/empanadas_config/#loraxlorax_removes","title":"lorax.lorax_removes","text":"

type: list

required: False

description: Excludes packages that are not needed when lorax is running.

"},{"location":"documentation/references/empanadas_config/#loraxrequired_pkgs","title":"lorax.required_pkgs","text":"

type: list

required: True

description: Required list of installed packages needed to build lorax images.

"},{"location":"documentation/references/empanadas_config/#livemap","title":"livemap","text":"

type: dict

required: False

description: Dictates what live images are built and how they are built.

"},{"location":"documentation/references/empanadas_config/#livemapgit_repo","title":"livemap.git_repo","text":"

type: string

required: True

description: The git repository URL where the kickstarts live

"},{"location":"documentation/references/empanadas_config/#livemapbranch","title":"livemap.branch","text":"

type: string

required: True

description: The branch being used for the kickstarts

"},{"location":"documentation/references/empanadas_config/#livemapksentry","title":"livemap.ksentry","text":"

type: dict

required: True

description: Key value pairs of the live images being created. Key being the name of the live image, value being the kickstart name/path.

"},{"location":"documentation/references/empanadas_config/#livemapallowed_arches","title":"livemap.allowed_arches","text":"

type: list

required: True

description: List of allowed architectures that will build for the live images.

"},{"location":"documentation/references/empanadas_config/#livemaprequired_pkgs","title":"livemap.required_pkgs","text":"

type: list

required: True

description: Required list of packages needed to build the live images.

"},{"location":"documentation/references/empanadas_config/#cloudimages","title":"cloudimages","text":"

type: dict

required: False

description: Cloud related settings.

Set to {} if not needed.

"},{"location":"documentation/references/empanadas_config/#cloudimagesimages","title":"cloudimages.images","text":"

type: dict

required: True

description: Cloud images that will be generated and placed in a bucket to be pulled, along with their format.

"},{"location":"documentation/references/empanadas_config/#cloudimagesimagesname","title":"cloudimages.images.name","text":"

type: dict

required: True

description: Name of the cloud image being pulled.

Accepted key value options:

  • format, which is raw, qcow2, vhd, tar.xz
  • variants, which is a list
  • primary_variant, which symlinks to the \"primary\" variant in the variant list
"},{"location":"documentation/references/empanadas_config/#repoclosure_map","title":"repoclosure_map","text":"

type: dict

required: True

description: Repoclosure settings. These settings are required when doing full syncs that need to check repositories for consistency.

"},{"location":"documentation/references/empanadas_config/#repoclosure_maparches","title":"repoclosure_map.arches","text":"

type: dict

required: True

description: For each architecture (key), dnf switches/settings that dictate how repoclosure will check for consistency (value, string).

example: x86_64: '--forcearch=x86_64 --arch=x86_64 --arch=athlon --arch=i686 --arch=i586 --arch=i486 --arch=i386 --arch=noarch'

"},{"location":"documentation/references/empanadas_config/#repoclosure_maprepos","title":"repoclosure_map.repos","text":"

type: dict

required: True

description: For each repository that is pulled for a given release (key), repositories that will be included in the repoclosure check. A repository that only checks against itself must have a value of [].
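
example: BaseOS: [] is only checked against itself, while AppStream: [BaseOS] is checked against BaseOS (as in the reference example below)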

"},{"location":"documentation/references/empanadas_config/#extra_files","title":"extra_files","text":"

type: dict

required: True

description: Extra files settings and where they come from. Git repositories are the only supported method.

"},{"location":"documentation/references/empanadas_config/#extra_filesgit_repo","title":"extra_files.git_repo","text":"

type: string

required: True

description: URL to the git repository with the extra files.

"},{"location":"documentation/references/empanadas_config/#extra_filesgit_raw_path","title":"extra_files.git_raw_path","text":"

type: string

required: True

description: URL to the git repository with the extra files, but in the \"raw\" URL form.

example: git_raw_path: 'https://git.rockylinux.org/staging/src/rocky-release/-/raw/r9/'

"},{"location":"documentation/references/empanadas_config/#extra_filesbranch","title":"extra_files.branch","text":"

type: string

required: True

description: Branch where the extra files are pulled from.

"},{"location":"documentation/references/empanadas_config/#extra_filesgpg","title":"extra_files.gpg","text":"

type: dict

required: True

description: For each gpg key type (key), the relative path to the key in the git repository (value).

These keys help set up the repository configuration when doing syncs.

By default, the RepoSync class sets stable as the gpgkey that is used.
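
example: stable: 'SOURCES/RPM-GPG-KEY-Rocky-9' and testing: 'SOURCES/RPM-GPG-KEY-Rocky-9-Testing' (as in the reference example below)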

"},{"location":"documentation/references/empanadas_config/#extra_fileslist","title":"extra_files.list","text":"

type: list

required: True

description: List of files from the git repository that will be used as \"extra\" files, placed in the repositories, made available to mirrors, and included on ISO images where applicable.

"},{"location":"documentation/references/empanadas_config/#reference-example","title":"Reference Example","text":"
---\n'9':\n  fullname: 'Rocky Linux 9.0'\n  revision: '9.0'\n  rclvl: 'RC2'\n  major: '9'\n  minor: '0'\n  profile: '9'\n  disttag: 'el9'\n  bugurl: 'https://bugs.rockylinux.org'\n  checksum: 'sha256'\n  fedora_major: '20'\n  allowed_arches:\n    - x86_64\n    - aarch64\n    - ppc64le\n    - s390x\n  provide_multilib: True\n  project_id: '55b17281-bc54-4929-8aca-a8a11d628738'\n  repo_symlinks:\n    NFV: 'nfv'\n  renames:\n    all: 'devel'\n  all_repos:\n    - 'all'\n    - 'BaseOS'\n    - 'AppStream'\n    - 'CRB'\n    - 'HighAvailability'\n    - 'ResilientStorage'\n    - 'RT'\n    - 'NFV'\n    - 'SAP'\n    - 'SAPHANA'\n    - 'extras'\n    - 'plus'\n  structure:\n    packages: 'os/Packages'\n    repodata: 'os/repodata'\n  iso_map:\n    xorrisofs: True\n    iso_level: False\n    images:\n      dvd:\n        disc: True\n        variant: 'AppStream'\n        repos:\n          - 'BaseOS'\n          - 'AppStream'\n      minimal:\n        disc: True\n        isoskip: True\n        repos:\n          - 'minimal'\n          - 'BaseOS'\n        variant: 'minimal'\n        volname: 'dvd'\n      BaseOS:\n        disc: False\n        isoskip: True\n        variant: 'BaseOS'\n        repos:\n          - 'BaseOS'\n          - 'AppStream'\n    lorax:\n      repos:\n        - 'BaseOS'\n        - 'AppStream'\n      variant: 'BaseOS'\n      lorax_removes:\n        - 'libreport-rhel-anaconda-bugzilla'\n      required_pkgs:\n        - 'lorax'\n        - 'genisoimage'\n        - 'isomd5sum'\n        - 'lorax-templates-rhel'\n        - 'lorax-templates-generic'\n        - 'xorriso'\n  cloudimages:\n    images:\n      EC2:\n        format: raw\n      GenericCloud:\n        format: qcow2\n  livemap:\n    git_repo: 'https://git.resf.org/sig_core/kickstarts.git'\n    branch: 'r9'\n    ksentry:\n      Workstation: rocky-live-workstation.ks\n      Workstation-Lite: rocky-live-workstation-lite.ks\n      XFCE: rocky-live-xfce.ks\n      KDE: rocky-live-kde.ks\n      MATE: rocky-live-mate.ks\n    allowed_arches:\n      - x86_64\n      - aarch64\n    required_pkgs:\n      - 'lorax-lmc-novirt'\n      - 'vim-minimal'\n      - 'pykickstart'\n      - 'git'\n  variantmap:\n    git_repo: 'https://git.rockylinux.org/rocky/pungi-rocky.git'\n    branch: 'r9'\n    git_raw_path: 'https://git.rockylinux.org/rocky/pungi-rocky/-/raw/r9/'\n  repoclosure_map:\n    arches:\n      x86_64: '--forcearch=x86_64 --arch=x86_64 --arch=athlon --arch=i686 --arch=i586 --arch=i486 --arch=i386 --arch=noarch'\n      aarch64: '--forcearch=aarch64 --arch=aarch64 --arch=noarch'\n      ppc64le: '--forcearch=ppc64le --arch=ppc64le --arch=noarch'\n      s390x: '--forcearch=s390x --arch=s390x --arch=noarch'\n    repos:\n      devel: []\n      BaseOS: []\n      AppStream:\n        - BaseOS\n      CRB:\n        - BaseOS\n        - AppStream\n      HighAvailability:\n        - BaseOS\n        - AppStream\n      ResilientStorage:\n        - BaseOS\n        - AppStream\n      RT:\n        - BaseOS\n        - AppStream\n      NFV:\n        - BaseOS\n        - AppStream\n      SAP:\n        - BaseOS\n        - AppStream\n        - HighAvailability\n      SAPHANA:\n        - BaseOS\n        - AppStream\n        - HighAvailability\n  extra_files:\n    git_repo: 'https://git.rockylinux.org/staging/src/rocky-release.git'\n    git_raw_path: 'https://git.rockylinux.org/staging/src/rocky-release/-/raw/r9/'\n    branch: 'r9'\n    gpg:\n      stable: 'SOURCES/RPM-GPG-KEY-Rocky-9'\n      testing: 'SOURCES/RPM-GPG-KEY-Rocky-9-Testing'\n    list:\n      - 'SOURCES/Contributors'\n 
     - 'SOURCES/COMMUNITY-CHARTER'\n      - 'SOURCES/EULA'\n      - 'SOURCES/LICENSE'\n      - 'SOURCES/RPM-GPG-KEY-Rocky-9'\n      - 'SOURCES/RPM-GPG-KEY-Rocky-9-Testing'\n...\n
"},{"location":"documentation/references/empanadas_sig_config/","title":"Empanadas SIG yaml Configuration","text":"

Each file in empanadas/sig/ is a yaml file that contains configuration items for the distribution release version. The configuration determines the structure of the SIG repositories synced from Peridot or a given repo.

Note that a release profile (for a major version) is still required for this sync to work.

See the items below to see which options are mandatory and optional.

"},{"location":"documentation/references/empanadas_sig_config/#config-items","title":"Config Items","text":""},{"location":"documentation/references/empanadas_sig_config/#reference-example","title":"Reference Example","text":""},{"location":"include/resources_bottom/","title":"Resources bottom","text":"Resources Account ServicesGit (RESF Git Service)Git (Rocky Linux GitHub)Git (Rocky Linux GitLab)Mail ListsContacts

URL: https://accounts.rockylinux.org

Purpose: Account Services maintains the accounts for almost all components of the Rocky ecosystem

Technology: Noggin used by Fedora Infrastructure

Contact: ~Infrastructure in Mattermost and #rockylinux-infra in Libera IRC

URL: https://git.resf.org

Purpose: General projects, code, and so on for the Rocky Enterprise Software Foundation.

Technology: Gitea

Contact: ~Infrastructure, ~Development in Mattermost and #rockylinux-infra, #rockylinux-devel in Libera IRC

URL: https://github.com/rocky-linux

Purpose: General purpose code, assets, and so on for Rocky Linux. Some content is mirrored to the RESF Git Service.

Technology: GitHub

Contact: ~Infrastructure, ~Development in Mattermost and #rockylinux-infra, #rockylinux-devel in Libera IRC

URL: https://git.rockylinux.org

Purpose: Packages and light code for the Rocky Linux distribution

Technology: GitLab

Contact: ~Infrastructure, ~Development in Mattermost and #rockylinux-infra, #rockylinux-devel in Libera IRC

URL: https://lists.resf.org

Purpose: Users can subscribe and interact with various mail lists for the Rocky ecosystem

Technology: Mailman 3 + Hyper Kitty

Contact: ~Infrastructure in Mattermost and #rockylinux-infra in Libera IRC

Name Email Mattermost Name IRC Name Louis Abel label@rockylinux.org @nazunalika Sokel/label/Sombra Mustafa Gezen mustafa@rockylinux.org @mustafa mstg Skip Grube skip@rockylinux.org @skip77 Sherif Nagy sherif@rockylinux.org @sherif Pablo Greco pgreco@rockylinux.org @pgreco pgreco Neil Hanlon neil@resf.org @neil neil Taylor Goodwill tg@resf.org @tgo tg"},{"location":"sop/","title":"SOP (Standard Operating Procedures)","text":"

This section goes over the various SOP's for SIG/Core. Please use the menu items to find the various pages of interest.

"},{"location":"sop/sop_compose/","title":"SOP: Compose and Repo Sync for Rocky Linux and Peridot","text":"

This SOP covers how the Rocky Linux Release Engineering Team handles composes and repository syncs for the distribution. It contains information about the scripts that are utilized, and in what order, depending on the use case.

"},{"location":"sop/sop_compose/#contact-information","title":"Contact Information","text":"Owner Release Engineering Team Email Contact releng@rockylinux.org Email Contact infrastructure@rockylinux.org Mattermost Contacts @label @mustafa @neil @tgo Mattermost Channels ~Development"},{"location":"sop/sop_compose/#related-git-repositories","title":"Related Git Repositories","text":"

There are several git repositories used in the overall composition of a repository or a set of repositories.

Pungi - This repository contains all the necessary pungi configuration files that peridot translates into its own configuration. Pungi is no longer used for Rocky Linux.

Comps - This repository contains all the necessary comps (which are groups and other data) for a given major version. Peridot (and pungi) use this information to properly build repositories.

Toolkit - This repository contains various scripts and utilities used by Release Engineering, such as syncing composes, functionality testing, and mirror maintenance.

"},{"location":"sop/sop_compose/#composing-repositories","title":"Composing Repositories","text":""},{"location":"sop/sop_compose/#mount-structure","title":"Mount Structure","text":"

There are designated systems that take care of composing repositories. These systems contain the necessary EFS/NFS mounts for the staging and production repositories as well as composes.

  • /mnt/compose -> Compose data
  • /mnt/repos-staging -> Staging
  • /mnt/repos-production -> Production
"},{"location":"sop/sop_compose/#empanadas","title":"Empanadas","text":"

Each repository or set of repositories is controlled by various comps and pungi configurations that are translated into peridot. Empanadas is used to run a reposync from peridot's yumrepofs repositories, generate ISO's, and create a pungi compose look-alike. Because of this, the comps and pungi-rocky configuration is not referenced with empanadas.

"},{"location":"sop/sop_compose/#running-a-compose","title":"Running a Compose","text":"

First, the toolkit must be cloned. In the iso/empanadas directory, run poetry install. You'll then have access to the various commands needed:

  • sync_from_peridot
  • build-iso
  • build-iso-extra
  • pull-unpack-tree
  • pull-cloud-image
  • finalize_compose

To perform a full compose, this order is expected (replacing X with major version or config profile)

# This creates a brand new directory under /mnt/compose/X and symlinks it to latest-Rocky-X\npoetry run sync_from_peridot --release X --hashed --repoclosure --full-run\n\n# On each architecture, this must be run to generate the lorax images\n# !! Use --rc if the image is a release candidate or a beta image\n# Note: This is typically done using kubernetes and uploaded to a bucket\npoetry run build-iso --release X --isolation=None\n\n# The images are pulled from the bucket\npoetry run pull-unpack-tree --release X\n\n# The extra ISO's (usually just DVD) are generated\n# !! Use --rc if the image is a release candidate or a beta image\n# !! Set --extra-iso-mode to mock if desired\n# !! If there is more than the dvd, remove --extra-iso dvd\npoetry run build-iso-extra --release X --extra-iso dvd --extra-iso-mode podman\n\n# This pulls the generic and EC2 cloud images\npoetry run pull-cloud-image --release X\n\n# This ensures everything is closed out for a release. This copies iso's, images,\n# generates metadata, and the like.\n# !! DO NOT RUN DURING INCREMENTAL UPDATES !!\npoetry run finalize_compose --release X\n
"},{"location":"sop/sop_compose/#syncing-composes","title":"Syncing Composes","text":"

Syncing utilizes the sync scripts provided in the release engineering toolkit.

When the scripts are run, they are usually run with a specific purpose, as each major version may be different.

The below are common vars files. common_X will override what's in common. Typically these set what repositories exist and how they are named or look at the top level. These also set the current major.minor release as necessary.

.\n\u251c\u2500\u2500 common\n\u251c\u2500\u2500 common_8\n\u251c\u2500\u2500 common_9\n

These are for the releases in general. What they do is noted below.

\u251c\u2500\u2500 gen-torrents.sh                  -> Generates torrents for images\n\u251c\u2500\u2500 minor-release-sync-to-staging.sh -> Syncs a minor release to staging\n\u251c\u2500\u2500 prep-staging-X.sh                -> Preps staging updates and signs repos (only for 8)\n\u251c\u2500\u2500 sign-repos-only.sh               -> Signs the repomd (only for 8)\n\u251c\u2500\u2500 sync-file-list-parallel.sh       -> Generates file lists in parallel for mirror sync scripts\n\u251c\u2500\u2500 sync-to-prod.sh                  -> Syncs staging to production\n\u251c\u2500\u2500 sync-to-prod.delete.sh           -> Syncs staging to production (deletes artifacts that are no longer in staging)\n\u251c\u2500\u2500 sync-to-prod-sig.sh              -> Syncs a sig provided compose to production\n\u251c\u2500\u2500 sync-to-staging.sh               -> Syncs a provided compose to staging\n\u251c\u2500\u2500 sync-to-staging.delete.sh        -> Syncs a provided compose to staging (deletes artifacts that are no longer in the compose)\n\u251c\u2500\u2500 sync-to-staging-sig.sh           -> Syncs a sig provided compose to staging\n

Generally, you will only run sync-to-staging.sh or sync-to-staging.delete.sh to sync. The former is for older releases, the latter is for newer releases. Optionally, if you are syncing a \"beta\" or \"lookahead\" release, you will need to also provide the RLREL variable as beta or lookahead.

# The below syncs to staging for Rocky Linux 8\nRLVER=8 bash sync-to-staging.sh Rocky\n# The below syncs to staging for Rocky Linux 9\nRLVER=9 bash sync-to-staging.delete.sh Rocky\n
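
If syncing a beta or lookahead release, the invocation would also carry the RLREL variable. A minimal sketch, assuming the same script and arguments as above:

# Hypothetical: sync a beta compose for Rocky Linux 9 to staging\nRLREL=beta RLVER=9 bash sync-to-staging.delete.sh Rocky\n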

Once the syncs are done, staging must be tested and vetted before being sent to production. Once staging is completed, it is synced to production.

RLVER=8 bash sync-to-prod.sh\nRLVER=9 bash sync-to-prod.delete.sh\nbash sync-file-list-parallel.sh\n

During this phase, staging is rsynced with production, the file list is updated, and the full time list is also updated to allow mirrors to know that the repositories have been updated and that they can sync.

Note: If multiple releases are being updated, it is important to run the syncs to completion before running the file list parallel script.

"},{"location":"sop/sop_compose_8/","title":"SOP: Compose and Repo Sync for Rocky Linux 8","text":"

This SOP covers how the Rocky Linux Release Engineering Team handles composes and repository syncs for Rocky Linux 8. It contains information about the scripts that are utilized, and in what order, depending on the use case.

Please see the other SOP for Rocky Linux 9+, which is managed via empanadas and peridot.

"},{"location":"sop/sop_compose_8/#contact-information","title":"Contact Information","text":"Owner Release Engineering Team Email Contact releng@rockylinux.org Email Contact infrastructure@rockylinux.org Mattermost Contacts @label @mustafa @neil @tgo Mattermost Channels ~Development"},{"location":"sop/sop_compose_8/#related-git-repositories","title":"Related Git Repositories","text":"

There are several git repositories used in the overall composition of a repository or a set of repositories.

Pungi - This repository contains all the necessary pungi configuration files for composes that come from koji. Pungi interacts with koji to build the composes.

Comps - This repository contains all the necessary comps (which are groups and other data) for a given major version. Pungi uses this information to properly build the repositories.

Toolkit - This repository contains various scripts and utilities used by Release Engineering, such as syncing composes, functionality testing, and mirror maintenance.

"},{"location":"sop/sop_compose_8/#composing-repositories","title":"Composing Repositories","text":""},{"location":"sop/sop_compose_8/#mount-structure","title":"Mount Structure","text":"

There are designated systems that take care of composing repositories. These systems contain the necessary EFS/NFS mounts for the staging and production repositories as well as composes.

  • /mnt/koji -> Koji files store
  • /mnt/compose -> Compose data
  • /mnt/repos-staging -> Staging
  • /mnt/repos-production -> Production
"},{"location":"sop/sop_compose_8/#pungi","title":"Pungi","text":"

Each repository or set of repositories is controlled by various pungi configurations. For example, r8.conf controls the absolute base of Rocky Linux 8, which imports other git repository data as well as accompanying json or other configuration files.

"},{"location":"sop/sop_compose_8/#running-a-compose","title":"Running a Compose","text":"

Inside the pungi git repository, the scripts folder contains the necessary scripts that are run to perform a compose. There are different types of composes:

  • produce -> Generates a full compose, generally used for minor releases, which generate new ISO's
  • update -> Generates a smaller compose, generally used for updates within a minor release cycle - ISO's are not generated

Each script is titled appropriately:

  • produce-X.sh -> Generates a full compose for X major release, typically set to the current minor release according to rX.conf
  • updates-X.sh -> Generates a smaller compose for X major release, typically set to the current minor release according to rX.conf
  • updates-X-NAME.sh -> Generates a compose for the specific compose, such as NFV, Rocky-devel, Extras, or Plus

When these scripts are run, they generate an appropriate directory under /mnt/compose/X with an accompanying symlink. For example, if an update to Rocky was made using updates-8.sh, the following would be created:

drwxr-xr-x. 5 root  root  6144 Jul 21 17:44 Rocky-8-updates-20210721.1\nlrwxrwxrwx. 1 root  root    26 Jul 21 18:26 latest-Rocky-8 -> Rocky-8-updates-20210721.1\n

This setup also allows pungi to reuse previous package set data to reduce the time it takes to build a compose. Typically during a new minor release, all composes should be run so they can be properly combined. Example of a typical order if releasing 8.X:

produce-8.sh\nupdates-8-devel.sh\nupdates-8-extras.sh\nupdates-8-plus.sh\n
"},{"location":"sop/sop_compose_8/#syncing-composes","title":"Syncing Composes","text":"

Syncing utilizes the sync scripts provided in the release engineering toolkit.

When the scripts are run, they are usually run for a specific purpose. They are also run in a certain order to ensure the integrity and consistency of a release.

The below are common vars files. common_X will override what's in common. Typically these set what repositories exist and how they are named or look at the top level. These also set the current major.minor release as necessary.

.\n\u251c\u2500\u2500 common\n\u251c\u2500\u2500 common_8\n\u251c\u2500\u2500 common_9\n

These are for the releases in general. What they do is noted below.

\u251c\u2500\u2500 gen-torrents.sh                  -> Generates torrents for images\n\u251c\u2500\u2500 minor-release-sync-to-staging.sh -> Syncs a minor release to staging\n\u251c\u2500\u2500 prep-staging-X.sh                -> Preps staging updates and signs repos\n\u251c\u2500\u2500 sign-repos-only.sh               -> Signs the repomd (only)\n\u251c\u2500\u2500 sync-to-prod.sh                  -> Syncs staging to production\n\u251c\u2500\u2500 sync-to-staging.sh               -> Syncs a provided compose to staging\n\u251c\u2500\u2500 sync-to-staging-sig.sh           -> Syncs a sig provided compose to staging\n

Generally, you will only run minor-release-sync-to-staging.sh when a full minor release is being produced. So for example, if 8.5 has been built out, you would run that after a compose. gen-torrents.sh would be run shortly after.

When doing updates, the order of operations (preferably) would be:

* sync-to-staging.sh\n* sync-to-staging-sig.sh -> Only if sigs are updated\n* prep-staging-8.sh      -> This is required to ensure the groups, comps, and\n                            module data stay sane. This helps us provide older\n                            packages in the repos as well as sign repo metadata.\n* sync-to-prod.sh        -> After the initial testing, it is sent to prod.\n

An example of order:

# The below syncs to staging\nRLVER=8 bash sync-to-staging.sh Plus\nRLVER=8 bash sync-to-staging.sh Extras\nRLVER=8 bash sync-to-staging.sh Rocky-devel\nRLVER=8 bash sync-to-staging.sh Rocky\nbash prep-staging-8.sh\n

Once the syncs are done, staging must be tested and vetted before being sent to production. During this stage, the updateinfo.xml is also applied where necessary to the repositories to provide errata. Once staging is completed, it is synced to production.

RLVER=8 bash sync-to-prod.sh\nbash sync-file-list-parallel.sh\n

During this phase, staging is rsynced with production, the file list is updated, and the full time list is also updated to allow mirrors to know that the repositories have been updated and that they can sync.

Note: If multiple releases are being updated, it is important to run the syncs to completion before running the file list parallel script.

"},{"location":"sop/sop_compose_sig/","title":"SOP: Compose and Repo Sync for Rocky Linux Special Interest Groups","text":"

This SOP covers how the Rocky Linux Release Engineering Team handles composes and repository syncs for Special Interest Groups.

"},{"location":"sop/sop_compose_sig/#contact-information","title":"Contact Information","text":"Owner Release Engineering Team Email Contact releng@rockylinux.org Email Contact infrastructure@rockylinux.org Mattermost Contacts @label @mustafa @neil @tgo Mattermost Channels ~Development"},{"location":"sop/sop_compose_sig/#composing-repositories","title":"Composing Repositories","text":""},{"location":"sop/sop_compose_sig/#mount-structure","title":"Mount Structure","text":"

There are designated systems that take care of composing repositories. These systems contain the necessary EFS/NFS mounts for the staging and production repositories as well as composes.

  • /mnt/compose -> Compose data
  • /mnt/repos-staging -> Staging
  • /mnt/repos-production -> Production
"},{"location":"sop/sop_compose_sig/#empanadas","title":"Empanadas","text":"

Each repository or set of repositories is controlled by various comps and pungi configurations that are translated into peridot. Empanadas is used to run a reposync from peridot's yumrepofs repositories, generate ISO's, and create a pungi compose look-alike. Because of this, the comps and pungi-rocky configuration is not referenced with empanadas.

"},{"location":"sop/sop_compose_sig/#running-a-compose","title":"Running a Compose","text":"

First, the toolkit must be cloned. In the iso/empanadas directory, run poetry install. You'll then have access to the various commands needed:

  • sync_sig

To perform a compose of a SIG, it must be defined in the configuration. As an example, here is how to compose the core SIG.

# This creates a brand new directory under /mnt/compose/X and symlinks it to latest-SIG-Y-X\n~/.local/bin/poetry run sync_sig --release 9 --sig core --hashed --clean-old-packages --full-run\n\n# This assumes the directories already exist and will update in place.\n~/.local/bin/poetry run sync_sig --release 9 --sig core --hashed --clean-old-packages\n
"},{"location":"sop/sop_compose_sig/#syncing-composes","title":"Syncing Composes","text":"

Syncing utilizes the sync scripts provided in the release engineering toolkit.

When the scripts are run, they are usually run with a specific purpose, as each major version may be different.

For SIG's, the only files you'll need to know of are sync-to-staging-sig.sh and sync-to-prod-sig.sh. Both scripts will delete packages and data that are no longer in the compose.

# The below syncs the core 8 repos to staging\nRLVER=8 bash sync-to-staging-sig.sh core\n# The below syncs the core 9 repos to staging\nRLVER=9 bash sync-to-staging-sig.sh core\n\n# The below syncs everything in staging for 8 core to prod\nRLVER=8 bash sync-to-prod-sig.sh core\n\n# The below syncs everything in staging for 9 core to prod\nRLVER=9 bash sync-to-prod-sig.sh core\n

Once staging is completed and reviewed, it is synced to production.

bash sync-file-list-parallel.sh\n

During this phase, staging is rsynced with production, the file list is updated, and the full time list is also updated to allow mirrors to know that the repositories have been updated and that they can sync.

"},{"location":"sop/sop_mirrormanager2/","title":"Mirror Manager Maintenance","text":"

This SOP contains most, if not all, of the information needed for SIG/Core to maintain and operate Mirror Manager for Rocky Linux.

"},{"location":"sop/sop_mirrormanager2/#contact-information","title":"Contact Information","text":"Owner SIG/Core (Release Engineering & Infrastructure) Email Contact infrastructure@rockylinux.org Email Contact releng@rockylinux.org Mattermost Contacts @label @neil @tgo Mattermost Channels ~Infrastructure"},{"location":"sop/sop_mirrormanager2/#introduction","title":"Introduction","text":"

So you made a bad decision and now have to do things to Mirror Manager. Good luck.

"},{"location":"sop/sop_mirrormanager2/#pieces","title":"Pieces","text":"Item Runs on... Software Mirrorlist Server mirrormanager001 https://github.com/adrianreber/mirrorlist-server/ Mirror Manager 2 mirrormanager001 https://github.com/fedora-infra/mirrormanager2"},{"location":"sop/sop_mirrormanager2/#mirrorlist-server","title":"Mirrorlist Server","text":"

This runs two (2) instances. Apache/httpd is configured to send /mirrorlist to one and /debuglist to the other.

  • Every fifteen (15) minutes: Mirrorlist cache is regenerated

    • This queries the database for active mirrors and other information and writes a protobuf. The mirrorlist-server reads the protobuf and responds accordingly.
  • Every twenty (20) minutes: Service hosting /mirrorlist is restarted

  • Every twenty-one (21) minutes: Service hosting /debuglist is restarted

Note that the timing for the restart of the mirror list instances is arbitrary.
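
A minimal crontab sketch of this schedule might look like the following; the exact commands, users, and service names are assumptions and may differ on the production host:

# illustrative only; paths, users, and unit names are assumptions\n*/15 * * * * mirrormanager /usr/local/bin/generate-mirrorlist-cache\n*/20 * * * * root systemctl restart mirrorlist\n*/21 * * * * root systemctl restart mirrorlist-debug\n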

"},{"location":"sop/sop_mirrormanager2/#mirror-manager-2","title":"Mirror Manager 2","text":"

This is a uwsgi service fronted by an apache/httpd instance. This is responsible for everything else that is not /mirrorlist or /debuglist. This allows the mirror managers to, well, manage their mirrors.

"},{"location":"sop/sop_mirrormanager2/#cdn","title":"CDN","text":"

Fastly sits in front of mirror manager. VPN is required to access the /admin endpoints.

If the backend of the CDN is down, it will attempt to guess what the user wanted to access and spit out a result on the dl.rockylinux.org website. For example, a request for AppStream-8 and x86_64 will result in an AppStream/x86_64/os directory on dl.rockylinux.org. Note that this isn't perfect, but it helps during potential downtime or patching.

Fastly -> www firewall -> mirrormanager server\n

In reality, the flow is a lot more complex, and a diagram should be created to map it out in a more user-friendly manner (@TODO)

User -> Fastly -> AWS NLB over TLS, passthru -> www firewall cluster (decrypt TLS) -> mirrormanager server (Rocky CA TLS)\n
"},{"location":"sop/sop_mirrormanager2/#tasks","title":"Tasks","text":"

Below is a list of possible tasks to perform with mirror manager, depending on the scenario.

"},{"location":"sop/sop_mirrormanager2/#new-release","title":"New Release","text":"

Before the following steps can be performed, these prerequisites must be completed:

  • Production rsync endpoints should have all brand new content
  • New content root should be locked down to 750 (without this, mirror manager cannot view it)
  • Disable mirrormanager user cronjobs

  • Update the database with the new content. This is run on a schedule normally (see previous section) but can be done manually.

    a. As the mirror manager user, run the following:

/opt/mirrormanager/scan-primary-mirror-0.4.2/target/debug/scan-primary-mirror --debug --config $HOME/scan-primary-mirror.toml --category 'Rocky Linux'\n/opt/mirrormanager/scan-primary-mirror-0.4.2/target/debug/scan-primary-mirror --debug --config $HOME/scan-primary-mirror.toml --category 'Rocky Linux SIGs'\n
  1. Update the redirects for $reponame-$releasever

    a. Use psql to connect to the mirrormanager server: psql -U mirrormanager -W -h mirrormanager_db_host mirrormanager_db

    b. Confirm that all three columns are filled and that the second and third columns are identical: select rr.from_repo AS \"From Repo\", rr.to_repo AS \"To Repo\", r.prefix AS \"Target Repo\" FROM repository_redirect AS rr LEFT JOIN repository AS r ON rr.to_repo = r.prefix GROUP BY r.prefix, rr.to_repo, rr.from_repo ORDER BY r.prefix ASC;

    c. Change the majorversion redirects to point to the new point release, for example: update repository_redirect set to_repo = regexp_replace(to_repo, '9\.0', '9.1') where from_repo ~ '(\w+)-9';

  2. Generate the mirrorlist cache and restart the debuglist and verify.

Once the bitflip is initiated, restart mirrorlist and reenable all cronjobs.

"},{"location":"sop/sop_mirrormanager2/#out-of-date-mirrors","title":"Out-of-date Mirrors","text":"
  1. Get current shasum of repomd.xml. For example: shasum=$(curl https://dl.rockylinux.org/pub/rocky/9.0/BaseOS/x86_64/os/repodata/repomd.xml | sha256sum)
  2. Compare against latest propagation log:
tail -latr /var/log/mirrormanager/propagation/rocky-9.0-BaseOS-x86_64_propagation.log.*\n\nexport VER=9.0\nawk -v shasum=$(curl -s https://dl.rockylinux.org/pub/rocky/$VER/BaseOS/x86_64/os/repodata/repomd.xml | sha256sum | awk '{print $1}') -F'::' '{split($0,data,\":\")} {if ($4 != shasum) {print data[5], data[6], $2, $7}}' < $(find /var/log/mirrormanager/propagation/ -name \"rocky-${VER}-BaseOS-x86_64_propagation.log*\" -mtime -1 | tail -1)\n

This will generate a table. You can take the IDs in the first column and use the database to disable them by ID (table name: hosts) or go to https://mirrors.rockylinux.org/mirrormanager/host/ID and uncheck 'User active'.

Users can change user active, but they cannot change admin active. It is better to flip user active in this case.

Admins can also view https://mirrors.rockylinux.org/mirrormanager/admin/all_sites if necessary.

Example of table columns:

[mirrormanager@ord1-prod-mirrormanager001 propagation]$ awk -v shasum=$(curl -s https://dl.rockylinux.org/pub/rocky/9.0/BaseOS/x86_64/os/repodata/repomd.xml | sha256sum | awk '{print $1}') -F'::' '{split($0,data,\":\")} {if ($4 != shasum) {print data[5], data[6], $2, $7}}' < rocky-9.0-BaseOS-x86_64_propagation.log.1660611632 | column -t\n164  mirror.host.ag            http://mirror.host.ag/rocky/9.0/BaseOS/x86_64/os/repodata/repomd.xml             404\n173  rocky.centos-repo.net     http://rocky.centos-repo.net/9.0/BaseOS/x86_64/os/repodata/repomd.xml            403\n92   rocky.mirror.co.ge        http://rocky.mirror.co.ge/9.0/BaseOS/x86_64/os/repodata/repomd.xml               404\n289  mirror.vsys.host          http://mirror.vsys.host/rockylinux/9.0/BaseOS/x86_64/os/repodata/repomd.xml      404\n269  mirrors.rackbud.com       http://mirrors.rackbud.com/rocky/9.0/BaseOS/x86_64/os/repodata/repomd.xml        200\n295  mirror.ps.kz              http://mirror.ps.kz/rocky/9.0/BaseOS/x86_64/os/repodata/repomd.xml               200\n114  mirror.liteserver.nl      http://rockylinux.mirror.liteserver.nl/9.0/BaseOS/x86_64/os/repodata/repomd.xml  200\n275  mirror.upsi.edu.my        http://mirror.upsi.edu.my/rocky/9.0/BaseOS/x86_64/os/repodata/repomd.xml         200\n190  mirror.kku.ac.th          http://mirror.kku.ac.th/rocky-linux/9.0/BaseOS/x86_64/os/repodata/repomd.xml     404\n292  mirrors.cat.pdx.edu       http://mirrors.cat.pdx.edu/rocky/9.0/BaseOS/x86_64/os/repodata/repomd.xml        200\n370  mirrors.gbnetwork.com     http://mirrors.gbnetwork.com/rocky/9.0/BaseOS/x86_64/os/repodata/repomd.xml      404\n308  mirror.ihost.md           http://mirror.ihost.md/rockylinux/9.0/BaseOS/x86_64/os/repodata/repomd.xml       404\n87   mirror.freedif.org        http://mirror.freedif.org/Rocky/9.0/BaseOS/x86_64/os/repodata/repomd.xml         404\n194  mirrors.bestthaihost.com  http://mirrors.bestthaihost.com/rocky/9.0/BaseOS/x86_64/os/repodata/repomd.xml   404\n30   mirror.admax.se           http://mirror.admax.se/rocky/9.0/BaseOS/x86_64/os/repodata/repomd.xml            200\n195  mirror.uepg.br            http://mirror.uepg.br/rocky/9.0/BaseOS/x86_64/os/repodata/repomd.xml             404\n247  mirrors.ipserverone.com   http://mirrors.ipserverone.com/rocky/9.0/BaseOS/x86_64/os/repodata/repomd.xml    404'\n
"},{"location":"sop/sop_release/","title":"Rocky Release Procedures for SIG/Core (RelEng/Infrastructure)","text":"

This SOP contains all the steps required by SIG/Core (a mix of Release Engineering and Infrastructure) to perform releases of all Rocky Linux versions. Work is done in collaboration across the entire group of engineers.

"},{"location":"sop/sop_release/#contact-information","title":"Contact Information","text":"Owner SIG/Core (Release Engineering & Infrastructure) Email Contact infrastructure@rockylinux.org Email Contact releng@rockylinux.org Mattermost Contacts @label @neil @tgo @skip77 @mustafa @sherif @pgreco Mattermost Channels ~Infrastructure"},{"location":"sop/sop_release/#preparation","title":"Preparation","text":""},{"location":"sop/sop_release/#notes-about-release-day","title":"Notes about Release Day","text":"

At a minimum of two (2) days before release day, the following should be true:

  1. Torrents should be set up. All files can be synced with the seed box(es) but not yet published. The data should be verified using sha256sum and compared to the CHECKSUM files provided with the files.

  2. Website should be ready (typically with an open PR in github). The content should be verified to ensure the design and content are correct and finalized.

  3. Enough mirrors should be set up. This essentially means that all content for a release should be synced to our primary mirror with the executable bit turned off, and the content should also be hard linked. In theory, mirror manager can be queried to verify whether mirrors are or appear to be in sync.

"},{"location":"sop/sop_release/#notes-about-patch-days","title":"Notes about Patch Days","text":"

At a minimum of one (1) to two (2) days before patch day, the following should be true:

  1. Updates should be completed in the build system, and verified in staging.

  2. Updates should be sent to production and file lists updated to allow mirrors to sync.

"},{"location":"sop/sop_release/#prior-to-release-day-notes","title":"Prior to Release Day notes","text":"

Ensure the SIG/Core Checklist is read thoroughly and executed as listed.

"},{"location":"sop/sop_release/#release-day","title":"Release Day","text":""},{"location":"sop/sop_release/#priorities","title":"Priorities","text":"

During release day, these should be verified/completed in order:

  1. Website - The primary website and user landing page at rockylinux.org should allow the user to efficiently click through to a download link for an ISO, image, or torrent. It must be kept up.

  2. Torrent - The seed box(es) should be primed and ready to go for users downloading via torrent.

  3. Release Notes & Documentation - The release notes are often on the same website as the documentation. The main website and where applicable in the docs should refer to the Release Notes of Rocky Linux.

  4. Wiki - If applicable, the necessary changes and resources should be available for a release. In particular, if a major release has new repos or changed repo names, this should be documented.

  5. Everything else!

"},{"location":"sop/sop_release/#resources","title":"Resources","text":""},{"location":"sop/sop_release/#sigcore-checklist","title":"SIG/Core Checklist","text":""},{"location":"sop/sop_release/#beta","title":"Beta","text":"
  • Compose Completed
  • Repoclosure must be checked and pass
  • Lorax Run
  • ISO's are built
  • Cloud Images built
  • Live Images built
  • Compose Synced to Staging
  • AWS/Azure Images in Marketplace
  • Vagrant Images
  • Container Images
  • Mirror Manager

    • Ready to Migrate from previous beta release (rltype=beta)
    • Boot image install migration from previous beta release
  • Pass image to Testing Team for final validation

"},{"location":"sop/sop_release/#release-candidate","title":"Release Candidate","text":"
  • Compose Completed
  • Repoclosure must be checked and pass
  • Lorax Run
  • ISO's are built
  • Cloud Images built
  • Live Images built
  • Compose Synced to Staging
  • AWS/Azure Images in Marketplace
  • Vagrant Images
  • Container Images
  • Mirror Manager

    • Ready to Migrate from previous release
    • Boot image install migration from previous release
  • Pass image to Testing Team for validation

"},{"location":"sop/sop_release/#final","title":"Final","text":"
  • Compose Completed
  • Repoclosure must be checked and pass
  • Lorax Run
  • ISO's are built
  • Cloud Images built
  • Live Images built
  • Compose Synced to Staging
  • AWS/Azure Images in Marketplace
  • Vagrant Images
  • Container Images
  • Mirror Manager

    • Ready to Migrate from previous release
    • Boot image install migration from previous release
  • Pass image to Testing Team for final validation

  • Sync to Production
  • Sync to Europe Mirror if applicable
  • Hardlink Run
  • Bitflip after 24-48 Hours
Resources Account ServicesGit (RESF Git Service)Git (Rocky Linux GitHub)Git (Rocky Linux GitLab)Mail ListsContacts

URL: https://accounts.rockylinux.org

Purpose: Account Services maintains the accounts for almost all components of the Rocky ecosystem

Technology: Noggin used by Fedora Infrastructure

Contact: ~Infrastructure in Mattermost and #rockylinux-infra in Libera IRC

URL: https://git.resf.org

Purpose: General projects, code, and so on for the Rocky Enterprise Software Foundation.

Technology: Gitea

Contact: ~Infrastructure, ~Development in Mattermost and #rockylinux-infra, #rockylinux-devel in Libera IRC

URL: https://github.com/rocky-linux

Purpose: General purpose code, assets, and so on for Rocky Linux. Some content is mirrored to the RESF Git Service.

Technology: GitHub

Contact: ~Infrastructure, ~Development in Mattermost and #rockylinux-infra, #rockylinux-devel in Libera IRC

URL: https://git.rockylinux.org

Purpose: Packages and light code for the Rocky Linux distribution

Technology: GitLab

Contact: ~Infrastructure, ~Development in Mattermost and #rockylinux-infra, #rockylinux-devel in Libera IRC

URL: https://lists.resf.org

Purpose: Users can subscribe and interact with various mail lists for the Rocky ecosystem

Technology: Mailman 3 + Hyper Kitty

Contact: ~Infrastructure in Mattermost and #rockylinux-infra in Libera IRC

Name Email Mattermost Name IRC Name Louis Abel label@rockylinux.org @nazunalika Sokel/label/Sombra Mustafa Gezen mustafa@rockylinux.org @mustafa mstg Skip Grube skip@rockylinux.org @skip77 Sherif Nagy sherif@rockylinux.org @sherif Pablo Greco pgreco@rockylinux.org @pgreco pgreco Neil Hanlon neil@resf.org @neil neil Taylor Goodwill tg@resf.org @tgo tg"},{"location":"sop/sop_upstream_prep_checklist/","title":"Generalized Prep Checklist for Upcoming Releases","text":"

This SOP contains general checklists required by SIG/Core to prepare and plan for the upcoming release. This work, in general, is required to be done on a routine basis, even months out before the next major or minor release, as it requires monitoring of upstream's (CentOS Stream) work to ensure Rocky Linux will remain ready and compatible with Red Hat Enterprise Linux.

"},{"location":"sop/sop_upstream_prep_checklist/#contact-information","title":"Contact Information","text":"Owner SIG/Core (Release Engineering & Infrastructure) Email Contact infrastructure@rockylinux.org Email Contact releng@rockylinux.org Mattermost Contacts @label @neil @tgo @skip77 @mustafa @sherif @pgreco Mattermost Channels ~Infrastructure"},{"location":"sop/sop_upstream_prep_checklist/#general-upstream-monitoring","title":"General Upstream Monitoring","text":"

SIG/Core is expected to monitor the following upstream repositories, as these indicate what is coming up for a given major or point release. These repositories are found at the Red Hat gitlab.

  • centos-release
  • centos-logos
  • pungi-centos
  • comps
  • module-defaults

These repositories can be monitored by setting the bell icon notification level to \"all activity\".

Upon changes to the upstream repositories, a SIG/Core member should analyze the changes and apply the same to the lookahead branches:

  • rocky-release

    • Manual changes required
  • rocky-logos

    • Manual changes required
  • pungi-rocky

    • Run sync-from-upstream
  • peridot-rocky

    • Configurations are generated using peridot tools
  • comps

    • Run sync-from-upstream
  • rocky-module-defaults

    • Run sync-from-upstream
"},{"location":"sop/sop_upstream_prep_checklist/#general-downward-merging","title":"General Downward Merging","text":"

Repositories that generally track for LookAhead and Beta releases will flow downward to the stable branch. For example:

* rXs / rXlh\n      |\n      |----> rX-beta\n                |\n                |----> rX\n

This applies to any specific rocky repo, such as comps, pungi, peridot-config, and so on. As it is expected that some repos will deviate in commit history, it is OK to force push, under the assumption that changes made in the lower branch exist in the upper branch. That way you avoid changes/functionality being reverted by accident.
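
As a sketch, flowing a beta branch down into stable for one of these repos might look like the following; the branch names follow the diagram above and the commands are illustrative, not a documented procedure:

# illustrative only: merge r9-beta down into r9\ngit checkout r9\ngit merge r9-beta\ngit push origin r9\n# if histories have deviated and the r9 changes already exist in r9-beta:\n# git reset --hard r9-beta && git push --force origin r9\n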

"},{"location":"sop/sop_upstream_prep_checklist/#general-package-patching","title":"General Package Patching","text":"

There are packages that are typically patched for the purpose of debranding. The list of patched packages is typically maintained in a metadata repository. The obvious ones are listed below and should be monitored and maintained properly:

  • abrt
  • anaconda
  • anaconda-user-help
  • chrony
  • cockpit
  • dhcp
  • dnf
  • firefox
  • fwupd
  • gcc
  • gnome-session
  • gnome-settings-daemon
  • grub2
  • initial-setup
  • kernel
  • kernel-rt
  • libdnf
  • libreoffice
  • libreport
  • lorax-templates-rhel
  • nginx
  • opa-ff
  • opa-fm
  • openldap
  • openscap
  • osbuild
  • osbuild-composer
  • PackageKit
  • pesign
  • python-pip
  • redhat-rpm-config
  • scap-security-guide
  • shim
  • shim-unsigned-x64
  • shim-unsigned-aarch64
  • subscription-manager
  • systemd
  • thunderbird
"}]} \ No newline at end of file +{"config":{"lang":["en"],"separator":"[\\s\\-]+","pipeline":["stopWordFilter"]},"docs":[{"location":"","title":"SIG/Core Wiki","text":""},{"location":"#about","title":"About","text":"

The Rocky Linux Core Special Interest Group (SIG/Core) dedicates itself to the development, building, management, production, and release of Rocky Linux for the Enterprise Linux community and the many users around the world. This group is a mixture of core Rocky Linux developers and infrastructure members, and its members also belong to other groups within the Rocky Linux community (such as SIG/AltArch) as well as the Enterprise Linux community as a whole.

"},{"location":"#mission","title":"Mission","text":"

SIG/Core strives to ensure a stable distribution is developed, built, tested, and provided to the community from the RESF as a compatible derivative of Red Hat Enterprise Linux. To achieve this goal, some of the things we do are:

  • Ensuring a quality and fully compatible release product
  • Developing and iterating on the build systems and architecture
  • Developing all code in the open
  • Setting the technical direction for the build system architecture
  • Release of beta and final products to the end users and mirrors
  • Release of timely updates to the end users and mirrors

See the What We Do page for a more detailed explanation of our activities.

"},{"location":"#getting-in-touch-contributing","title":"Getting In Touch / Contributing","text":"

There are various ways to get in touch with SIG/Core and provide help, assistance, or even just ideas that can benefit us or the entire community.

  • Chat

    • Mattermost: ~development on Mattermost
    • IRC: #rockylinux and #rockylinux-devel on libera.chat
    • Matrix: Rocky Linux General / Support and Rocky Linux Development
  • RESF SIG/Core Issue Tracker

  • Mail List

For a list of our members, see the Members page.

"},{"location":"#resources-and-rocky-linux-policies","title":"Resources and Rocky Linux Policies","text":"
  • RESF Git Service
  • Rocky Linux GitHub
  • Rocky Linux GitLab
  • Rocky Linux Image Guide
  • Rocky Linux Repository Guide
  • Rocky Linux Release Version Guide/Policy
  • Special Interest Groups.
"},{"location":"#general-packaging-resources","title":"General Packaging Resources","text":"
  • RPM Packaging Guide
  • Fedora Packaging Guidelines
  • Basic Packaging Tutorial
"},{"location":"members/","title":"Members","text":"

SIG/Core is a mix of Release Engineering and Infrastructure members who ensure a high quality release of Rocky Linux as well as the uptime of the services provided to the community. The current members of this group are listed in the table below.

Role Name Email Mattermost Name IRC Name Identity Management & Release Engineering Co-Lead Louis Abel label@rockylinux.org @nazunalika Sokel/label/Sombra Release Engineering Co-Lead Mustafa Gezen mustafa@rockylinux.org @mustafa mstg Release Engineering and Development Skip Grube skip@rockylinux.org @skip77 Release Engineering and Development Sherif Nagy sherif@rockylinux.org @sherif Release Engineering and Development Pablo Greco pgreco@rockylinux.org @pgreco pgreco Infrastructure Lead Neil Hanlon neil@resf.org @neil neil Infrastructure Lead Taylor Goodwill tg@resf.org @tgo tg"},{"location":"what_we_do/","title":"What We Do","text":"

SIG/Core (or Release Engineering) was brought together as a combination of varying expertise (development and infrastructure) to fill in gaps of knowledge, but also to ensure the primary goal of a stable release of Rocky Linux is reached.

Some of the things we do in pursuit of our mission goals:

  • Continuous preparation for upcoming changes from upstream (Fedora and CentOS Stream)
  • Distribution release and maintenance
  • Design and development work to integrate all components together
  • Maintenance of the infrastructure used to build and maintain Rocky Linux (such as ansible roles and playbooks)
  • Working with the testing team with images and a platform to test
  • Providing resources for Special Interest Groups
  • Providing assistance and resources for users within the community
"},{"location":"documentation/","title":"Release General Overview","text":"

This section goes over, at a high level, how we compose releases for Rocky Linux. Most of our tools are home grown, and we have made sure they are open source and available in our git services.

This page should serve as an outline of the steps we generally take, and we hope that other projects who wish to use our tools can use them in this same way, whether they want to be an Enterprise Linux derivative or another project entirely.

"},{"location":"documentation/#build-system-and-tools","title":"Build System and Tools","text":"

The tools in use for the distribution are in the table below.

Tool Maintainer Code Location srpmproc SIG/Core at RESF GitHub empanadas SIG/Core at RESF sig-core-toolkit Peridot SIG/Core at RESF GitHub MirrorManager 2 Fedora Project MirrorManager2

For Rocky Linux to be built, we use Peridot as the build system and empanadas to "compose" the distribution. As we do not use Koji for Rocky Linux 9 and beyond, pungi can no longer be used. Peridot instead takes pungi configuration data and comps and transforms them into a format it can understand. Empanadas then comes in to do the "compose" and sync all the repositories down.

"},{"location":"documentation/#full-compose-major-or-minor-releases","title":"Full Compose (major or minor releases)","text":"

Step by step, it looks like this:

  • Distribution is built and maintained in Peridot
  • Comps and pungi configuration is converted into the peridot format for the project
  • Repositories are created in yumrepofs based on the configuration provided
  • A repoclosure is run against the repositories from empanadas to ensure there are no critical issues
  • In Parallel:

    • Repositories are synced as a \"full run\" in empanadas
    • Lorax is run using empanadas in the peridot cluster
  • Lorax results are pulled down from an S3 bucket

  • DVD images are built for each architecture
  • Compose directory is synced to staging for verification
  • Staging is synced to production to allow mirror syncing
  • Bit flip on release day
"},{"location":"documentation/#general-updates","title":"General Updates","text":"

Step by step, it looks like this:

  • Distribution is maintained in Peridot
  • Updates are built, repos are then \"hashed\" in yumrepofs
  • Empanadas syncs updates as needed, either per repo or all repos at once
  • Updates are synced to staging to be verified
  • Staging is synced to production to allow mirror syncing
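
As an illustration, an incremental update sync could look like the following, using the empanadas commands documented later in this wiki (the release number and repository shown are only examples):

# Sync only the BaseOS repository for Rocky Linux 9 after updates have been built\n% poetry run sync_from_peridot --release 9 --clean-old-packages --repo BaseOS\n\n# Or sync all repositories at once\n% poetry run sync_from_peridot --release 9 --clean-old-packages\n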
"},{"location":"documentation/empanadas/","title":"Empanadas","text":"

This page goes over empanadas, which is part of the SIG/Core toolkit. Empanadas assists SIG/Core in composing repositories, creating ISO's, creating images, and various other activities in Rocky Linux. It is also used for general testing and debugging of repositories and their metadata.

"},{"location":"documentation/empanadas/#contact-information","title":"Contact Information","text":"Owner SIG/Core (Release Engineering & Infrastructure) Email Contact releng@rockylinux.org Mattermost Contacts @label @neil Mattermost Channels ~Development"},{"location":"documentation/empanadas/#general-information","title":"General Information","text":"

empanadas is a python project using poetry, containing various built-in modules with the goal of emulating the Fedora Project's pungi to an extent. While it is not perfect, it achieves the basic goals of creating repositories, images, and ISO's for consumption by the end user. It also interacts with peridot, the build system used by the RESF to build the Rocky Linux distribution.

For syncs, it relies on podman to perform syncing in a parallel fashion. This was done because it is not possible to run multiple dnf transactions at once on a single system, and looping through one repository at a time is neither sustainable nor fast.

"},{"location":"documentation/empanadas/#requirements","title":"Requirements","text":"
  • Poetry must be installed on the system
  • Podman must be installed on the system
  • fpart must be installed on the system (available in EPEL on EL systems)
  • Enough storage should be available if repositories are being synced
  • mock must be installed if building live images
  • System must be an Enterprise Linux system or Fedora with the %rhel macro set
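
To quickly verify that the %rhel macro is set on the host, one simple check is the following (the example output assumes an EL9 host; an empty result means the macro is not defined):

% rpm --eval '%{?rhel}'\n9\n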
"},{"location":"documentation/empanadas/#features","title":"Features","text":"

As of this writing, empanadas has the following abilities:

  • Repository syncing via dnf from a peridot instance or applicable repos
  • Per profile dnf repoclosure checking for all applicable repos
  • Per profile dnf repoclosure checking for peridot instance repositories
  • Basic ISO Building via lorax
  • Extra ISO Building via xorriso for DVD and minimal images
  • Live ISO Building using livemedia-creator and mock
  • Anaconda treeinfo builder
  • Cloud Image builder
"},{"location":"documentation/empanadas/#installing-empanadas","title":"Installing Empanadas","text":"

The below is how to install empanadas from the development branch on a Fedora system.

% dnf install git podman fpart poetry mock -y\n% git clone https://git.resf.org/sig_core/toolkit.git -b devel\n% cd toolkit/iso/empanadas\n% poetry install\n
"},{"location":"documentation/empanadas/#configuring-empanadas","title":"Configuring Empanadas","text":"

How your configuration should be set up depends on how you are using empanadas.

  • empanadas/common.py
  • empanadas/config/*.yaml
  • empanadas/sig/*.yaml

These configuration files are delicate and control a wide variety of the moving parts of empanadas. As these configurations are fairly large, we recommend checking the reference guides for deeper details on configuring base distribution or "SIG" content.

"},{"location":"documentation/empanadas/#using-empanadas","title":"Using Empanadas","text":"

The most common way to use empanadas is to sync repositories from a peridot instance. This is performed upon each release or on each set of updates as they come from upstream. Below is how to use empanadas, as well as the common options.

Note that for each of these commands, it is fully expected you are running poetry run in the root of empanadas.

# Syncs all repositories for the \"9\" release\n% poetry run sync_from_peridot --release 9 --clean-old-packages\n\n# Syncs only the BaseOS repository without syncing sources\n% poetry run sync_from_peridot --release 9 --clean-old-packages --repo BaseOS --ignore-source\n\n# Syncs only AppStream for ppc64le\n% poetry run sync_from_peridot --release 9 --clean-old-packages --repo AppStream --arch ppc64le\n
Resources Account ServicesGit (RESF Git Service)Git (Rocky Linux GitHub)Git (Rocky Linux GitLab)Mail ListsContacts

URL: https://accounts.rockylinux.org

Purpose: Account Services maintains the accounts for almost all components of the Rocky ecosystem

Technology: Noggin used by Fedora Infrastructure

Contact: ~Infrastructure in Mattermost and #rockylinux-infra in Libera IRC

URL: https://git.resf.org

Purpose: General projects, code, and so on for the Rocky Enterprise Software Foundation.

Technology: Gitea

Contact: ~Infrastructure, ~Development in Mattermost and #rockylinux-infra, #rockylinux-devel in Libera IRC

URL: https://github.com/rocky-linux

Purpose: General purpose code, assets, and so on for Rocky Linux. Some content is mirrored to the RESF Git Service.

Technology: GitHub

Contact: ~Infrastructure, ~Development in Mattermost and #rockylinux-infra, #rockylinux-devel in Libera IRC

URL: https://git.rockylinux.org

Purpose: Packages and light code for the Rocky Linux distribution

Technology: GitLab

Contact: ~Infrastructure, ~Development in Mattermost and #rockylinux-infra, #rockylinux-devel in Libera IRC

URL: https://lists.resf.org

Purpose: Users can subscribe and interact with various mail lists for the Rocky ecosystem

Technology: Mailman 3 + Hyper Kitty

Contact: ~Infrastructure in Mattermost and #rockylinux-infra in Libera IRC

Name Email Mattermost Name IRC Name Louis Abel label@rockylinux.org @nazunalika Sokel/label/Sombra Mustafa Gezen mustafa@rockylinux.org @mustafa mstg Skip Grube skip@rockylinux.org @skip77 Sherif Nagy sherif@rockylinux.org @sherif Pablo Greco pgreco@rockylinux.org @pgreco pgreco Neil Hanlon neil@resf.org @neil neil Taylor Goodwill tg@resf.org @tgo tg"},{"location":"documentation/peridot/","title":"Peridot Build System","text":"

This page goes over the Peridot Build System and how SIG/Core utilizes it.

More to come.

"},{"location":"documentation/rebuild/","title":"Rebuild Version Bump","text":"

In some cases, a package has to be rebuilt. A package may be rebuilt for these reasons:

  • Underlying libraries have been rebased
  • ABI changes that require a rebuild (mass rebuilds, though they are rare)
  • New architecture added to a project

This typically applies to packages being built from a given src subgroup. Packages pulled from upstream don't fall into this category in normal circumstances. In those cases, they receive .0.1 and so on as standalone rebuilds, as illustrated below.
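
A purely illustrative (hypothetical) example of what such a standalone rebuild looks like in the resulting NVR, assuming the suffix keeps incrementing for further rebuilds:

# Original build imported from upstream (hypothetical package)\nexample-1.0-5.el9\n\n# First standalone rebuild of the same package\nexample-1.0-5.el9.0.1\n\n# A further standalone rebuild\nexample-1.0-5.el9.0.2\n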

"},{"location":"documentation/compose/","title":"Composing and Managing Releases","text":"

This section goes over the process of composing a release from a bunch of packages to repositories, to images. This section also goes over the basics of working with koji when necessary.

"},{"location":"documentation/compose/koji/","title":"Updates and Management in Koji, A Manual","text":"

More to come.

"},{"location":"documentation/references/","title":"References","text":"

Use this section to locate reference configuration items for the toolkit.

"},{"location":"documentation/references/empanadas_common/","title":"Empanadas common.py Configuration","text":"

The common.py configuration contains dictionaries and classes that dictate most of the functionality of empanadas.

"},{"location":"documentation/references/empanadas_common/#config-items","title":"Config Items","text":"

type: Dictionary

"},{"location":"documentation/references/empanadas_common/#configrlmacro","title":"config.rlmacro","text":"

type: String

required: True

description: Empanadas expects to run on an EL system; this value is part of that general check. It should not be hardcoded and should use the rpm python module.

"},{"location":"documentation/references/empanadas_common/#configdist","title":"config.dist","text":"

type: String

required: False

description: This was the original dist tag placed in mock configs. It combines el with the rpm python module expansion. It is no longer required, but the option is still available for future use.

"},{"location":"documentation/references/empanadas_common/#configarch","title":"config.arch","text":"

type: String

required: True

description: The architecture of the current running system. This is checked against the supported architectures in general release configurations. This should not be hardcoded.

"},{"location":"documentation/references/empanadas_common/#configdate_stamp","title":"config.date_stamp","text":"

type: String

required: True

description: Date time stamp in the form of YYYYMMDD.HHMMSS. This should not be hardcoded.

"},{"location":"documentation/references/empanadas_common/#configcompose_root","title":"config.compose_root","text":"

type: String

required: True

description: Root path of composes on the system running empanadas.

"},{"location":"documentation/references/empanadas_common/#configstaging_root","title":"config.staging_root","text":"

type: String

required: False

description: For future use. Root path of staging repository location where content will be synced to.

"},{"location":"documentation/references/empanadas_common/#configproduction_root","title":"config.production_root","text":"

type: String

required: False

description: For future use. Root path of production repository location where content will be synced to from staging.

"},{"location":"documentation/references/empanadas_common/#configcategory_stub","title":"config.category_stub","text":"

type: String

required: True

description: For future use. Stub path that is appended to staging_root and production_root.

example: mirror/pub/rocky

"},{"location":"documentation/references/empanadas_common/#configsig_category_stub","title":"config.sig_category_stub","text":"

type: String

required: True

description: For future use. Stub path that is appended to staging_root and production_root for SIG content.

example: mirror/pub/sig

"},{"location":"documentation/references/empanadas_common/#configrepo_base_url","title":"config.repo_base_url","text":"

type: String

required: True

description: The base URL where the repositories live. This is typically a peridot instance. It is supplemented by the project_id configuration parameter.

Note that this does not have to be a peridot instance. The combination of this value and project_id can be sufficient for empanadas to perform its work.

"},{"location":"documentation/references/empanadas_common/#configmock_work_root","title":"config.mock_work_root","text":"

type: String

required: True

description: Hardcoded path to where ISO work is performed within a mock chroot. This is the default path created by mock and it is recommended not to change this.

example: /builddir

"},{"location":"documentation/references/empanadas_common/#configcontainer","title":"config.container","text":"

type: String

required: True

description: This is the container used to perform all operations in podman.

example: centos:stream9

"},{"location":"documentation/references/empanadas_common/#configdistname","title":"config.distname","text":"

type: String

required: True

description: Name of the distribution you are building or building for.

example: Rocky Linux

"},{"location":"documentation/references/empanadas_common/#configshortname","title":"config.shortname","text":"

type: String

required: True

description: Short name of the distribution you are building or building for.

example: Rocky

"},{"location":"documentation/references/empanadas_common/#configtranslators","title":"config.translators","text":"

type: Dictionary

required: True

description: Translates Linux architectures to golang architectures. Reserved for future use.

"},{"location":"documentation/references/empanadas_common/#configaws_region","title":"config.aws_region","text":"

type: String

required: False

description: Region you are working in with AWS or onprem cloud that supports this variable.

example: us-east-2

"},{"location":"documentation/references/empanadas_common/#configbucket","title":"config.bucket","text":"

type: String

required: False

description: Name of the S3-compatible bucket that is used to pull images from. Requires aws_region.

"},{"location":"documentation/references/empanadas_common/#configbucket_url","title":"config.bucket_url","text":"

type: String

required: False

description: URL of the S3-compatible bucket that is used to pull images from.

"},{"location":"documentation/references/empanadas_common/#allowed_type_variants-items","title":"allowed_type_variants items","text":"

type: Dictionary

description: Key value pairs of cloud or image variants. The value is either None or a list type.

"},{"location":"documentation/references/empanadas_common/#reference-example","title":"Reference Example","text":"
config = {\n    \"rlmacro\": rpm.expandMacro('%rhel'),\n    \"dist\": 'el' + rpm.expandMacro('%rhel'),\n    \"arch\": platform.machine(),\n    \"date_stamp\": time.strftime(\"%Y%m%d.%H%M%S\", time.localtime()),\n    \"compose_root\": \"/mnt/compose\",\n    \"staging_root\": \"/mnt/repos-staging\",\n    \"production_root\": \"/mnt/repos-production\",\n    \"category_stub\": \"mirror/pub/rocky\",\n    \"sig_category_stub\": \"mirror/pub/sig\",\n    \"repo_base_url\": \"https://yumrepofs.build.resf.org/v1/projects\",\n    \"mock_work_root\": \"/builddir\",\n    \"container\": \"centos:stream9\",\n    \"distname\": \"Rocky Linux\",\n    \"shortname\": \"Rocky\",\n    \"translators\": {\n        \"x86_64\": \"amd64\",\n        \"aarch64\": \"arm64\",\n        \"ppc64le\": \"ppc64le\",\n        \"s390x\": \"s390x\",\n        \"i686\": \"386\"\n    },\n    \"aws_region\": \"us-east-2\",\n    \"bucket\": \"resf-empanadas\",\n    \"bucket_url\": \"https://resf-empanadas.s3.us-east-2.amazonaws.com\"\n}\n\nALLOWED_TYPE_VARIANTS = {\n        \"Azure\": None,\n        \"Container\": [\"Base\", \"Minimal\", \"UBI\"],\n        \"EC2\": None,\n        \"GenericCloud\": None,\n        \"Vagrant\": [\"Libvirt\", \"Vbox\"],\n        \"OCP\": None\n\n}\n
"},{"location":"documentation/references/empanadas_config/","title":"Empanadas config yaml Configuration","text":"

Each file in empanadas/config/ is a yaml file that contains configuration items for the distribution release version. The configuration heavily dictates the functionality and which features are directly supported by empanadas when run.

See the items below to see which options are mandatory and optional.

"},{"location":"documentation/references/empanadas_config/#config-items","title":"Config Items","text":""},{"location":"documentation/references/empanadas_config/#top-level","title":"Top Level","text":"

The Top Level is the name of the profile and starts the YAML dictionary for the release. It is alphanumeric and accepts punctuation within reason. Common examples:

  • 9
  • 9-beta
  • 8-lookahead
"},{"location":"documentation/references/empanadas_config/#fullname","title":"fullname","text":"

type: String

required: True

description: Needed for treeinfo and discinfo generation.

"},{"location":"documentation/references/empanadas_config/#revision","title":"revision","text":"

type: String

required: True

description: Full version of a release

"},{"location":"documentation/references/empanadas_config/#rclvl","title":"rclvl","text":"

type: String

required: True

description: Release Candidate or Beta descriptor. Sets names and versions with this descriptor if enabled.

"},{"location":"documentation/references/empanadas_config/#major","title":"major","text":"

type: String

required: True

description: Major version of a release

"},{"location":"documentation/references/empanadas_config/#minor","title":"minor","text":"

type: String

required: True

description: Minor version of a release

"},{"location":"documentation/references/empanadas_config/#profile","title":"profile","text":"

type: String

required: True

description: Matches the top level of the release. This should not differ from the top level assignment.

"},{"location":"documentation/references/empanadas_config/#disttag","title":"disttag","text":"

type: String

required: True

description: Sets the dist tag for mock configs.

"},{"location":"documentation/references/empanadas_config/#bugurl","title":"bugurl","text":"

type: String

required: True

description: A URL to the bug tracker for this release or distribution.

"},{"location":"documentation/references/empanadas_config/#checksum","title":"checksum","text":"

type: String

required: True

description: Checksum type. Used when generating checksum information for images.

"},{"location":"documentation/references/empanadas_config/#fedora_major","title":"fedora_major","text":"

type: String

required: False

description: For future use with icicle.

"},{"location":"documentation/references/empanadas_config/#allowed_arches","title":"allowed_arches","text":"

type: list

required: True

description: List of supported architectures for this release.

"},{"location":"documentation/references/empanadas_config/#provide_multilib","title":"provide_multilib","text":"

type: boolean

required: True

description: Sets if architecture x86_64 will be multilib. It is recommended that this is set to True.

"},{"location":"documentation/references/empanadas_config/#project_id","title":"project_id","text":"

type: String

required: True

description: Appended to the base repo URL in common.py. For peridot, it is the project id that is generated for the project you are pulling from. It can be set to anything else if need be for non-peridot use.

"},{"location":"documentation/references/empanadas_config/#repo_symlinks","title":"repo_symlinks","text":"

type: dict

required: False

description: For future use. Sets symlinks to repositories for backwards compatibility. Key value pairs only.

"},{"location":"documentation/references/empanadas_config/#renames","title":"renames","text":"

type: dict

required: False

description: Renames a repository to the value set. For example, renaming all to devel. Set to {} if no renames are going to occur.

"},{"location":"documentation/references/empanadas_config/#all_repos","title":"all_repos","text":"

type: list

required: True

description: List of repositories that will be synced/managed by empanadas.

"},{"location":"documentation/references/empanadas_config/#structure","title":"structure","text":"

type: dict

required: True

description: Key value pairs of packages and repodata. These are appended appropriately during syncing and ISO actions. Setting these is mandatory.

"},{"location":"documentation/references/empanadas_config/#iso_map","title":"iso_map","text":"

type: dictionary

required: True if building ISO's and operating with lorax.

description: Controls how lorax and extra ISO's are built.

If you are not building images, set this to {}

"},{"location":"documentation/references/empanadas_config/#xorrisofs","title":"xorrisofs","text":"

type: boolean

required: True

description: Dictates if xorrisofs is used to build images. Setting this to false uses genisoimage. It is recommended that xorrisofs is used.

"},{"location":"documentation/references/empanadas_config/#iso_level","title":"iso_level","text":"

type: boolean

required: True

description: Set to false if you are using xorrisofs. Can be set to true when using genisoimage.

"},{"location":"documentation/references/empanadas_config/#images","title":"images","text":"

type: dict

required: True

description: Dictates the ISO images that will be made or the treeinfo that will be generated.

Note: The primary repository (for example, BaseOS) will need to be listed to ensure the treeinfo data is correctly generated. disc should be set to False and isoskip should be set to True. See the example section for an example.

"},{"location":"documentation/references/empanadas_config/#namedisc","title":"name.disc","text":"

type: boolean

required: True

description: This tells the iso builder if this will be a generated ISO.

"},{"location":"documentation/references/empanadas_config/#nameisoskip","title":"name.isoskip","text":"

type: boolean

required: False

description: This tells the iso builder if this will be skipped, even if disc is set to True. Default is False.

"},{"location":"documentation/references/empanadas_config/#namevariant","title":"name.variant","text":"

type: string

required: True

description: Names the primary variant repository for the image. This is set in .treeinfo.

"},{"location":"documentation/references/empanadas_config/#namerepos","title":"name.repos","text":"

type: list

required: True

description: Names of the repositories included in the image. This is added to .treeinfo.

"},{"location":"documentation/references/empanadas_config/#namevolname","title":"name.volname","text":"

type: string

required: True

required value: dvd

description: This is required if building more than the DVD image. By default, the name dvd is hardcoded in the buildImage template.

"},{"location":"documentation/references/empanadas_config/#lorax","title":"lorax","text":"

type: dict

required: True if building lorax images.

description: Sets up lorax images and which repositories to use when building lorax images.

"},{"location":"documentation/references/empanadas_config/#loraxrepos","title":"lorax.repos","text":"

type: list

required: True

description: List of repos that are used to pull packages to build the lorax images.

"},{"location":"documentation/references/empanadas_config/#loraxvariant","title":"lorax.variant","text":"

type: string

required: True

description: Base repository for the release

"},{"location":"documentation/references/empanadas_config/#loraxlorax_removes","title":"lorax.lorax_removes","text":"

type: list

required: False

description: Excludes packages that are not needed when lorax is running.

"},{"location":"documentation/references/empanadas_config/#loraxrequired_pkgs","title":"lorax.required_pkgs","text":"

type: list

required: True

description: Required list of installed packages needed to build lorax images.

"},{"location":"documentation/references/empanadas_config/#livemap","title":"livemap","text":"

type: dict

required: False

description: Dictates what live images are built and how they are built.

"},{"location":"documentation/references/empanadas_config/#livemapgit_repo","title":"livemap.git_repo","text":"

type: string

required: True

description: The git repository URL where the kickstarts live

"},{"location":"documentation/references/empanadas_config/#livemapbranch","title":"livemap.branch","text":"

type: string

required: True

description: The branch being used for the kickstarts

"},{"location":"documentation/references/empanadas_config/#livemapksentry","title":"livemap.ksentry","text":"

type: dict

required: True

description: Key value pairs of the live images being created. Key being the name of the live image, value being the kickstart name/path.

"},{"location":"documentation/references/empanadas_config/#livemapallowed_arches","title":"livemap.allowed_arches","text":"

type: list

required: True

description: List of allowed architectures that will build for the live images.

"},{"location":"documentation/references/empanadas_config/#livemaprequired_pkgs","title":"livemap.required_pkgs","text":"

type: list

required: True

description: Required list of packages needed to build the live images.

"},{"location":"documentation/references/empanadas_config/#cloudimages","title":"cloudimages","text":"

type: dict

required: False

description: Cloud related settings.

Set to {} if not needed.

"},{"location":"documentation/references/empanadas_config/#cloudimagesimages","title":"cloudimages.images","text":"

type: dict

required: True

description: Cloud images that will be generated and placed in a bucket to be pulled, along with their format.

"},{"location":"documentation/references/empanadas_config/#cloudimagesimagesname","title":"cloudimages.images.name","text":"

type: dict

required: True

description: Name of the cloud image being pulled.

Accepted key value options:

  • format, which is raw, qcow2, vhd, tar.xz
  • variants, which is a list
  • primary_variant, which symlinks to the \"primary\" variant in the variant list
"},{"location":"documentation/references/empanadas_config/#repoclosure_map","title":"repoclosure_map","text":"

type: dict

required: True

description: Repoclosure settings. These settings are absolutely required when doing full syncs that need to check repositories for consistency.

"},{"location":"documentation/references/empanadas_config/#repoclosure_maparches","title":"repoclosure_map.arches","text":"

type: dict

required: True

description: For each architecture (key), dnf switches/settings that dictate how repoclosure will check for consistency (value, string).

example: x86_64: '--forcearch=x86_64 --arch=x86_64 --arch=athlon --arch=i686 --arch=i586 --arch=i486 --arch=i386 --arch=noarch'

"},{"location":"documentation/references/empanadas_config/#repoclosure_maprepos","title":"repoclosure_map.repos","text":"

type: dict

required: True

description: For each repository that is pulled for a given release (key), the repositories that will be included in the repoclosure check. A repository that only checks against itself must have a value of [].

"},{"location":"documentation/references/empanadas_config/#extra_files","title":"extra_files","text":"

type: dict

required: True

description: Extra files settings and where they come from. Git repositories are the only supported method.

"},{"location":"documentation/references/empanadas_config/#extra_filesgit_repo","title":"extra_files.git_repo","text":"

type: string

required: True

description: URL to the git repository with the extra files.

"},{"location":"documentation/references/empanadas_config/#extra_filesgit_raw_path","title":"extra_files.git_raw_path","text":"

type: string

required: True

description: URL to the git repository with the extra files, but the \"raw\" url form.

example: git_raw_path: 'https://git.rockylinux.org/staging/src/rocky-release/-/raw/r9/'

"},{"location":"documentation/references/empanadas_config/#extra_filesbranch","title":"extra_files.branch","text":"

type: string

required: True

description: Branch where the extra files are pulled from.

"},{"location":"documentation/references/empanadas_config/#extra_filesgpg","title":"extra_files.gpg","text":"

type: dict

required: True

description: For each gpg key type (key), the relative path to the key in the git repository (value).

These keys help set up the repository configuration when doing syncs.

By default, the RepoSync class sets stable as the gpgkey that is used.

"},{"location":"documentation/references/empanadas_config/#extra_fileslist","title":"extra_files.list","text":"

type: list

required: True

description: List of files from the git repository that will be used as "extra" files, placed in the repositories, made available to mirrors, and included on ISO images if applicable.

"},{"location":"documentation/references/empanadas_config/#reference-example","title":"Reference Example","text":"
---\n'9':\n  fullname: 'Rocky Linux 9.0'\n  revision: '9.0'\n  rclvl: 'RC2'\n  major: '9'\n  minor: '0'\n  profile: '9'\n  disttag: 'el9'\n  bugurl: 'https://bugs.rockylinux.org'\n  checksum: 'sha256'\n  fedora_major: '20'\n  allowed_arches:\n    - x86_64\n    - aarch64\n    - ppc64le\n    - s390x\n  provide_multilib: True\n  project_id: '55b17281-bc54-4929-8aca-a8a11d628738'\n  repo_symlinks:\n    NFV: 'nfv'\n  renames:\n    all: 'devel'\n  all_repos:\n    - 'all'\n    - 'BaseOS'\n    - 'AppStream'\n    - 'CRB'\n    - 'HighAvailability'\n    - 'ResilientStorage'\n    - 'RT'\n    - 'NFV'\n    - 'SAP'\n    - 'SAPHANA'\n    - 'extras'\n    - 'plus'\n  structure:\n    packages: 'os/Packages'\n    repodata: 'os/repodata'\n  iso_map:\n    xorrisofs: True\n    iso_level: False\n    images:\n      dvd:\n        disc: True\n        variant: 'AppStream'\n        repos:\n          - 'BaseOS'\n          - 'AppStream'\n      minimal:\n        disc: True\n        isoskip: True\n        repos:\n          - 'minimal'\n          - 'BaseOS'\n        variant: 'minimal'\n        volname: 'dvd'\n      BaseOS:\n        disc: False\n        isoskip: True\n        variant: 'BaseOS'\n        repos:\n          - 'BaseOS'\n          - 'AppStream'\n    lorax:\n      repos:\n        - 'BaseOS'\n        - 'AppStream'\n      variant: 'BaseOS'\n      lorax_removes:\n        - 'libreport-rhel-anaconda-bugzilla'\n      required_pkgs:\n        - 'lorax'\n        - 'genisoimage'\n        - 'isomd5sum'\n        - 'lorax-templates-rhel'\n        - 'lorax-templates-generic'\n        - 'xorriso'\n  cloudimages:\n    images:\n      EC2:\n        format: raw\n      GenericCloud:\n        format: qcow2\n  livemap:\n    git_repo: 'https://git.resf.org/sig_core/kickstarts.git'\n    branch: 'r9'\n    ksentry:\n      Workstation: rocky-live-workstation.ks\n      Workstation-Lite: rocky-live-workstation-lite.ks\n      XFCE: rocky-live-xfce.ks\n      KDE: rocky-live-kde.ks\n      MATE: rocky-live-mate.ks\n    allowed_arches:\n      - x86_64\n      - aarch64\n    required_pkgs:\n      - 'lorax-lmc-novirt'\n      - 'vim-minimal'\n      - 'pykickstart'\n      - 'git'\n  variantmap:\n    git_repo: 'https://git.rockylinux.org/rocky/pungi-rocky.git'\n    branch: 'r9'\n    git_raw_path: 'https://git.rockylinux.org/rocky/pungi-rocky/-/raw/r9/'\n  repoclosure_map:\n    arches:\n      x86_64: '--forcearch=x86_64 --arch=x86_64 --arch=athlon --arch=i686 --arch=i586 --arch=i486 --arch=i386 --arch=noarch'\n      aarch64: '--forcearch=aarch64 --arch=aarch64 --arch=noarch'\n      ppc64le: '--forcearch=ppc64le --arch=ppc64le --arch=noarch'\n      s390x: '--forcearch=s390x --arch=s390x --arch=noarch'\n    repos:\n      devel: []\n      BaseOS: []\n      AppStream:\n        - BaseOS\n      CRB:\n        - BaseOS\n        - AppStream\n      HighAvailability:\n        - BaseOS\n        - AppStream\n      ResilientStorage:\n        - BaseOS\n        - AppStream\n      RT:\n        - BaseOS\n        - AppStream\n      NFV:\n        - BaseOS\n        - AppStream\n      SAP:\n        - BaseOS\n        - AppStream\n        - HighAvailability\n      SAPHANA:\n        - BaseOS\n        - AppStream\n        - HighAvailability\n  extra_files:\n    git_repo: 'https://git.rockylinux.org/staging/src/rocky-release.git'\n    git_raw_path: 'https://git.rockylinux.org/staging/src/rocky-release/-/raw/r9/'\n    branch: 'r9'\n    gpg:\n      stable: 'SOURCES/RPM-GPG-KEY-Rocky-9'\n      testing: 'SOURCES/RPM-GPG-KEY-Rocky-9-Testing'\n    list:\n      - 'SOURCES/Contributors'\n 
     - 'SOURCES/COMMUNITY-CHARTER'\n      - 'SOURCES/EULA'\n      - 'SOURCES/LICENSE'\n      - 'SOURCES/RPM-GPG-KEY-Rocky-9'\n      - 'SOURCES/RPM-GPG-KEY-Rocky-9-Testing'\n...\n
"},{"location":"documentation/references/empanadas_sig_config/","title":"Empanadas SIG yaml Configuration","text":"

Each file in empanadas/sig/ is a yaml file that contains configuration items for the distribution release version. The configuration determines the structure of the SIG repositories synced from Peridot or a given repo.

Note that a release profile (for a major version) is still required for this sync to work.

See the items below to see which options are mandatory and optional.

"},{"location":"documentation/references/empanadas_sig_config/#config-items","title":"Config Items","text":""},{"location":"documentation/references/empanadas_sig_config/#reference-example","title":"Reference Example","text":""},{"location":"include/resources_bottom/","title":"Resources bottom","text":"Resources Account ServicesGit (RESF Git Service)Git (Rocky Linux GitHub)Git (Rocky Linux GitLab)Mail ListsContacts

URL: https://accounts.rockylinux.org

Purpose: Account Services maintains the accounts for almost all components of the Rocky ecosystem

Technology: Noggin used by Fedora Infrastructure

Contact: ~Infrastructure in Mattermost and #rockylinux-infra in Libera IRC

URL: https://git.resf.org

Purpose: General projects, code, and so on for the Rocky Enterprise Software Foundation.

Technology: Gitea

Contact: ~Infrastructure, ~Development in Mattermost and #rockylinux-infra, #rockylinux-devel in Libera IRC

URL: https://github.com/rocky-linux

Purpose: General purpose code, assets, and so on for Rocky Linux. Some content is mirrored to the RESF Git Service.

Technology: GitHub

Contact: ~Infrastructure, ~Development in Mattermost and #rockylinux-infra, #rockylinux-devel in Libera IRC

URL: https://git.rockylinux.org

Purpose: Packages and light code for the Rocky Linux distribution

Technology: GitLab

Contact: ~Infrastructure, ~Development in Mattermost and #rockylinux-infra, #rockylinux-devel in Libera IRC

URL: https://lists.resf.org

Purpose: Users can subscribe and interact with various mail lists for the Rocky ecosystem

Technology: Mailman 3 + Hyper Kitty

Contact: ~Infrastructure in Mattermost and #rockylinux-infra in Libera IRC

Name Email Mattermost Name IRC Name Louis Abel label@rockylinux.org @nazunalika Sokel/label/Sombra Mustafa Gezen mustafa@rockylinux.org @mustafa mstg Skip Grube skip@rockylinux.org @skip77 Sherif Nagy sherif@rockylinux.org @sherif Pablo Greco pgreco@rockylinux.org @pgreco pgreco Neil Hanlon neil@resf.org @neil neil Taylor Goodwill tg@resf.org @tgo tg"},{"location":"sop/","title":"SOP (Standard Operating Procedures)","text":"

This section goes over the various SOP's for SIG/Core. Please use the menu items to find the various pages of interest.

"},{"location":"sop/sop_compose/","title":"SOP: Compose and Repo Sync for Rocky Linux and Peridot","text":"

This SOP covers how the Rocky Linux Release Engineering Team handles composes and repository syncs for the distribution. It contains information on the scripts that are utilized and the order in which they are run, depending on the use case.

"},{"location":"sop/sop_compose/#contact-information","title":"Contact Information","text":"Owner Release Engineering Team Email Contact releng@rockylinux.org Email Contact infrastructure@rockylinux.org Mattermost Contacts @label @mustafa @neil @tgo Mattermost Channels ~Development"},{"location":"sop/sop_compose/#related-git-repositories","title":"Related Git Repositories","text":"

There are several git repositories used in the overall composition of a repository or a set of repositories.

Pungi - This repository contains all the necessary pungi configuration files that peridot translates into its own configuration. Pungi is no longer used for Rocky Linux.

Comps - This repository contains all the necessary comps (which are groups and other data) for a given major version. Peridot (and pungi) use this information to properly build repositories.

Toolkit - This repository contains various scripts and utilities used by Release Engineering, such as syncing composes, functionality testing, and mirror maintenance.

"},{"location":"sop/sop_compose/#composing-repositories","title":"Composing Repositories","text":""},{"location":"sop/sop_compose/#mount-structure","title":"Mount Structure","text":"

There is a designated system that takes care of composing repositories. This system contains the necessary EFS/NFS mounts for the staging and production repositories as well as composes.

  • /mnt/compose -> Compose data
  • /mnt/repos-staging -> Staging
  • /mnt/repos-production -> Production
"},{"location":"sop/sop_compose/#empanadas","title":"Empanadas","text":"

Each repository or set of repositories is controlled by various comps and pungi configurations that are translated into peridot. Empanadas is used to run a reposync from peridot's yumrepofs repositories, generate ISO's, and create a pungi compose look-a-like. Because of this, the comps and pungi-rocky configuration is not referenced with empanadas.

"},{"location":"sop/sop_compose/#running-a-compose","title":"Running a Compose","text":"

First, the toolkit must be cloned. In the iso/empanadas directory, run poetry install. You'll then have access to the various commands needed:

  • sync_from_peridot
  • build-iso
  • build-iso-extra
  • pull-unpack-tree
  • pull-cloud-image
  • finalize_compose

To perform a full compose, this order is expected (replacing X with the major version or config profile):

# This creates a brand new directory under /mnt/compose/X and symlinks it to latest-Rocky-X\npoetry run sync_from_peridot --release X --hashed --repoclosure --full-run\n\n# On each architecture, this must be run to generate the lorax images\n# !! Use --rc if the image is a release candidate or a beta image\n# Note: This is typically done using kubernetes and uploaded to a bucket\npoetry run build-iso --release X --isolation=None\n\n# The images are pulled from the bucket\npoetry run pull-unpack-tree --release X\n\n# The extra ISO's (usually just DVD) are generated\n# !! Use --rc if the image is a release candidate or a beta image\n# !! Set --extra-iso-mode to mock if desired\n# !! If there is more than the dvd, remove --extra-iso dvd\npoetry run build-iso-extra --release X --extra-iso dvd --extra-iso-mode podman\n\n# This pulls the generic and EC2 cloud images\npoetry run pull-cloud-image --release X\n\n# This ensures everything is closed out for a release. This copies iso's, images,\n# generates metadata, and the like.\n# !! DO NOT RUN DURING INCREMENTAL UPDATES !!\npoetry run finalize_compose --release X\n
"},{"location":"sop/sop_compose/#syncing-composes","title":"Syncing Composes","text":"

Syncing utilizes the sync scripts provided in the release engineering toolkit.

When the scripts are run, they are usually run with a specific purpose, as each major version may be different.

The below are common vars files. common_X will override what's in common. Typically these set what repositories exist and how they are named or look at the top level. These also set the current major.minor release as necessary.

.\n\u251c\u2500\u2500 common\n\u251c\u2500\u2500 common_8\n\u251c\u2500\u2500 common_9\n

These are for the releases in general. What they do is noted below.

\u251c\u2500\u2500 gen-torrents.sh                  -> Generates torrents for images\n\u251c\u2500\u2500 minor-release-sync-to-staging.sh -> Syncs a minor release to staging\n\u251c\u2500\u2500 prep-staging-X.sh                -> Preps staging updates and signs repos (only for 8)\n\u251c\u2500\u2500 sign-repos-only.sh               -> Signs the repomd (only for 8)\n\u251c\u2500\u2500 sync-file-list-parallel.sh       -> Generates file lists in parallel for mirror sync scripts\n\u251c\u2500\u2500 sync-to-prod.sh                  -> Syncs staging to production\n\u251c\u2500\u2500 sync-to-prod.delete.sh           -> Syncs staging to production (deletes artifacts that are no longer in staging)\n\u251c\u2500\u2500 sync-to-prod-sig.sh              -> Syncs a sig provided compose to production\n\u251c\u2500\u2500 sync-to-staging.sh               -> Syncs a provided compose to staging\n\u251c\u2500\u2500 sync-to-staging.delete.sh        -> Syncs a provided compose to staging (deletes artifacts that are no longer in the compose)\n\u251c\u2500\u2500 sync-to-staging-sig.sh           -> Syncs a sig provided compose to staging\n

Generally, you will only run sync-to-staging.sh or sync-to-staging.delete.sh to sync. The former is for older releases, the latter is for newer releases. Optionally, if you are syncing a \"beta\" or \"lookahead\" release, you will need to also provide the RLREL variable as beta or lookahead.

# The below syncs to staging for Rocky Linux 8\nRLVER=8 bash sync-to-staging.sh Rocky\n# The below syncs to staging for Rocky Linux 9\nRLVER=9 bash sync-to-staging.delete.sh Rocky\n
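
For a beta or lookahead release, the same invocation also provides the RLREL variable, for example (a sketch; the release version shown is only illustrative):

# Syncs a Rocky Linux 9 beta compose to staging\nRLREL=beta RLVER=9 bash sync-to-staging.delete.sh Rocky\n\n# Syncs a Rocky Linux 9 lookahead compose to staging\nRLREL=lookahead RLVER=9 bash sync-to-staging.delete.sh Rocky\n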

Once the syncs are done, staging must be tested and vetted before being sent to production. Once staging is completed, it is synced to production.

RLVER=8 bash sync-to-prod.sh\nRLVER=9 bash sync-to-prod.delete.sh\nbash sync-file-list-parallel.sh\n

During this phase, staging is rsynced with production, the file list is updated, and the full time list is also updated to allow mirrors to know that the repositories have been updated and that they can sync.

Note: If multiple releases are being updated, it is important to run the syncs to completion before running the file list parallel script.

"},{"location":"sop/sop_compose_8/","title":"SOP: Compose and Repo Sync for Rocky Linux 8","text":"

This SOP covers how the Rocky Linux Release Engineering Team handles composes and repository syncs for Rocky Linux 8. It contains information on the scripts that are utilized and the order in which they are run, depending on the use case.

Please see the other SOP for Rocky Linux 9+, which is managed via empanadas and peridot.

"},{"location":"sop/sop_compose_8/#contact-information","title":"Contact Information","text":"Owner Release Engineering Team Email Contact releng@rockylinux.org Email Contact infrastructure@rockylinux.org Mattermost Contacts @label @mustafa @neil @tgo Mattermost Channels ~Development"},{"location":"sop/sop_compose_8/#related-git-repositories","title":"Related Git Repositories","text":"

There are several git repositories used in the overall composition of a repository or a set of repositories.

Pungi - This repository contains all the necessary pungi configuration files for composes that come from koji. Pungi interacts with koji to build the composes.

Comps - This repository contains all the necessary comps (which are groups and other data) for a given major version. Pungi uses this information to properly build the repositories.

Toolkit - This repository contains various scripts and utilities used by Release Engineering, such as syncing composes, functionality testing, and mirror maintenance.

"},{"location":"sop/sop_compose_8/#composing-repositories","title":"Composing Repositories","text":""},{"location":"sop/sop_compose_8/#mount-structure","title":"Mount Structure","text":"

There is a designated system that takes care of composing repositories. This system contains the necessary EFS/NFS mounts for the staging and production repositories as well as composes.

  • /mnt/koji -> Koji files store
  • /mnt/compose -> Compose data
  • /mnt/repos-staging -> Staging
  • /mnt/repos-production -> Production
"},{"location":"sop/sop_compose_8/#pungi","title":"Pungi","text":"

Each repository or set of repositories is controlled by various pungi configurations. For example, r8.conf will control the absolute base of Rocky Linux 8, which imports other git repository data as well as accompanying json or other configuration files.

"},{"location":"sop/sop_compose_8/#running-a-compose","title":"Running a Compose","text":"

Inside the pungi git repository, the scripts folder contains the necessary scripts that are run to perform a compose. There are different types of composes:

  • produce -> Generates a full compose, generally used for minor releases, which generate new ISO's
  • update -> Generates a smaller compose, generally used for updates within a minor release cycle - ISO's are not generated

Each script is titled appropriately:

  • produce-X.sh -> Generates a full compose for X major release, typically set to the current minor release according to rX.conf
  • updates-X.sh -> Generates a smaller compose for X major release, typically set to the current minor release according to rX.conf
  • updates-X-NAME.sh -> Generates a compose for the specific compose, such as NFV, Rocky-devel, Extras, or Plus

When these scripts are run, they generate an appropriate directory under /mnt/compose/X with an accompanying symlink. For example, if an update to Rocky was made using updates-8.sh, the below would be created:

drwxr-xr-x. 5 root  root  6144 Jul 21 17:44 Rocky-8-updates-20210721.1\nlrwxrwxrwx. 1 root  root    26 Jul 21 18:26 latest-Rocky-8 -> Rocky-8-updates-20210721.1\n

This setup also allows pungi to reuse previous package set data to reduce the time it takes to build a compose. Typically during a new minor release, all composes should be run so they can be properly combined. Example of a typical order when releasing 8.X:

produce-8.sh\nupdates-8-devel.sh\nupdates-8-extras.sh\nupdates-8-plus.sh\n
"},{"location":"sop/sop_compose_8/#syncing-composes","title":"Syncing Composes","text":"

Syncing utilizes the sync scripts provided in the release engineering toolkit.

When the scripts are run, they are usually run for a specific purpose. They are also run in a certain order to ensure the integrity and consistency of a release.

The below are common vars files. common_X will override what's in common. Typically these set what repositories exist and how they are named or look at the top level. These also set the current major.minor release as necessary.

.\n\u251c\u2500\u2500 common\n\u251c\u2500\u2500 common_8\n\u251c\u2500\u2500 common_9\n

These are for the releases in general. What they do is noted below.

\u251c\u2500\u2500 gen-torrents.sh                  -> Generates torrents for images\n\u251c\u2500\u2500 minor-release-sync-to-staging.sh -> Syncs a minor release to staging\n\u251c\u2500\u2500 prep-staging-X.sh                -> Preps staging updates and signs repos\n\u251c\u2500\u2500 sign-repos-only.sh               -> Signs the repomd (only)\n\u251c\u2500\u2500 sync-to-prod.sh                  -> Syncs staging to production\n\u251c\u2500\u2500 sync-to-staging.sh               -> Syncs a provided compose to staging\n\u251c\u2500\u2500 sync-to-staging-sig.sh           -> Syncs a sig provided compose to staging\n

Generally, you will only run minor-release-sync-to-staging.sh when a full minor release is being produced. So for example, if 8.5 has been built out, you would run that after a compose. gen-torrents.sh would be run shortly after.

When doing updates, the order of operations (preferably) would be:

* sync-to-staging.sh\n* sync-to-staging-sig.sh -> Only if sigs are updated\n* prep-staging-8.sh      -> This is required to ensure the groups, comps, and\n                            module data stay sane. This helps us provide older\n                            packages in the repos as well as sign repo metadata.\n* sync-to-prod.sh        -> After the initial testing, it is sent to prod.\n

An example of order:

# The below syncs to staging\nRLVER=8 bash sync-to-staging.sh Plus\nRLVER=8 bash sync-to-staging.sh Extras\nRLVER=8 bash sync-to-staging.sh Rocky-devel\nRLVER=8 bash sync-to-staging.sh Rocky\nbash prep-staging-8.sh\n

Once the syncs are done, staging must be tested and vetted before being sent to production. During this stage, the updateinfo.xml is also applied where necessary to the repositories to provide errata. Once staging is completed, it is synced to production.

RLVER=8 bash sync-to-prod.sh\nbash sync-file-list-parallel.sh\n

During this phase, staging is rsynced with production, the file list is updated, and the full time list is also updated to allow mirrors to know that the repositories have been updated and that they can sync.

Note: If multiple releases are being updated, it is important to run the syncs to completion before running the file list parallel script.

"},{"location":"sop/sop_compose_sig/","title":"SOP: Compose and Repo Sync for Rocky Linux Special Interest Groups","text":"

This SOP covers how the Rocky Linux Release Engineering Team handles composes and repository syncs for Special Interest Groups.

"},{"location":"sop/sop_compose_sig/#contact-information","title":"Contact Information","text":"Owner Release Engineering Team Email Contact releng@rockylinux.org Email Contact infrastructure@rockylinux.org Mattermost Contacts @label @mustafa @neil @tgo Mattermost Channels ~Development"},{"location":"sop/sop_compose_sig/#composing-repositories","title":"Composing Repositories","text":""},{"location":"sop/sop_compose_sig/#mount-structure","title":"Mount Structure","text":"

There is a designated system that takes care of composing repositories. This system contains the necessary EFS/NFS mounts for the staging and production repositories as well as composes.

  • /mnt/compose -> Compose data
  • /mnt/repos-staging -> Staging
  • /mnt/repos-production -> Production
"},{"location":"sop/sop_compose_sig/#empanadas","title":"Empanadas","text":"

Each repository or set of repositories is controlled by various comps and pungi configurations that are translated into peridot. Empanadas is used to run a reposync from peridot's yumrepofs repositories, generate ISO's, and create a pungi compose look-a-like. Because of this, the comps and pungi-rocky configuration is not referenced with empanadas.

"},{"location":"sop/sop_compose_sig/#running-a-compose","title":"Running a Compose","text":"

First, the toolkit must be cloned. In the iso/empanadas directory, run poetry install. You'll then have access to the various commands needed:

  • sync_sig

To perform a compose of a SIG, it must be defined in the configuration. As an example, here is how to compose the core SIG.

# This creates a brand new directory under /mnt/compose/X and symlinks it to latest-SIG-Y-X\n~/.local/bin/poetry run sync_sig --release 9 --sig core --hashed --clean-old-packages --full-run\n\n# This assumes the directories already exist and will update in place.\n~/.local/bin/poetry run sync_sig --release 9 --sig core --hashed --clean-old-packages\n
"},{"location":"sop/sop_compose_sig/#syncing-composes","title":"Syncing Composes","text":"

Syncing utilizes the sync scripts provided in the release engineering toolkit.

When the scripts are run, they are usually run with a specific purpose, as each major version may be different.

For SIG's, the only files you'll need to know of are sync-to-staging-sig.sh and sync-to-prod-sig.sh. Both scripts will delete packages and data that are no longer in the compose.

# The below syncs the core 8 repos to staging\nRLVER=8 bash sync-to-staging-sig.sh core\n# The below syncs the core 9 repos to staging\nRLVER=9 bash sync-to-staging-sig.sh core\n\n# The below syncs everything in staging for 8 core to prod\nRLVER=8 bash sync-to-prod-sig.sh core\n\n# The below syncs everything in staging for 9 core to prod\nRLVER=9 bash sync-to-prod-sig.sh core\n

Once staging is completed and reviewed, it is synced to production.

bash sync-file-list-parallel.sh\n

During this phase, staging is rsynced to production, the file list is updated, and the full time list is also updated so that mirrors know the repositories have been updated and can sync.

"},{"location":"sop/sop_mirrormanager2/","title":"Mirror Manager Maintenance","text":"

This SOP contains most, if not all, of the information needed for SIG/Core to maintain and operate Mirror Manager for Rocky Linux.

"},{"location":"sop/sop_mirrormanager2/#contact-information","title":"Contact Information","text":"Owner SIG/Core (Release Engineering & Infrastructure) Email Contact infrastructure@rockylinux.org Email Contact releng@rockylinux.org Mattermost Contacts @label @neil @tgo Mattermost Channels ~Infrastructure"},{"location":"sop/sop_mirrormanager2/#introduction","title":"Introduction","text":"

So you made a bad decision and now have to do things to Mirror Manager. Good luck.

"},{"location":"sop/sop_mirrormanager2/#pieces","title":"Pieces","text":"Item Runs on... Software Mirrorlist Server mirrormanager001 https://github.com/adrianreber/mirrorlist-server/ Mirror Manager 2 mirrormanager001 https://github.com/fedora-infra/mirrormanager2"},{"location":"sop/sop_mirrormanager2/#mirrorlist-server","title":"Mirrorlist Server","text":"

This runs two (2) instances. Apache/httpd is configured to send /mirrorlist to one and /debuglist to the other.

  • Every fifteen (15) minutes: Mirrorlist cache is regenerated

    • This queries the database for active mirrors and other information and writes a protobuf. The mirrorlist-server reads the protobuf and responds accordingly.
  • Every twenty (20) minutes: Service hosting /mirrorlist is restarted

  • Every twenty-one (21) minutes: Service hosting /debuglist is restarted

Note that the timing for the restarts of the mirrorlist instances is arbitrary.
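
A sketch of the mirrormanager user's crontab for this cadence follows; the script path and service names are assumptions rather than values taken from the live host:

# Regenerate the mirrorlist cache (protobuf) every fifteen minutes\n*/15 * * * * /usr/local/bin/generate-mirrorlist-cache\n# Restart the /mirrorlist instance every twenty minutes\n*/20 * * * * sudo systemctl restart mirrorlist\n# Restart the /debuglist instance on a separate twenty-one minute cycle\n*/21 * * * * sudo systemctl restart debuglist\n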

"},{"location":"sop/sop_mirrormanager2/#mirror-manager-2","title":"Mirror Manager 2","text":"

This is a uwsgi service fronted by an apache/httpd instance. This is responsible for everything else that is not /mirrorlist or /debuglist. This allows the mirror managers to, well, manage their mirrors.

"},{"location":"sop/sop_mirrormanager2/#cdn","title":"CDN","text":"

Fastly sits in front of mirror manager. VPN is required to access the /admin endpoints.

If the backend of the CDN is down, it will attempt to guess what the user wanted to access and return a result pointing at dl.rockylinux.org. For example, a request for AppStream-8 and x86_64 will result in an AppStream/x86_64/os directory on dl.rockylinux.org. Note that this isn't perfect, but it helps during potential downtime or patching.
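
To see what a client receives, the mirrorlist endpoint can be queried directly; the query parameters below follow the usual repo=NAME-MAJOR and arch pattern and are an assumption rather than something defined in this SOP:

# Ask the mirrorlist for AppStream-8 on x86_64; during an outage the response falls back to dl.rockylinux.org\ncurl -s 'https://mirrors.rockylinux.org/mirrorlist?arch=x86_64&repo=AppStream-8'\n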

Fastly -> www firewall -> mirrormanager server\n

In reality, the flow is a lot more complex, and a diagram should be created to map it out in a more user-friendly manner (@TODO)

User -> Fastly -> AWS NLB over TLS, passthru -> www firewall cluster (decrypt TLS) -> mirrormanager server (Rocky CA TLS)\n
"},{"location":"sop/sop_mirrormanager2/#tasks","title":"Tasks","text":"

Below is a list of possible tasks to perform with mirror manager, depending on the scenario.

"},{"location":"sop/sop_mirrormanager2/#new-release","title":"New Release","text":"

Before performing the steps below, the following must be completed:

  • Production rsync endpoints should have all brand new content
  • New content root should be locked down to 750 (without this, mirror manager cannot view it)
  • Disable mirrormanager user cronjobs

  • Update the database with the new content. This is run on a schedule normally (see previous section) but can be done manually.

    a. As the mirror manager user, run the following:

/opt/mirrormanager/scan-primary-mirror-0.4.2/target/debug/scan-primary-mirror --debug --config $HOME/scan-primary-mirror.toml --category 'Rocky Linux'\n/opt/mirrormanager/scan-primary-mirror-0.4.2/target/debug/scan-primary-mirror --debug --config $HOME/scan-primary-mirror.toml --category 'Rocky Linux SIGs'\n
  1. Update the redirects for $reponame-$releasever

    a. Use psql to mirrormanager server: psql -U mirrormanager -W -h mirrormanager_db_host mirrormanager_db

    b. Confirm that all three columns are filled and that the second and third columns are identical:

    select rr.from_repo AS \"From Repo\", rr.to_repo AS \"To Repo\", r.prefix AS \"Target Repo\" FROM repository_redirect AS rr LEFT JOIN repository AS r ON rr.to_repo = r.prefix GROUP BY r.prefix, rr.to_repo, rr.from_repo ORDER BY r.prefix ASC;\n

    c. Change the majorversion redirects to point to the new point release, for example:

    update repository_redirect set to_repo = regexp_replace(to_repo, '9\.1', '9.2') where from_repo ~ '(\w+)-9-(debug|source)';\n

    d. Insert new redirects for the major version expected by the installer

    insert into repository_redirect (from_repo,to_repo) select REGEXP_REPLACE(rr.from_repo,'9\.1','9.2'),REGEXP_REPLACE(rr.to_repo,'9\.1','9.2') FROM repository_redirect AS rr WHERE from_repo ~ '(\w+)-9.1';\n
  2. Generate the mirrorlist cache, restart the debuglist, and verify.

Once the bitflip is initiated, restart mirrorlist and re-enable all cronjobs.
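
A rough sequence for this, with the service name and the way the cronjobs were disabled being assumptions:

# Restart the public /mirrorlist instance once the bitflip has happened\nsudo systemctl restart mirrorlist\n\n# Re-enable the mirrormanager user cronjobs that were disabled earlier\nsudo -u mirrormanager crontab -e   # uncomment or restore the entries disabled before the release\n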

"},{"location":"sop/sop_mirrormanager2/#out-of-date-mirrors","title":"Out-of-date Mirrors","text":"
  1. Get current shasum of repomd.xml. For example: shasum=$(curl https://dl.rockylinux.org/pub/rocky/9.0/BaseOS/x86_64/os/repodata/repomd.xml | sha256sum)
  2. Compare against latest propagation log:
ls -latr /var/log/mirrormanager/propagation/rocky-9.0-BaseOS-x86_64_propagation.log.*\n\nexport VER=9.0\nawk -v shasum=$(curl -s https://dl.rockylinux.org/pub/rocky/$VER/BaseOS/x86_64/os/repodata/repomd.xml | sha256sum | awk '{print $1}') -F'::' '{split($0,data,\":\")} {if ($4 != shasum) {print data[5], data[6], $2, $7}}' < $(find /var/log/mirrormanager/propagation/ -name \"rocky-${VER}-BaseOS-x86_64_propagation.log*\" -mtime -1 | tail -1)\n

This will generate a table. You can take the IDs in the first column and use the database to disable them by ID (table name: hosts) or go to https://mirrors.rockylinux.org/mirrormanager/host/ID and uncheck 'User active'.

Users can change user active, but they cannot change admin active. It is better to flip user active in this case.
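
If flipping it from the database instead of the web UI, a sketch of the update follows; the hosts table and user_active column names are inferred from the description above and should be confirmed against the actual schema:

# Disable a stale mirror by its ID (164 is only an example taken from the table below)\npsql -U mirrormanager -W -h mirrormanager_db_host mirrormanager_db -c 'update hosts set user_active = false where id = 164;'\n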

Admins can also view https://mirrors.rockylinux.org/mirrormanager/admin/all_sites if necessary.

Example of table columns:

Note

These mirrors are listed here solely as an example and not to call anyone out; every mirror shows up here at some point due to natural variations in how mirrors sync.

[mirrormanager@ord1-prod-mirrormanager001 propagation]$ awk -v shasum=$(curl -s https://dl.rockylinux.org/pub/rocky/9.0/BaseOS/x86_64/os/repodata/repomd.xml | sha256sum | awk '{print $1}') -F'::' '{split($0,data,\":\")} {if ($4 != shasum) {print data[5], data[6], $2, $7}}' < rocky-9.0-BaseOS-x86_64_propagation.log.1660611632 | column -t\n164  mirror.host.ag            http://mirror.host.ag/rocky/9.0/BaseOS/x86_64/os/repodata/repomd.xml             404\n173  rocky.centos-repo.net     http://rocky.centos-repo.net/9.0/BaseOS/x86_64/os/repodata/repomd.xml            403\n92   rocky.mirror.co.ge        http://rocky.mirror.co.ge/9.0/BaseOS/x86_64/os/repodata/repomd.xml               404\n289  mirror.vsys.host          http://mirror.vsys.host/rockylinux/9.0/BaseOS/x86_64/os/repodata/repomd.xml      404\n269  mirrors.rackbud.com       http://mirrors.rackbud.com/rocky/9.0/BaseOS/x86_64/os/repodata/repomd.xml        200\n295  mirror.ps.kz              http://mirror.ps.kz/rocky/9.0/BaseOS/x86_64/os/repodata/repomd.xml               200\n114  mirror.liteserver.nl      http://rockylinux.mirror.liteserver.nl/9.0/BaseOS/x86_64/os/repodata/repomd.xml  200\n275  mirror.upsi.edu.my        http://mirror.upsi.edu.my/rocky/9.0/BaseOS/x86_64/os/repodata/repomd.xml         200\n190  mirror.kku.ac.th          http://mirror.kku.ac.th/rocky-linux/9.0/BaseOS/x86_64/os/repodata/repomd.xml     404\n292  mirrors.cat.pdx.edu       http://mirrors.cat.pdx.edu/rocky/9.0/BaseOS/x86_64/os/repodata/repomd.xml        200\n370  mirrors.gbnetwork.com     http://mirrors.gbnetwork.com/rocky/9.0/BaseOS/x86_64/os/repodata/repomd.xml      404\n308  mirror.ihost.md           http://mirror.ihost.md/rockylinux/9.0/BaseOS/x86_64/os/repodata/repomd.xml       404\n87   mirror.freedif.org        http://mirror.freedif.org/Rocky/9.0/BaseOS/x86_64/os/repodata/repomd.xml         404\n194  mirrors.bestthaihost.com  http://mirrors.bestthaihost.com/rocky/9.0/BaseOS/x86_64/os/repodata/repomd.xml   404\n30   mirror.admax.se           http://mirror.admax.se/rocky/9.0/BaseOS/x86_64/os/repodata/repomd.xml            200\n195  mirror.uepg.br            http://mirror.uepg.br/rocky/9.0/BaseOS/x86_64/os/repodata/repomd.xml             404\n247  mirrors.ipserverone.com   http://mirrors.ipserverone.com/rocky/9.0/BaseOS/x86_64/os/repodata/repomd.xml    404'\n
"},{"location":"sop/sop_release/","title":"Rocky Release Procedures for SIG/Core (RelEng/Infrastructure)","text":"

This SOP contains all the steps required by SIG/Core (a mix of Release Engineering and Infrastructure) to perform releases of all Rocky Linux versions. This work is done in collaboration across the entire group of engineers.

"},{"location":"sop/sop_release/#contact-information","title":"Contact Information","text":"Owner SIG/Core (Release Engineering & Infrastructure) Email Contact infrastructure@rockylinux.org Email Contact releng@rockylinux.org Mattermost Contacts @label @neil @tgo @skip77 @mustafa @sherif @pgreco Mattermost Channels ~Infrastructure"},{"location":"sop/sop_release/#preparation","title":"Preparation","text":""},{"location":"sop/sop_release/#notes-about-release-day","title":"Notes about Release Day","text":"

A minimum of two (2) days before release day, the following should be true:

  1. Torrents should be set up. All files can be synced with the seed box(es) but not yet published. The data should be verified using sha256sum and compared to the CHECKSUM files provided with the files (see the example after this list).

  2. The website should be ready (typically with an open PR in GitHub). It should be verified that the design and content are correct and finalized.

  3. Enough mirrors should be set up. This essentially means that all content for a release should be synced to our primary mirror with the executable bit turned off, and the content should also be hard linked. In theory, mirror manager can be queried to verify whether mirrors are, or appear to be, in sync.
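
For item 1, verifying the seeded data can be as simple as running sha256sum in check mode against the provided CHECKSUM file (the path below is illustrative):

# From the directory on the seed box holding the images and the CHECKSUM file\ncd /path/to/seedbox/Rocky-x86_64/\nsha256sum -c CHECKSUM   # any non-checksum lines in the file produce ignorable warnings\n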

"},{"location":"sop/sop_release/#notes-about-patch-days","title":"Notes about Patch Days","text":"

A minimum of one (1) to two (2) days before patch day, the following should be true:

  1. Updates should be completed in the build system, and verified in staging.

  2. Updates should be sent to production and file lists updated to allow mirrors to sync.

"},{"location":"sop/sop_release/#prior-to-release-day-notes","title":"Prior to Release Day notes","text":"

Ensure the SIG/Core Checklist is read thoroughly and executed as listed.

"},{"location":"sop/sop_release/#release-day","title":"Release Day","text":""},{"location":"sop/sop_release/#priorities","title":"Priorities","text":"

During release day, these should be verified/completed in order:

  1. Website - The primary website and user landing page at rockylinux.org should allow the user to efficiently click through to a download link for an ISO, image, or torrent. It must be kept up.

  2. Torrent - The seed box(es) should be primed and ready to go for users downloading via torrent.

  3. Release Notes & Documentation - The release notes are often on the same website as the documentation. The main website and, where applicable, the docs should refer to the Release Notes of Rocky Linux.

  4. Wiki - If applicable, the necessary changes and resources should be available for a release. In particular, if a major release has new repos or changed repo names, this should be documented.

  5. Everything else!

"},{"location":"sop/sop_release/#resources","title":"Resources","text":""},{"location":"sop/sop_release/#sigcore-checklist","title":"SIG/Core Checklist","text":""},{"location":"sop/sop_release/#beta","title":"Beta","text":"
  • Compose Completed
  • Repoclosure must be checked and pass
  • Lorax Run
  • ISOs are built
  • Cloud Images built
  • Live Images built
  • Compose Synced to Staging
  • AWS/Azure Images in Marketplace
  • Vagrant Images
  • Container Images
  • Mirror Manager

    • Ready to Migrate from previous beta release (rltype=beta)
    • Boot image install migration from previous beta release
  • Pass image to Testing Team for final validation

"},{"location":"sop/sop_release/#release-candidate","title":"Release Candidate","text":"
  • Compose Completed
  • Repoclosure must be checked and pass
  • Lorax Run
  • ISOs are built
  • Cloud Images built
  • Live Images built
  • Compose Synced to Staging
  • AWS/Azure Images in Marketplace
  • Vagrant Images
  • Container Images
  • Mirror Manager

    • Ready to Migrate from previous release
    • Boot image install migration from previous release
  • Pass image to Testing Team for validation

"},{"location":"sop/sop_release/#final","title":"Final","text":"
  • Compose Completed
  • Repoclosure must be checked and pass
  • Lorax Run
  • ISOs are built
  • Cloud Images built
  • Live Images built
  • Compose Synced to Staging
  • AWS/Azure Images in Marketplace
  • Vagrant Images
  • Container Images
  • Mirror Manager

    • Ready to Migrate from previous release
    • Boot image install migration from previous release
  • Pass image to Testing Team for final validation

  • Sync to Production
  • Sync to Europe Mirror if applicable
  • Hardlink Run
  • Bitflip after 24-48 Hours
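
For reference, the hardlink run and the bitflip are both plain filesystem operations on the production tree; the paths below are assumptions based on the mount layout described in the compose SOPs, and the exact directories flipped should be confirmed before running anything:

# Deduplicate identical files across the new release tree\nhardlink -v /mnt/repos-production/rocky/9.2\n\n# Bitflip: restore the execute bit so the content becomes world-traversable\nchmod o+x /mnt/repos-production/rocky/9.2\n
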
Resources: Account Services, Git (RESF Git Service), Git (Rocky Linux GitHub), Git (Rocky Linux GitLab), Mail Lists, Contacts

URL: https://accounts.rockylinux.org

Purpose: Account Services maintains the accounts for almost all components of the Rocky ecosystem

Technology: Noggin used by Fedora Infrastructure

Contact: ~Infrastructure in Mattermost and #rockylinux-infra in Libera IRC

URL: https://git.resf.org

Purpose: General projects, code, and so on for the Rocky Enterprise Software Foundation.

Technology: Gitea

Contact: ~Infrastructure, ~Development in Mattermost and #rockylinux-infra, #rockylinux-devel in Libera IRC

URL: https://github.com/rocky-linux

Purpose: General purpose code, assets, and so on for Rocky Linux. Some content is mirrored to the RESF Git Service.

Technology: GitHub

Contact: ~Infrastructure, ~Development in Mattermost and #rockylinux-infra, #rockylinux-devel in Libera IRC

URL: https://git.rockylinux.org

Purpose: Packages and light code for the Rocky Linux distribution

Technology: GitLab

Contact: ~Infrastructure, ~Development in Mattermost and #rockylinux-infra, #rockylinux-devel in Libera IRC

URL: https://lists.resf.org

Purpose: Users can subscribe and interact with various mail lists for the Rocky ecosystem

Technology: Mailman 3 + Hyper Kitty

Contact: ~Infrastructure in Mattermost and #rockylinux-infra in Libera IRC

Name Email Mattermost Name IRC Name Louis Abel label@rockylinux.org @nazunalika Sokel/label/Sombra Mustafa Gezen mustafa@rockylinux.org @mustafa mstg Skip Grube skip@rockylinux.org @skip77 Sherif Nagy sherif@rockylinux.org @sherif Pablo Greco pgreco@rockylinux.org @pgreco pgreco Neil Hanlon neil@resf.org @neil neil Taylor Goodwill tg@resf.org @tgo tg"},{"location":"sop/sop_upstream_prep_checklist/","title":"Generalized Prep Checklist for Upcoming Releases","text":"

This SOP contains general checklists required by SIG/Core to prepare and plan for an upcoming release. This work is required on a routine basis, even months before the next major or minor release, as it requires monitoring upstream (CentOS Stream) to ensure Rocky Linux remains ready and compatible with Red Hat Enterprise Linux.

"},{"location":"sop/sop_upstream_prep_checklist/#contact-information","title":"Contact Information","text":"Owner SIG/Core (Release Engineering & Infrastructure) Email Contact infrastructure@rockylinux.org Email Contact releng@rockylinux.org Mattermost Contacts @label @neil @tgo @skip77 @mustafa @sherif @pgreco Mattermost Channels ~Infrastructure"},{"location":"sop/sop_upstream_prep_checklist/#general-upstream-monitoring","title":"General Upstream Monitoring","text":"

SIG/Core is expected to monitor the following repositories upstream, as these indicate what is coming up for a given major or point release. These repositories are found in the Red Hat GitLab.

  • centos-release
  • centos-logos
  • pungi-centos
  • comps
  • module-defaults

These repositories can be monitored by setting the notification bell icon to \"all activity\".

Upon changes to the upstream repositories, a SIG/Core member should analyze the changes and apply the same to the lookahead branches:

  • rocky-release

    • Manual changes required
  • rocky-logos

    • Manual changes required
  • pungi-rocky

    • Run sync-from-upstream
  • peridot-rocky

    • Configurations are generated using peridot tools
  • comps

    • Run sync-from-upstream
  • rocky-module-defaults

    • Run sync-from-upstream
"},{"location":"sop/sop_upstream_prep_checklist/#general-downward-merging","title":"General Downward Merging","text":"

Repositories that generally track for LookAhead and Beta releases will flow downward to the stable branch. For example:

* rXs / rXlh\n      |\n      |----> rX-beta\n                |\n                |----> rX\n

This applies to any specific Rocky repo, such as comps, pungi, peridot-config, and so on. As it is expected that some repos will deviate in commit history, it is OK to force push, under the assumption that changes made in the lower branch exist in the upper branch. That way you can avoid changes/functionality being reverted by accident.
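
A sketch of that flow for a Rocky 9 repository, using branch names taken from the diagram above (actual branch names per repo may differ):

# Merge the lookahead branch down into the beta branch\ngit checkout r9-beta\ngit merge r9s\ngit push origin r9-beta\n\n# Then merge beta down into stable; force push only when histories have deviated\n# and everything in the lower branch already exists in the upper branch\ngit checkout r9\ngit merge r9-beta\ngit push --force-with-lease origin r9\n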

"},{"location":"sop/sop_upstream_prep_checklist/#general-package-patching","title":"General Package Patching","text":"

There are packages that are typically patched for the purpose of debranding. A list of patched packages is typically maintained in a metadata repository. The obvious ones are listed below and should be monitored and maintained properly:

  • abrt
  • anaconda
  • anaconda-user-help
  • chrony
  • cockpit
  • dhcp
  • dnf
  • firefox
  • fwupd
  • gcc
  • gnome-session
  • gnome-settings-daemon
  • grub2
  • initial-setup
  • kernel
  • kernel-rt
  • libdnf
  • libreoffice
  • libreport
  • lorax-templates-rhel
  • nginx
  • opa-ff
  • opa-fm
  • openldap
  • openscap
  • osbuild
  • osbuild-composer
  • PackageKit
  • pesign
  • python-pip
  • redhat-rpm-config
  • scap-security-guide
  • shim
  • shim-unsigned-x64
  • shim-unsigned-aarch64
  • subscription-manager
  • systemd
  • thunderbird
"}]} \ No newline at end of file diff --git a/sitemap.xml b/sitemap.xml index 05b5a49..2620d7c 100644 --- a/sitemap.xml +++ b/sitemap.xml @@ -2,107 +2,107 @@ https://sig-core.rocky.page/ - 2023-04-15 + 2023-05-16 daily https://sig-core.rocky.page/members/ - 2023-04-15 + 2023-05-16 daily https://sig-core.rocky.page/what_we_do/ - 2023-04-15 + 2023-05-16 daily https://sig-core.rocky.page/documentation/ - 2023-04-15 + 2023-05-16 daily https://sig-core.rocky.page/documentation/empanadas/ - 2023-04-15 + 2023-05-16 daily https://sig-core.rocky.page/documentation/peridot/ - 2023-04-15 + 2023-05-16 daily https://sig-core.rocky.page/documentation/rebuild/ - 2023-04-15 + 2023-05-16 daily https://sig-core.rocky.page/documentation/compose/ - 2023-04-15 + 2023-05-16 daily https://sig-core.rocky.page/documentation/compose/koji/ - 2023-04-15 + 2023-05-16 daily https://sig-core.rocky.page/documentation/references/ - 2023-04-15 + 2023-05-16 daily https://sig-core.rocky.page/documentation/references/empanadas_common/ - 2023-04-15 + 2023-05-16 daily https://sig-core.rocky.page/documentation/references/empanadas_config/ - 2023-04-15 + 2023-05-16 daily https://sig-core.rocky.page/documentation/references/empanadas_sig_config/ - 2023-04-15 + 2023-05-16 daily https://sig-core.rocky.page/include/resources_bottom/ - 2023-04-15 + 2023-05-16 daily https://sig-core.rocky.page/sop/ - 2023-04-15 + 2023-05-16 daily https://sig-core.rocky.page/sop/sop_compose/ - 2023-04-15 + 2023-05-16 daily https://sig-core.rocky.page/sop/sop_compose_8/ - 2023-04-15 + 2023-05-16 daily https://sig-core.rocky.page/sop/sop_compose_sig/ - 2023-04-15 + 2023-05-16 daily https://sig-core.rocky.page/sop/sop_mirrormanager2/ - 2023-04-15 + 2023-05-16 daily https://sig-core.rocky.page/sop/sop_release/ - 2023-04-15 + 2023-05-16 daily https://sig-core.rocky.page/sop/sop_upstream_prep_checklist/ - 2023-04-15 + 2023-05-16 daily \ No newline at end of file diff --git a/sitemap.xml.gz b/sitemap.xml.gz index 82128c9..b55cf0b 100644 Binary files a/sitemap.xml.gz and b/sitemap.xml.gz differ diff --git a/sop/index.html b/sop/index.html index 1d5b0b9..ffcbdd6 100644 --- a/sop/index.html +++ b/sop/index.html @@ -19,7 +19,7 @@ - + @@ -27,10 +27,10 @@ - + - + @@ -111,25 +111,31 @@ -