Skip to content

Commit

Permalink
Merge branch 'uyuni-project:master' into qe-get-rid-of-tools-pool
Browse files Browse the repository at this point in the history
  • Loading branch information
srbarrios authored Jan 30, 2025
2 parents 732e46c + 064627e commit df35611
Show file tree
Hide file tree
Showing 338 changed files with 9,239 additions and 53 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/build_containers.yml
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,7 @@ jobs:
labels: ${{ steps.meta.outputs.labels }}
build-args: |
BASE=registry.opensuse.org/uyuni/server
VERSION=2024.08
VERSION=2024.12
build-and-push-ubuntu-minion-image:
runs-on: ubuntu-latest
permissions:
Expand Down
17 changes: 8 additions & 9 deletions java/code/src/com/redhat/rhn/manager/org/CreateOrgCommand.java
Original file line number Diff line number Diff line change
Expand Up @@ -25,14 +25,16 @@
import com.redhat.rhn.domain.user.User;
import com.redhat.rhn.manager.kickstart.crypto.CreateCryptoKeyCommand;
import com.redhat.rhn.manager.user.CreateUserCommand;
import com.redhat.rhn.taskomatic.TaskomaticApi;
import com.redhat.rhn.taskomatic.TaskomaticApiException;
import com.redhat.rhn.taskomatic.NoSuchBunchTaskException;
import com.redhat.rhn.taskomatic.TaskoFactory;

import com.suse.manager.webui.services.SaltStateGeneratorService;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.quartz.SchedulerException;

import java.util.Date;
import java.util.HashMap;
import java.util.Map;

Expand Down Expand Up @@ -164,7 +166,6 @@ public ValidatorError[] store() {
CryptoKey ssl = KickstartFactory.lookupCryptoKey("RHN-ORG-TRUSTED-SSL-CERT",
defaultOrg);
if (ssl != null && ssl.isSSL()) {
// TODO
log.debug("Found a SSL key for the default org to copy: {}", ssl.getId());
CreateCryptoKeyCommand createCryptoKey =
new CreateCryptoKeyCommand(createdOrg);
Expand All @@ -177,15 +178,13 @@ public ValidatorError[] store() {
ChannelFamilyFactory.lookupOrCreatePrivateFamily(createdOrg);

if (firstOrg) {
Map<String, String> params = new HashMap<>();
Map<String, Object> params = new HashMap<>();
params.put("noRepoSync", "true");
try {
new TaskomaticApi().scheduleSingleSatBunch(newUser,
"mgr-sync-refresh-bunch", params);
TaskoFactory.addSingleBunchRun(null, "mgr-sync-refresh-bunch", params, new Date());
}
catch (TaskomaticApiException e) {
log.error("Problem when running Taskomatic mgr-sync-refresh job: {}", e.getMessage());
// FIXME: return validator error ?
catch (NoSuchBunchTaskException | SchedulerException e) {
log.error("Failed to schedule mgr-sync-refresh job: {}", e.getMessage());
}
}
return null;
Expand Down
74 changes: 74 additions & 0 deletions java/code/src/com/redhat/rhn/taskomatic/TaskoFactory.java
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,10 @@
package com.redhat.rhn.taskomatic;


import static org.quartz.TriggerKey.triggerKey;

import com.redhat.rhn.common.hibernate.HibernateFactory;
import com.redhat.rhn.taskomatic.core.SchedulerKernel;
import com.redhat.rhn.taskomatic.domain.TaskoBunch;
import com.redhat.rhn.taskomatic.domain.TaskoRun;
import com.redhat.rhn.taskomatic.domain.TaskoSchedule;
Expand Down Expand Up @@ -503,4 +506,75 @@ public static List<TaskoSchedule> listRepoSyncSchedulesNewerThan(Date date) {
return singleton.listObjectsByNamedQuery("TaskoSchedule.listNewerThanByBunch",
Map.of("bunch_id", bunch.getId(), "date", date));
}

/**
 * Resolve a bunch by name, scoped to an organization when one is given.
 *
 * @param orgId the organization ID, or null to look up a satellite-level bunch
 * @param bunchName the name of the bunch to resolve
 * @return the matching bunch, never null
 * @throws NoSuchBunchTaskException if no bunch with that name exists
 */
protected static TaskoBunch checkBunchName(Integer orgId, String bunchName) throws NoSuchBunchTaskException {
    TaskoBunch resolved = (orgId == null) ?
            TaskoFactory.lookupSatBunchByName(bunchName) :
            TaskoFactory.lookupOrgBunchByName(bunchName);
    if (resolved == null) {
        throw new NoSuchBunchTaskException(bunchName);
    }
    return resolved;
}

/**
 * Build a job label that is unique among single runs of the given bunch:
 * the first "single-&lt;bunchName&gt;-&lt;n&gt;" that is neither stored as a
 * schedule nor known to the Quartz scheduler as a trigger.
 *
 * @param orgId the organization ID for the job, or null for satellite scope
 * @param bunchName the bunch name
 * @return the unique job label
 * @throws SchedulerException in case of internal scheduler error
 */
protected static String getUniqueSingleJobLabel(Integer orgId, String bunchName) throws SchedulerException {
    String prefix = "single-" + bunchName + "-";
    for (int suffix = 0; ; suffix++) {
        String candidate = prefix + suffix;
        boolean usedInDb = !TaskoFactory.listSchedulesByOrgAndLabel(orgId, candidate).isEmpty();
        // The scheduler may be absent (e.g. during setup); treat that as "no trigger".
        boolean usedInScheduler = SchedulerKernel.getScheduler() != null &&
                SchedulerKernel.getScheduler()
                        .getTrigger(triggerKey(candidate, TaskoQuartzHelper.getGroupName(orgId))) != null;
        if (!usedInDb && !usedInScheduler) {
            return candidate;
        }
    }
}

/**
 * Create (or update) a single bunch run in the database so that taskomatic
 * picks it up, without requiring the taskomatic API to be reachable.
 *
 * @param orgId the organization ID, or null for a satellite-level bunch
 * @param bunchName the bunch name
 * @param params the job parameters
 * @param start the start date of the job
 * @throws NoSuchBunchTaskException if the bunchName doesn't refer to an existing bunch
 * @throws SchedulerException for internal scheduler errors
 */
public static void addSingleBunchRun(Integer orgId, String bunchName, Map<String, Object> params, Date start)
    throws NoSuchBunchTaskException, SchedulerException {
    TaskoBunch bunch = checkBunchName(orgId, bunchName);
    // Pass orgId through so label uniqueness is checked in the same scope the
    // schedule is created for (previously null was passed, ignoring orgId).
    String jobLabel = getUniqueSingleJobLabel(orgId, bunchName);
    List<TaskoSchedule> taskoSchedules = TaskoFactory.listScheduleByLabel(jobLabel);

    TaskoSchedule schedule;
    if (taskoSchedules.isEmpty()) {
        // no schedule with this label yet: create one
        schedule = new TaskoSchedule(orgId, bunch, jobLabel, params, start, null, null);
    }
    else {
        // a schedule with this label already exists: reuse and update it
        schedule = taskoSchedules.get(0);
        schedule.setBunch(bunch);
        schedule.setDataMap(params);
        schedule.setActiveFrom(start);
    }
    // Don't set active till until the job actually runs.
    schedule.setActiveTill(null);
    TaskoFactory.save(schedule);
    HibernateFactory.commitTransaction();
    log.info("Schedule created for {}.", jobLabel);
}
}
36 changes: 4 additions & 32 deletions java/code/src/com/redhat/rhn/taskomatic/TaskoXmlRpcHandler.java
Original file line number Diff line number Diff line change
Expand Up @@ -202,7 +202,7 @@ public Date scheduleSatBunch(String bunchName, String jobLabel,
private TaskoBunch doBasicCheck(Integer orgId, String bunchName, String jobLabel)
throws NoSuchBunchTaskException, InvalidParamException, SchedulerException {

TaskoBunch bunch = checkBunchName(orgId, bunchName);
TaskoBunch bunch = TaskoFactory.checkBunchName(orgId, bunchName);
isAlreadyScheduled(orgId, jobLabel);
return bunch;
}
Expand Down Expand Up @@ -307,7 +307,7 @@ public List<Date> scheduleRuns(String bunchName, String jobLabel, List<Map<?, ?
public Date scheduleSingleBunchRun(Integer orgId, String bunchName, Map params, Date start)
throws NoSuchBunchTaskException, InvalidParamException, SchedulerException {

String jobLabel = getUniqueSingleJobLabel(orgId, bunchName);
String jobLabel = TaskoFactory.getUniqueSingleJobLabel(orgId, bunchName);
return scheduleSingleBunchRun(orgId, bunchName, jobLabel, params, start);
}

Expand Down Expand Up @@ -367,13 +367,12 @@ public Date scheduleSingleBunchRun(Integer orgId, String bunchName, String jobLa
* @return List of scheduled dates
* @throws NoSuchBunchTaskException thrown if bunch name not known
*/
public List<Date> scheduleRuns(Integer orgId, String bunchName, String jobLabel, List<Map<?, ?>> paramsList)
throws NoSuchBunchTaskException, InvalidParamException {
throws NoSuchBunchTaskException {

List<Date> scheduleDates = new ArrayList<>();
TaskoBunch bunch = checkBunchName(orgId, bunchName);
TaskoBunch bunch = TaskoFactory.checkBunchName(orgId, bunchName);
for (Map params : paramsList) {
String label = getJobLabel(params, jobLabel);

Expand Down Expand Up @@ -454,33 +453,6 @@ public Date scheduleSingleSatBunchRun(String bunchName, Map params)
return scheduleSingleBunchRun(null, bunchName, params, new Date());
}

private String getUniqueSingleJobLabel(Integer orgId, String bunchName) throws SchedulerException {
String jobLabel = "single-" + bunchName + "-";
int count = 0;
while (!TaskoFactory.listSchedulesByOrgAndLabel(orgId, jobLabel + count)
.isEmpty() ||
(SchedulerKernel.getScheduler()
.getTrigger(triggerKey(jobLabel + count,
TaskoQuartzHelper.getGroupName(orgId))) != null)) {
count++;
}
return jobLabel + count;
}

private TaskoBunch checkBunchName(Integer orgId, String bunchName) throws NoSuchBunchTaskException {
TaskoBunch bunch = null;
if (orgId == null) {
bunch = TaskoFactory.lookupSatBunchByName(bunchName);
}
else {
bunch = TaskoFactory.lookupOrgBunchByName(bunchName);
}
if (bunch == null) {
throw new NoSuchBunchTaskException(bunchName);
}
return bunch;
}

/**
* lists all organizational schedules
* @param orgId organization id
Expand Down
6 changes: 6 additions & 0 deletions java/spacewalk-java.changes
Original file line number Diff line number Diff line change
@@ -1,3 +1,9 @@
-------------------------------------------------------------------
Wed Jan 29 15:40:55 CET 2025 - [email protected]

- version 5.1.8-0
* Don't require taskomatic during the setup

-------------------------------------------------------------------
Fri Jan 24 14:20:49 CET 2025 - [email protected]

Expand Down
2 changes: 1 addition & 1 deletion java/spacewalk-java.spec
Original file line number Diff line number Diff line change
Expand Up @@ -57,7 +57,7 @@
%endif

Name: spacewalk-java
Version: 5.1.7
Version: 5.1.8
Release: 0
Summary: Java web application files for Spacewalk
License: GPL-2.0-only
Expand Down
2 changes: 1 addition & 1 deletion rel-eng/packages/spacewalk-java
Original file line number Diff line number Diff line change
@@ -1 +1 @@
5.1.7-0 java/
5.1.8-0 java/
3 changes: 2 additions & 1 deletion testsuite/dockerfiles/buildhost/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ RUN zypper -n ar --no-gpgcheck https://download.opensuse.org/repositories/system
zypper -n ar --no-gpgcheck https://download.opensuse.org/repositories/systemsmanagement:/Uyuni:/Stable:/openSUSE_Leap_15-Uyuni-Client-Tools/openSUSE_Leap_15.0/ tools_pool_repo && \
zypper ref -f && \
zypper -n install openssh-server openssh-clients hostname iproute2 venv-salt-minion andromeda-dummy milkyway-dummy virgo-dummy openscap-utils openscap-content scap-security-guide gzip udev dmidecode tar \
golang-github-prometheus-prometheus golang-github-prometheus-alertmanager prometheus-blackbox_exporter golang-github-prometheus-node_exporter golang-github-lusitaniae-apache_exporter prometheus-postgres_exporter golang-github-QubitProducts-exporter_exporter golang-github-prometheus-promu ansible && \
golang-github-prometheus-prometheus golang-github-prometheus-alertmanager prometheus-blackbox_exporter golang-github-prometheus-node_exporter golang-github-lusitaniae-apache_exporter prometheus-postgres_exporter golang-github-QubitProducts-exporter_exporter golang-github-prometheus-promu ansible git python3-kiwi && \
zypper clean -a
RUN zypper -n ar --no-gpgcheck https://download.opensuse.org/repositories/systemsmanagement:/Uyuni:/Test-Packages:/Pool/rpm/ test_repo_rpm_pool
# specific to buildhost
Expand All @@ -13,5 +13,6 @@ RUN zypper ref -f && \
&& \
zypper clean -a
COPY etc_pam.d_sshd /etc/pam.d/sshd
RUN mkdir -p /var/lib/Kiwi
CMD ssh-keygen -A && /usr/sbin/sshd -De

8 changes: 4 additions & 4 deletions testsuite/dockerfiles/server-all-in-one-dev/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,6 @@ FROM ${BASE}:${VERSION}
RUN zypper -n --gpg-auto-import-keys ref && \
zypper -n install \
java-17-openjdk-devel \
openssh \
rsync \
apache-ivy \
ant \
Expand All @@ -14,10 +13,10 @@ RUN zypper -n --gpg-auto-import-keys ref && \
cpio \
spacecmd \
expect \
prometheus \
wget && \
zypper addrepo --no-gpgcheck obs://systemsmanagement:Uyuni:Utils systemsmanagement:uyuni:utils && \
zypper -n install obs-to-maven yarn && \
zypper addrepo --no-gpgcheck https://download.opensuse.org/repositories/systemsmanagement:/Uyuni:/Utils/SLE_15_SP6/ systemsmanagement:uyuni:utils && \
zypper addrepo --no-gpgcheck https://download.opensuse.org/repositories/systemsmanagement:/Uyuni:/Stable:/SLE15-Uyuni-Client-Tools/SLE_15/systemsmanagement:Uyuni:Stable:SLE15-Uyuni-Client-Tools.repo && \
zypper -n install obs-to-maven yarn prometheus && \
zypper clean -a
COPY minima.yaml /etc/minima.yaml
RUN mkdir /tmp/minima && \
Expand All @@ -32,4 +31,5 @@ RUN /usr/bin/minima sync -c /etc/minima.yaml && \
rm -rf /srv/www/htdocs/pub/repositories/
RUN cd /srv/www/htdocs/pub && ln -s TestRepoRpmUpdates AnotherRepo
RUN mkdir /etc/pki/rpm-gpg && wget -c http://download.opensuse.org/repositories/systemsmanagement:/Uyuni:/Test-Packages:/Pool/rpm/repodata/repomd.xml.key -O /etc/pki/rpm-gpg/uyuni-tools-gpg-pubkey-0d20833e.key
COPY mirror /mirror

29 changes: 29 additions & 0 deletions testsuite/dockerfiles/server-all-in-one-dev/README-mirror.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
# How to update the mirror directory

## Product information

You need to update the json files inside the mirror directory with the information of new products.
You can use minima-mirror to mirror SCC, provided you have credentials, and then manually copy and paste the product information.
Be careful not to copy the `organizations_subscriptions.json` file.

Then, use `sed` to strip the `?XXXXXX` query strings from the URLs in those JSON files (note: in GNU sed basic regular expressions the `?` must be left unescaped to match a literal question mark; `\?` would instead make the preceding group optional and blank non-matching lines):

`sed 's|\(http://[^?]*\)?.*$|\1|' -i FILE.json`
`sed 's|\(https://[^?]*\)?.*$|\1|' -i FILE.json`

Run this for every JSON file inside the mirror directory, substituting its name for FILE.json.

## Repositories

If you need new repositories, you need to create an empty directory for the ones you need and then run `createrepo` to generate the new metadata.
Initially you can create the repositories empty.

Then, you need to edit the list of empty repos in `testsuite/features/support/constants.rb`.

## Packages

If you need new packages, create or edit the get_rpms.sh script inside the repository, to download the new package.
Just write the `wget command` you need to use to download the new package.

Then, download the package and call `create_repo`.

Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Original file line number Diff line number Diff line change
@@ -0,0 +1,55 @@
<?xml version="1.0" encoding="UTF-8"?>
<repomd xmlns="http://linux.duke.edu/metadata/repo" xmlns:rpm="http://linux.duke.edu/metadata/rpm">
<revision>1726819078</revision>
<data type="primary">
<checksum type="sha256">1cb61ea996355add02b1426ed4c1780ea75ce0c04c5d1107c025c3fbd7d8bcae</checksum>
<open-checksum type="sha256">e1e2ffd2fb1ee76f87b70750d00ca5677a252b397ab6c2389137a0c33e7b359f</open-checksum>
<location href="repodata/1cb61ea996355add02b1426ed4c1780ea75ce0c04c5d1107c025c3fbd7d8bcae-primary.xml.gz"/>
<timestamp>1726819078</timestamp>
<size>134</size>
<open-size>167</open-size>
</data>
<data type="filelists">
<checksum type="sha256">95a4415d859d7120efb6b3cf964c07bebbff9a5275ca673e6e74a97bcbfb2a5f</checksum>
<open-checksum type="sha256">bf9808b81cb2dbc54b4b8e35adc584ddcaa73bd81f7088d73bf7dbbada961310</open-checksum>
<location href="repodata/95a4415d859d7120efb6b3cf964c07bebbff9a5275ca673e6e74a97bcbfb2a5f-filelists.xml.gz"/>
<timestamp>1726819078</timestamp>
<size>123</size>
<open-size>125</open-size>
</data>
<data type="other">
<checksum type="sha256">ef3e20691954c3d1318ec3071a982da339f4ed76967ded668b795c9e070aaab6</checksum>
<open-checksum type="sha256">e0ed5e0054194df036cf09c1a911e15bf2a4e7f26f2a788b6f47d53e80717ccc</open-checksum>
<location href="repodata/ef3e20691954c3d1318ec3071a982da339f4ed76967ded668b795c9e070aaab6-other.xml.gz"/>
<timestamp>1726819078</timestamp>
<size>123</size>
<open-size>121</open-size>
</data>
<data type="primary_db">
<checksum type="sha256">15f48f2468bb83e693bc7cca826979327546863680965cd43735f146e8187e2f</checksum>
<open-checksum type="sha256">3c556248a85f90ffd8dd54105240638c48a5ddc81d98fd184e4613c46a002276</open-checksum>
<location href="repodata/15f48f2468bb83e693bc7cca826979327546863680965cd43735f146e8187e2f-primary.sqlite.bz2"/>
<timestamp>1726819078</timestamp>
<size>1337</size>
<open-size>106496</open-size>
<database_version>10</database_version>
</data>
<data type="filelists_db">
<checksum type="sha256">bbe7310d4f4a8a6e619ffdcc82603bdb701c32a70a5327ab9f5d7e31e4607d9c</checksum>
<open-checksum type="sha256">f5f6518ea3ef8e7ae5f74a85db4c8eb6362fc3ac7f5b78653ef78d2d37342a09</open-checksum>
<location href="repodata/bbe7310d4f4a8a6e619ffdcc82603bdb701c32a70a5327ab9f5d7e31e4607d9c-filelists.sqlite.bz2"/>
<timestamp>1726819078</timestamp>
<size>606</size>
<open-size>28672</open-size>
<database_version>10</database_version>
</data>
<data type="other_db">
<checksum type="sha256">79b58e6ee2e2bb92fce1adbf9c5507a9ad3b5ffd85cf15cac58f82f6d80b7cf8</checksum>
<open-checksum type="sha256">ae5996be68fa4422603c7c6c9a7ff498871b7d3ab7ec1e051e9919f5b53c8d46</open-checksum>
<location href="repodata/79b58e6ee2e2bb92fce1adbf9c5507a9ad3b5ffd85cf15cac58f82f6d80b7cf8-other.sqlite.bz2"/>
<timestamp>1726819078</timestamp>
<size>580</size>
<open-size>24576</open-size>
<database_version>10</database_version>
</data>
</repomd>
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Loading

0 comments on commit df35611

Please sign in to comment.