diff --git a/.dockerignore b/.dockerignore
index d7db6ad3a68393b58df541bb6d1a8ea6b609a5ab..2913fc1235255b965b0adb42db4d64c5ac27ec00 100644
--- a/.dockerignore
+++ b/.dockerignore
@@ -1,3 +1,5 @@
+**/.bundle
+**/vendor/bundle
 src/api/tmp/*
 src/api/log/*
 src/backend/t/tmp/*
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
new file mode 100644
index 0000000000000000000000000000000000000000..118f96ffc30bfc991ac29a86bf37bc8185fa92ac
--- /dev/null
+++ b/.gitlab-ci.yml
@@ -0,0 +1,175 @@
+stages:
+  - docker
+  - test
+  - tag
+
+variables:
+  TAG_SHA: $CI_COMMIT_SHORT_SHA-P$CI_PIPELINE_ID
+  GIT_SUBMODULE_STRATEGY: recursive
+
+.build-docker-image:
+  stage: docker
+  tags:
+    - lightweight
+  image:
+    name: gcr.io/kaniko-project/executor:debug
+    entrypoint: [""]
+  script:
+    - |
+      cat << EOF > /kaniko/.docker/config.json
+      {
+        "auths":{
+          "$CI_REGISTRY": {
+            "username":"$CI_REGISTRY_USER",
+            "password":"$CI_REGISTRY_PASSWORD"
+          }
+        }
+      }
+      EOF
+    - >
+      /kaniko/executor
+      --context $CI_PROJECT_DIR
+      --dockerfile $CI_PROJECT_DIR/Dockerfile.$image
+      --destination $CI_REGISTRY_IMAGE/$image:$TAG_SHA
+      --destination $CI_REGISTRY_IMAGE/$image:$CI_COMMIT_REF_SLUG
+      --build-arg REGISTRY=$CI_REGISTRY_IMAGE
+      --build-arg TAG=$CI_COMMIT_REF_SLUG
+      --single-snapshot
+    - echo Pushed $CI_REGISTRY_IMAGE/$image:$CI_COMMIT_REF_SLUG
+              and $CI_REGISTRY_IMAGE/$image:$TAG_SHA
+
+frontend-base:
+  extends: .build-docker-image
+  only:
+    changes:
+      - Dockerfile.frontend-base
+      - src/api/app/assets
+      - src/api/public
+      - src/api/vendor
+      - src/api/Gemfile
+      - src/api/Gemfile.lock
+  variables:
+    image: frontend-base
+
+frontend:
+  extends: .build-docker-image
+  needs:
+    - job: frontend-base
+      optional: true
+  variables:
+    image: frontend
+
+backend:
+  extends: .build-docker-image
+  variables:
+    image: backend
+
+worker:
+  extends: .build-docker-image
+  needs:
+    - job: backend
+  variables:
+    image: worker
+
+.boot-instance:
+  image: debian:bookworm-slim
+  tags:
+    - heavyweight
+  services:
+    - name: docker:stable-dind
+      alias: docker
+  variables:
+    DOCKER_DRIVER: overlay2
+    DOCKER_HOST: tcp://docker:2375
+    DOCKER_TLS_CERTDIR: ""
+    COMPOSE_OVERLAY: |
+      version: '2.1'
+      services:
+        frontend:
+          image: $CI_REGISTRY_IMAGE/frontend:$TAG_SHA
+          ports:
+            - "80:3000"
+        backend:
+          image: $CI_REGISTRY_IMAGE/backend:$TAG_SHA
+          ports:
+            - "5252:5252"
+        worker:
+          image: $CI_REGISTRY_IMAGE/worker:$TAG_SHA
+      volumes:
+        backend-logs:
+          driver_opts:
+            type: none
+            o: bind
+            device: './logs/backend'
+        frontend-logs:
+          driver_opts:
+            type: none
+            o: bind
+            device: './logs/frontend'
+        worker-logs:
+          driver_opts:
+            type: none
+            o: bind
+            device: './logs/worker'
+  before_script:
+    - apt update && apt install -y --no-install-recommends
+        curl
+        docker-compose
+        docker.io
+        dpkg-dev
+        osc
+        osc-plugin-dput
+        wait-for-it
+    - docker login -u "$CI_REGISTRY_USER" -p "$CI_REGISTRY_PASSWORD" "$CI_REGISTRY"
+    - docker info
+    - echo "$COMPOSE_OVERLAY" > docker-compose.override.yml
+    - mkdir -p logs/backend logs/frontend logs/worker
+    - |
+      cat << EOF > ~/.oscrc
+      [general]
+      apiurl = http://docker
+      [http://docker]
+      user = Admin
+      pass = opensuse
+      EOF
+    - docker-compose up -d
+    - wait-for-it docker:5252 -s --timeout=180 -- echo "The OBS backend is up"
+    - wait-for-it docker:80 -s --timeout=180 -- echo "The OBS frontend is up"
+    - PATH=$PWD/tests/scripts:$PATH
+  after_script:
+    - docker-compose logs > docker-compose.txt
+  artifacts:
+    paths:
+      - docker-compose.txt
+      - logs/*/*.log
+    when: always
+
+integration-test:
+  stage: test
+  extends: .boot-instance
+  script:
+    - curl http://docker/
+    - osc ls /
+    - run-parts tests
+
+tag-latest-docker-image:
+  stage: tag
+  tags:
+    - lightweight
+  image: buildah/buildah
+  variables:
+    AUTH: $CI_REGISTRY_USER:$CI_REGISTRY_PASSWORD
+    TAG_NEW: latest
+  script:
+    - |
+      for image in frontend backend worker
+      do
+        echo Tagging "$CI_REGISTRY_IMAGE/$image:$TAG_SHA"
+        skopeo copy --src-creds "$AUTH" --dest-creds "$AUTH" \
+            "docker://$CI_REGISTRY_IMAGE/$image:$TAG_SHA" \
+            "docker://$CI_REGISTRY_IMAGE/$image:$TAG_NEW"
+        echo Tagged "$CI_REGISTRY_IMAGE/$image:$TAG_NEW"
+      done
+  only:
+    variables:
+      - $CI_DEFAULT_BRANCH == $CI_COMMIT_BRANCH
diff --git a/.gitmodules b/.gitmodules
index 4de6b500f74e3775e06b603a182cf449636ccf2f..ccd724c6285fa1597e1f48261956b6356eb0dec8 100644
--- a/.gitmodules
+++ b/.gitmodules
@@ -1,3 +1,4 @@
 [submodule "src/backend/build"]
 	path = src/backend/build
-	url = git://github.com/openSUSE/obs-build.git
+	url = https://gitlab.collabora.com/obs/obs-build.git
+	branch = collabora/main
diff --git a/Dockerfile.backend b/Dockerfile.backend
new file mode 100644
index 0000000000000000000000000000000000000000..5cb13ef9da0f9912e3e88c1f0243df374b867f14
--- /dev/null
+++ b/Dockerfile.backend
@@ -0,0 +1,78 @@
+FROM debian:bullseye-slim as server
+LABEL maintainer Andrej Shadura <andrew.shadura@collabora.co.uk>
+ENV LC_ALL=C.UTF-8
+ARG DEBIAN_FRONTEND=noninteractive
+ARG WORKDIR=/tmp/sources
+
+# Needs checking what’s actually needed
+RUN apt-get update \
+ && apt-get install -y \
+        apt-utils \
+        adduser \
+        ca-certificates \
+        curl \
+        diffutils \
+        dpkg-dev \
+        git \
+        locales \
+        libbssolv-perl \
+        libcompress-raw-zlib-perl \
+        libfile-sync-perl \
+        libio-compress-perl \
+        libjson-xs-perl \
+        libnet-ssleay-perl \
+        libsocket-msghdr-perl \
+        libtimedate-perl \
+        libxml-parser-perl \
+        libxml-simple-perl \
+        libxml-structured-perl \
+        libyaml-libyaml-perl \
+        make \
+        patch \
+        procps \
+        reprepro \
+        supervisor \
+        tzdata \
+        xzdec \
+        zstd
+
+COPY . $WORKDIR
+
+RUN make -C $WORKDIR/dist install
+RUN make -C $WORKDIR/src/backend install
+RUN make -C $WORKDIR/src/backend/build install
+RUN ln -sf /usr/lib/obs-build /usr/lib/obs/server/build
+
+RUN rm -rf $WORKDIR
+
+RUN mkdir -p /etc/obs
+RUN cp /usr/lib/obs/server/BSConfig.pm.template /etc/obs/BSConfig.pm
+
+RUN ln -sf /etc/obs/BSConfig.pm /usr/lib/obs/server/BSConfig.pm
+
+# Sanity check: older versions of obs-build don’t have this file
+RUN test -f /usr/lib/obs/server/build/Build/Modules.pm
+
+COPY docker/services/backend/*.conf /etc/supervisor/conf.d/
+COPY docker/ /opt/
+
+VOLUME /etc/obs
+
+RUN /opt/configure-backend-user.sh
+
+RUN mkdir -p /srv/obs/log /srv/obs/run \
+ && chmod ug=rwxt /srv/obs/run \
+ && chown obsrun:obsrun -R /srv/obs
+
+VOLUME /srv/obs
+
+ENTRYPOINT /opt/backend-docker-entrypoint.sh
+
+# serviceserver
+EXPOSE 5152
+# reposerver
+EXPOSE 5252
+# srcserver
+EXPOSE 5352
+
+HEALTHCHECK --start-period=60s CMD curl --fail http://127.0.0.1:5252/
diff --git a/Dockerfile.frontend b/Dockerfile.frontend
new file mode 100644
index 0000000000000000000000000000000000000000..044caa608c82cffaf14310b6c044088702373457
--- /dev/null
+++ b/Dockerfile.frontend
@@ -0,0 +1,41 @@
+ARG REGISTRY
+ARG TAG=latest
+ARG FRONTEND_UID=999
+FROM $REGISTRY/frontend-base:$TAG as base
+
+FROM base
+# ARGs declared before the first FROM are only usable in FROM lines;
+# redeclare FRONTEND_UID here so the RUN below sees it (default set above).
+ARG FRONTEND_UID
+ARG WORKDIR=/tmp/sources
+ARG INSTALLDIR=/obs
+
+COPY docker/ /opt/
+
+RUN /opt/configure-frontend-user.sh "$FRONTEND_UID"
+RUN mkdir -p log tmp db/sphinx \
+ && chown -R frontend $INSTALLDIR
+
+ADD --chown=frontend:frontend src/api/ $INSTALLDIR/src/api/
+
+# The base image only has runtime dependencies, Gemfiles and pre-built assets
+# The assets and the Gemfile.lock have been overwritten by the ADD command above, restore them.
+COPY --from=base $INSTALLDIR/src/api/Gemfile.lock  $INSTALLDIR/src/api/
+COPY --from=base $INSTALLDIR/src/api/public/assets $INSTALLDIR/src/api/public/assets
+
+WORKDIR $INSTALLDIR/src/api
+
+# Add local Bootstrap 3 copy for error pages
+RUN install -D -t public/css /usr/share/javascript/bootstrap/css/bootstrap.min.css
+
+ARG NOKOGIRI_USE_SYSTEM_LIBRARIES=1
+
+RUN bundle install --jobs=$(nproc) --retry=3 \
+ && rm -rf /var/lib/gems/*/cache
+
+RUN sed -i 's|^#!/usr/bin/ruby.ruby.*$|#!/usr/bin/ruby|' bin/* script/*
+COPY docker/services/frontend/*.conf /etc/supervisor/conf.d/
+
+ENTRYPOINT /opt/frontend-docker-entrypoint.sh
+EXPOSE 3000
+
+HEALTHCHECK --start-period=60s CMD curl -f http://127.0.0.1:3000/
diff --git a/Dockerfile.frontend-base b/Dockerfile.frontend-base
new file mode 100644
index 0000000000000000000000000000000000000000..add3446e6d9122ca0e70a1965b469a3cb71e9293
--- /dev/null
+++ b/Dockerfile.frontend-base
@@ -0,0 +1,102 @@
+ARG INSTALLDIR=/obs
+ARG DEBIAN_RELEASE=bullseye
+
+FROM debian:$DEBIAN_RELEASE-slim as base
+ENV LC_ALL=C.UTF-8
+ARG DEBIAN_FRONTEND=noninteractive
+ARG INSTALLDIR
+ARG DEBIAN_RELEASE
+
+# Enable backports for sphinxsearch
+RUN [ "$DEBIAN_RELEASE" != bullseye ] || \
+    echo "deb http://deb.debian.org/debian bullseye-backports main" > /etc/apt/sources.list.d/backports.list
+
+RUN apt-get update \
+ && apt-get install -y \
+        apt-utils \
+        adduser \
+        ca-certificates \
+        curl \
+        diffutils \
+        dpkg-dev \
+        git \
+        locales \
+        libjs-bootstrap \
+        make \
+        msmtp-mta \
+        mariadb-client \
+        npm \
+        patch \
+        pkgconf \
+        ruby2.7 \
+        ruby2.7-dev \
+        ruby-bundler \
+        ruby-ffi \
+        sphinxsearch \
+        supervisor \
+        time \
+        tzdata
+
+RUN apt-get update \
+ && apt-get install -y \
+        default-libmysqlclient-dev \
+        libldap2-dev \
+        libsasl2-dev \
+        libxml2-dev \
+        libxslt1-dev \
+        zlib1g-dev
+
+ADD src/api/Gemfile src/api/Gemfile.lock $INSTALLDIR/src/api/
+WORKDIR $INSTALLDIR/src/api/
+
+# Force Ruby 2.7 no matter what
+RUN for bin in $(dpkg -L ruby | grep /usr/bin/); do \
+      ln -sf ${bin}2.7 $bin; \
+    done
+RUN echo "ruby '~> 2.7.0'" >> Gemfile
+
+# Drop the hard-coded Bundler version so we can use the distro-provided Bundler
+RUN sed -e '/BUNDLED WITH/,+1 d' \
+        -e 's/^  ruby$/  ruby '"$(ruby2.7 -v | cut -d' ' -f2)"'/' Gemfile.lock > Gemfile.lock.new; \
+    diff -u Gemfile.lock Gemfile.lock.new; \
+    mv Gemfile.lock.new Gemfile.lock
+
+ARG NOKOGIRI_USE_SYSTEM_LIBRARIES=1
+
+RUN bundle config --global without assets:development:test
+
+RUN bundle install --jobs=$(nproc) --retry=3 \
+ && rm -rf \
+    /var/lib/gems/*/cache/ \
+    /var/lib/gems/*/test/ \
+    /var/lib/gems/*/extensions/*/*/*/gem_make.out \
+    /var/lib/gems/*/extensions/*/*/*/*.log
+
+ENV RAILS_ENV=production
+ENV RAILS_LOG_TO_STDOUT=true
+
+FROM base as asset-builder
+ARG INSTALLDIR
+
+ADD src/api/ $INSTALLDIR/src/api/
+
+COPY --from=base $INSTALLDIR/src/api/Gemfile $INSTALLDIR/src/api/Gemfile.lock $INSTALLDIR/src/api/
+
+ARG BUNDLE_BUILD__SASSC=--disable-march-tune-native
+ARG NOKOGIRI_USE_SYSTEM_LIBRARIES=1
+
+RUN gem install --no-format-executable brakeman --version 5.0.2 --no-doc
+RUN gem install sassc --version 2.0.1 --no-doc
+
+RUN bundle config --local without development:test
+
+RUN bundle install --jobs=$(nproc) --retry=3
+
+RUN echo nonce > config/secret.key \
+ && DATABASE_URL=mysql2://localhost/noncedb bundle exec rake assets:precompile RAILS_GROUPS=assets
+
+FROM base
+ARG INSTALLDIR
+
+# Here, we end up with an image with runtime dependencies only, Gemfiles and pre-built assets
+COPY --from=asset-builder $INSTALLDIR/src/api/public/assets  $INSTALLDIR/src/api/public/assets
diff --git a/Dockerfile.worker b/Dockerfile.worker
new file mode 100644
index 0000000000000000000000000000000000000000..7eb78b05044dff26a6c0319a52c6a2f0ba05017d
--- /dev/null
+++ b/Dockerfile.worker
@@ -0,0 +1,28 @@
+ARG REGISTRY
+ARG TAG=latest
+FROM $REGISTRY/backend:$TAG
+
+LABEL maintainer Andrej Shadura <andrew.shadura@collabora.co.uk>
+ARG DEBIAN_FRONTEND=noninteractive
+
+# TODO: cleanup
+RUN apt-get update \
+ && apt-get install -y \
+        binutils \
+        binfmt-support \
+        cpio \
+        curl \
+        debootstrap \
+        fdisk \
+        libarchive-tools \
+        lsb-base \
+        lvm2 \
+        lzma \
+        psmisc \
+        qemu-user-static \
+        rpm
+
+RUN rm /etc/supervisor/conf.d/*
+COPY docker/services/worker/*.conf /etc/supervisor/conf.d/
+
+ENTRYPOINT /opt/worker-docker-entrypoint.sh
diff --git a/docker-compose.ahm.yml b/docker-compose.ahm.yml
deleted file mode 100644
index 423edb349c40d743af38f49b817bdb8706177ff9..0000000000000000000000000000000000000000
--- a/docker-compose.ahm.yml
+++ /dev/null
@@ -1,35 +0,0 @@
-version: "2.1"
-services:
-  influx:
-    image: influxdb
-    environment:
-      - INFLUXDB_DB=telegraf
-      - INFLUXDB_USER=telegraf
-      - INFLUXDB_USER_PASSWORD=telegraf
-      - INFLUXDB_READ_USER=grafana
-      - INFLUXDB_READ_USER_PASSWORD=grafana
-  rabbit:
-    image: rabbitmq:3-management
-    ports:
-      - "15672:15672"
-    # This is a simple hack to give rabbitmq 10 seconds
-    # to get up and running. Otherwise grafana tries to
-    # connect too soon and crashes.
-    healthcheck:
-      test: rabbitmqctl list_queues
-      interval: 10s
-      timeout: 15s
-      retries: 5
-  telegraf:
-    image: telegraf
-    depends_on:
-      rabbit:
-        condition: service_healthy
-    volumes:
-      - ./contrib/telegraf.conf:/etc/telegraf/telegraf.conf:ro
-  grafana:
-    image: grafana/grafana
-    ports:
-      - "8000:3000"
-    depends_on:
-      - influx
diff --git a/docker-compose.ha.yml b/docker-compose.ha.yml
deleted file mode 100644
index 0bfd72ddc02d64e1ba716ec7b24b0919725be4b6..0000000000000000000000000000000000000000
--- a/docker-compose.ha.yml
+++ /dev/null
@@ -1,28 +0,0 @@
-version: "2.1"
-services:
-  proxy:
-    image: haproxy:1.7
-    volumes:
-      - ./contrib/haproxy.cfg:/usr/local/etc/haproxy/haproxy.cfg:ro
-    ports:
-      - "8080:80"
-      - "32700:32700"
-    depends_on:
-      - frontend
-      - frontend_node_2
-  frontend_node_2:
-    image: openbuildservice/frontend
-    environment:
-      - NODE_NUMBER=2
-    volumes:
-      - .:/obs
-      - ./contrib/thinking_sphinx.yml.ha:/obs/src/api/config/thinking_sphinx.yml:ro
-      - ./contrib/Procfile.ha:/obs/src/api/Procfile
-    ports:
-      - "3001:3000"
-      - "1081:1080"
-    depends_on:
-      - db
-      - cache
-      - backend
-      - worker
diff --git a/docker-compose.minitest.yml b/docker-compose.minitest.yml
deleted file mode 100644
index de5669f1b40b76b98c28bb0fedb5680dfa6919a5..0000000000000000000000000000000000000000
--- a/docker-compose.minitest.yml
+++ /dev/null
@@ -1,13 +0,0 @@
-version: "2.1"
-services:
-  minitest:
-    image: openbuildservice/minitest
-    build:
-      dockerfile: docker-files/Dockerfile.minitest
-      context: src/api
-    volumes:
-      - .:/obs
-    depends_on:
-      - db
-      - cache
-
diff --git a/docker-compose.selenium.yml b/docker-compose.selenium.yml
deleted file mode 100644
index 09c71c13be39b843ec1024b4c188e9f6b5d02a11..0000000000000000000000000000000000000000
--- a/docker-compose.selenium.yml
+++ /dev/null
@@ -1,14 +0,0 @@
-version: "2.1"
-services:
-  frontend:
-    depends_on:
-      - selenium
-    environment:
-      RSPEC_HOST: frontend
-  selenium:
-    image: selenium/standalone-chrome-debug
-    environment:
-      VNC_NO_PASSWORD: 1
-    ports:
-      - 5900:5900
-
diff --git a/docker-compose.yml b/docker-compose.yml
index 90aa950244e4d2c157f621fe8cbff244e2b0057e..02fcd77334d86acf1677b305da31a6cda4f2d7c0 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -1,44 +1,74 @@
 version: "2.1"
+
 services:
   db:
-    image: registry.opensuse.org/obs/server/unstable/container/leap151/containers/openbuildservice/mariadb
+    image: mariadb:10.6
     ports:
-      - "3306:3306"
-    command: /usr/lib/mysql/mysql-systemd-helper start
+      - "127.0.0.1:3306:3306"
+    restart: unless-stopped
+    environment:
+      MARIADB_ROOT_PASSWORD: someobs
+      MARIADB_DATABASE: obsapi
+      MARIADB_USER: obs-api
+      MARIADB_PASSWORD: someobs
+
   cache:
-    image: registry.opensuse.org/obs/server/unstable/container/leap151/containers/openbuildservice/memcached
-    ports:
-      - "11211:11211"
-    command: /usr/sbin/memcached -u memcached
+    image: memcached:1.6-alpine
+
   backend:
-    image: registry.opensuse.org/obs/server/unstable/container/leap151/containers/openbuildservice/backend
-    volumes:
-      - .:/obs
-      - ./dist/aws_credentials:/etc/obs/cloudupload/.aws/config
-      - ./dist/ec2utils.conf:/etc/obs/cloudupload/.ec2utils.conf
-      - ./dist/clouduploader.rb:/usr/bin/clouduploader
-    command: /obs/contrib/start_development_backend -d /obs
-  worker:
-    image: registry.opensuse.org/obs/server/unstable/container/leap151/containers/openbuildservice/backend
+    image: registry.gitlab.collabora.com/obs/open-build-service/backend
+    hostname: backend
+    restart: unless-stopped
     volumes:
-      - .:/obs
-    privileged: true 
+      - backend-storage:/srv/obs
+      - backend-logs:/srv/obs/log
+    environment:
+      OBS_FRONTEND_HOST: frontend
+      OBS_BACKEND_HOST: backend
+
+  frontend:
+    image: registry.gitlab.collabora.com/obs/open-build-service/frontend
     depends_on:
       - backend
-    command: /obs/contrib/start_development_worker
-  frontend:
-    image: openbuildservice/frontend
-    command: foreman start -p 3000
-    build:
-      dockerfile: docker-files/Dockerfile
-      context: src/api
     volumes:
-      - .:/obs
+      - frontend-logs:/obs/src/api/log
+      - type: tmpfs
+        target: /tmp
+        tmpfs:
+          size: 4G
+    hostname: frontend
+    restart: unless-stopped
+    environment:
+      DB_HOST: db
+      DB_PORT: 3306
+      DB_ROOT_PASSWORD: someobs
+      DB_NAME: obsapi
+      DB_USER: obs-api
+      DB_PASSWORD: someobs
+      OBS_BACKEND_HOST: backend
+      OBS_FRONTEND_WORKERS: 4
     ports:
-      - "3000:3000"
-      - "1080:1080"
+      - "127.0.0.1:3000:3000"
     depends_on:
       - db
       - cache
+
+  worker:
+    depends_on:
       - backend
-      - worker
+    image: registry.gitlab.collabora.com/obs/open-build-service/worker
+    hostname: worker
+    restart: unless-stopped
+    privileged: true
+    volumes:
+      - worker-logs:/srv/obs/log
+    environment:
+      OBS_SRC_SERVER: backend:5352
+      OBS_REPO_SERVERS: backend:5252
+      OBS_WORKER_INSTANCES: 1
+
+volumes:
+  backend-storage:
+  backend-logs:
+  frontend-logs:
+  worker-logs:
diff --git a/docker/backend-docker-entrypoint.sh b/docker/backend-docker-entrypoint.sh
new file mode 100755
index 0000000000000000000000000000000000000000..3a09881996a98c6382e19ca00ca672f7fc738cf2
--- /dev/null
+++ b/docker/backend-docker-entrypoint.sh
@@ -0,0 +1,57 @@
+#!/bin/sh -e
+
+if [ -z "$OBS_FRONTEND_HOST" ] || [ -z "$OBS_BACKEND_HOST" ]
+then
+    echo >&2 'You need to specify OBS_FRONTEND_HOST and OBS_BACKEND_HOST'
+    exit 1
+fi
+
+if [ -d /srv/obs/run ] && [ "$(stat -c %U /srv/obs/run)" != obsrun ]
+then
+    echo "OBS files are owned by a wrong user $(stat -c %U /srv/obs/run), NOT re-owning!" >&2
+    echo "Please fix and restart." >&2
+    exit 1
+fi
+
+mkdir -p /srv/obs/log /srv/obs/run
+chmod ug=rwxt /srv/obs/run
+chown obsrun:obsrun /srv/obs/*
+
+export GNUPGHOME=/srv/obs/gnupg
+
+if [ ! -d "$GNUPGHOME" ] || [ ! -f "$GNUPGHOME/trustdb.gpg" ]
+then
+    echo "GnuPG homedir is missing!"
+    mkdir -p -m 0700 "$GNUPGHOME"
+    chown obsrun:obsrun "$GNUPGHOME"
+    runuser -u obsrun -- gpg --list-keys
+fi
+
+if [ ! -f /etc/obs/BSConfig.pm ]
+then
+    echo "OBS backend configuration not found, starting from scratch"
+    cp /usr/lib/obs/server/BSConfig.pm.template /etc/obs/BSConfig.pm
+fi
+
+echo "Configure OBS backend host: $OBS_BACKEND_HOST"
+sed -i "s/hostname = .*/hostname = '$OBS_BACKEND_HOST';/g" /etc/obs/BSConfig.pm
+
+echo "Configure OBS frontend host: $OBS_FRONTEND_HOST"
+sed -i "s/frontend = .*/frontend = '$OBS_FRONTEND_HOST';/g" /etc/obs/BSConfig.pm
+
+for arch in ${OBS_ARCHES:-x86_64 i586 armv7hl aarch64}
+do
+    for template in /opt/services/backend/*@.conf.in
+    do
+        conf=$(echo $(basename $template) | sed -e "s|@|@$arch|" -e 's|.in$||')
+        sed -e "s|@ARCH@|$arch|g" $template > /etc/supervisor/conf.d/$conf
+    done
+done
+
+if [ -z "$(/opt/get-obs-config sign)" ]
+then
+    echo Signer not configured, disabling.
+    mv /etc/supervisor/conf.d/obssigner.conf /etc/supervisor/conf.d/obssigner.conf.disabled
+fi
+
+exec /usr/bin/supervisord -n
diff --git a/docker/configure-app.sh b/docker/configure-app.sh
new file mode 100755
index 0000000000000000000000000000000000000000..57193d7187c7b1c3b317c3a94fe09fbe7549fefe
--- /dev/null
+++ b/docker/configure-app.sh
@@ -0,0 +1,50 @@
+#!/bin/sh -e
+
+if [ -z "$OBS_BACKEND_HOST" ]; then
+    echo >&2 'error: server backend is unavailable and hostname option is not specified '
+    echo >&2 '  You need to specify OBS_BACKEND_HOST'
+    exit 1
+fi
+
+for d in log tmp db/sphinx
+do
+    mkdir -p $d
+    chown -R frontend $d
+done
+
+# Allow overriding the secret key
+if [ -f /run/secrets/secretkey ]
+then
+    ln -sf /run/secrets/secretkey config/secret.key
+fi
+
+if [ ! -r config/secret.key ]
+then
+    bundle exec rake secret > config/secret.key
+fi
+
+for d in options.yml thinking_sphinx.yml
+do
+    [ -r config/$d ] || cp config/$d.example config/$d
+done
+
+if [ ! -z "$OBS_BACKEND_HOST" ]; then
+    sed -i s/"source_host: localhost"/"source_host: ${OBS_BACKEND_HOST}"/g config/options.yml
+fi
+
+if [ ! -z "$OBS_LOG_LEVEL" ]
+then
+    sed -i "s/config.log_level = .*/config.log_level = :$OBS_LOG_LEVEL/g" config/environments/production.rb
+fi
+
+# Set up msmtp if a configuration is supplied
+if [ -f /run/secrets/msmtprc ]
+then
+    ln -sf /run/secrets/msmtprc /etc/msmtprc
+fi
+
+# Set up SSO auth if a configuration is supplied
+if [ -f /run/secrets/ssoauth ]
+then
+    ln -sf /run/secrets/ssoauth config/auth.yml
+fi
diff --git a/docker/configure-backend-user.sh b/docker/configure-backend-user.sh
new file mode 100755
index 0000000000000000000000000000000000000000..dfd1827ba747ad999ce15d852a7675e4365a4398
--- /dev/null
+++ b/docker/configure-backend-user.sh
@@ -0,0 +1,25 @@
+#!/bin/sh -e
+
+if ! getent group obsrun > /dev/null
+then
+    addgroup --system --gid 999 obsrun
+fi
+
+if ! getent passwd obsrun > /dev/null
+then
+    adduser --system --uid 999 \
+        --ingroup obsrun --shell /bin/false \
+        --home /usr/lib/obs --no-create-home obsrun
+    usermod -c "User for build service backend" obsrun
+fi
+
+if ! getent passwd obsservicerun > /dev/null
+then
+    adduser --system --uid 998 \
+        --ingroup obsrun --shell /bin/false \
+        --home /usr/lib/obs/server --no-create-home obsservicerun
+    usermod -c "User for obs source service server" obsservicerun
+fi
+
+mkdir -p /srv/obs/repos
+chown obsrun:obsrun /srv/obs/repos
diff --git a/docker/configure-db.sh b/docker/configure-db.sh
new file mode 100755
index 0000000000000000000000000000000000000000..c12b7e9e0c00f78478b0d0c7931d552d9f9993ad
--- /dev/null
+++ b/docker/configure-db.sh
@@ -0,0 +1,34 @@
+#!/bin/sh -e
+
+if [ -z "$DB_HOST" ] || [ -z "$DB_ROOT_PASSWORD" ] || [ -z "$DB_NAME" ] || [ -z "$DB_USER" ] || [ -z "$DB_PASSWORD" ]; then
+    echo >&2 'error: database is uninitialized and password option is not specified or OBS'
+    echo >&2 '  You need to specify DB_HOST, DB_ROOT_PASSWORD, DB_NAME, DB_USER and DB_PASSWORD'
+    exit 1
+fi
+
+cat > config/database.yml <<EOF
+production:
+  adapter: mysql2
+  host: $DB_HOST
+  port: 3306
+  database: $DB_NAME
+  username: $DB_USER
+  password: $DB_PASSWORD
+  encoding: utf8mb4
+  collation: utf8mb4_unicode_ci
+  timeout: 15
+  pool: 30
+EOF
+
+rake() {
+    runuser -u frontend -- bundle exec rake "$@"
+}
+
+if ! rake db:migrate:status
+then
+    rake db:create || true
+    rake db:setup
+    rake writeconfiguration
+else
+    rake db:migrate:with_data
+fi
diff --git a/docker/configure-frontend-user.sh b/docker/configure-frontend-user.sh
new file mode 100755
index 0000000000000000000000000000000000000000..96ae65453125a5063561422d289ef617e1e9ee34
--- /dev/null
+++ b/docker/configure-frontend-user.sh
@@ -0,0 +1,16 @@
+#!/bin/sh -e
+
+FRONTEND_UID="${1:-999}"
+
+if ! getent group frontend > /dev/null
+then
+    addgroup --system --gid $FRONTEND_UID frontend
+fi
+
+if ! getent passwd frontend > /dev/null
+then
+    adduser --system --uid $FRONTEND_UID \
+        --ingroup frontend --shell /bin/false \
+        --home /obs --no-create-home frontend
+    usermod -c "User for build service frontend" frontend
+fi
diff --git a/docker/configure-sso.py b/docker/configure-sso.py
new file mode 100755
index 0000000000000000000000000000000000000000..13b373fa582df4bf7dbe9ea7a7a49dc1de55ee13
--- /dev/null
+++ b/docker/configure-sso.py
@@ -0,0 +1,38 @@
+#!/usr/bin/python3
+
+import yaml
+import os
+
+CONFIG_LOCATION='config/auth.yml'
+
+def parse_method(method: str):
+    prefix = 'OBS_SSO_' + method.upper().replace('-', '_') + '_'
+    for k, v in os.environ.items():
+        if k.startswith(prefix):
+            opt = k[len(prefix):].lower()
+            yield opt, v
+
+def reorder_options(options: dict):
+    new_options = {}
+    client_options = {}
+    for k, v in options.items():
+        if k.startswith('client_options_'):
+            client_options[k.replace('client_options_', '')] = v
+        else:
+            new_options[k] = v
+    if client_options:
+        new_options['client_options'] = client_options
+    return new_options
+
+def generate_yaml():
+    methods = os.environ['OBS_SSO_METHODS'].split()
+    config = {}
+    for method in methods:
+        options = reorder_options(dict(parse_method(method)))
+        config[method] = options
+    with open(CONFIG_LOCATION, 'w') as f:
+        yaml.safe_dump(config, stream=f)
+
+if __name__ == "__main__":
+    if os.environ.get('OBS_SSO_ENABLED') == 'true':
+        generate_yaml()
diff --git a/docker/frontend-docker-entrypoint.sh b/docker/frontend-docker-entrypoint.sh
new file mode 100755
index 0000000000000000000000000000000000000000..1c64417c1a61d087b83329f642491e37b2432e29
--- /dev/null
+++ b/docker/frontend-docker-entrypoint.sh
@@ -0,0 +1,16 @@
+#!/bin/sh -e
+
+cd /obs/src/api
+
+# Make sure there are no stale files from previous runs
+rm -rfv tmp/pids/*
+chown -R frontend log tmp
+
+/opt/configure-app.sh
+/opt/configure-db.sh
+#/opt/configure-sso.py
+
+: ${OBS_FRONTEND_WORKERS:=4}
+export OBS_FRONTEND_WORKERS
+
+exec /usr/bin/supervisord -n
diff --git a/docker/get-obs-config b/docker/get-obs-config
new file mode 100755
index 0000000000000000000000000000000000000000..72c0edfce7cfe860c741171d41ece9d7c14d110a
--- /dev/null
+++ b/docker/get-obs-config
@@ -0,0 +1,7 @@
+#!/usr/bin/perl
+
+require "/etc/obs/BSConfig.pm";
+
+my $key = $ARGV[0];
+
+print ${"BSConfig::$key"};
diff --git a/docker/services/backend/obsclouduploadserver.conf b/docker/services/backend/obsclouduploadserver.conf
new file mode 100644
index 0000000000000000000000000000000000000000..9edab6b179c49c711e8a3a78a334a4dbea9a8788
--- /dev/null
+++ b/docker/services/backend/obsclouduploadserver.conf
@@ -0,0 +1,11 @@
+[program:clouduploadserver]
+command=/usr/lib/obs/server/bs_clouduploadserver
+directory=/usr/lib/obs/server/
+stdout_logfile=/srv/obs/log/clouduploadserver.log
+redirect_stderr=true
+autostart=True
+priority=1
+stopsignal=KILL
+killasgroup=true
+stopasgroup=true
+
diff --git a/docker/services/backend/obsclouduploadworker.conf b/docker/services/backend/obsclouduploadworker.conf
new file mode 100644
index 0000000000000000000000000000000000000000..7dc21bc16d5f55aaa2b75ef1d8f031311a336b20
--- /dev/null
+++ b/docker/services/backend/obsclouduploadworker.conf
@@ -0,0 +1,11 @@
+[program:clouduploadworker]
+command=/usr/lib/obs/server/bs_clouduploadworker
+directory=/usr/lib/obs/server/
+stdout_logfile=/srv/obs/log/clouduploadworker.log
+redirect_stderr=true
+autostart=True
+priority=1
+stopsignal=KILL
+killasgroup=true
+stopasgroup=true
+
diff --git a/docker/services/backend/obsdeltastore.conf b/docker/services/backend/obsdeltastore.conf
new file mode 100644
index 0000000000000000000000000000000000000000..95642a4e24952ee2f3cf7f546831aa94f581e2e1
--- /dev/null
+++ b/docker/services/backend/obsdeltastore.conf
@@ -0,0 +1,11 @@
+[program:deltastore]
+command=/usr/lib/obs/server/bs_deltastore
+directory=/usr/lib/obs/server/
+stdout_logfile=/srv/obs/log/deltastore.log
+redirect_stderr=true
+autostart=True
+priority=1
+stopsignal=KILL
+killasgroup=true
+stopasgroup=true
+
diff --git a/docker/services/backend/obsdispatcher.conf b/docker/services/backend/obsdispatcher.conf
new file mode 100644
index 0000000000000000000000000000000000000000..f9ff1140b620b1a0fd073f33a2ec2fed1fe1b45f
--- /dev/null
+++ b/docker/services/backend/obsdispatcher.conf
@@ -0,0 +1,11 @@
+[program:dispatcher]
+command=/usr/lib/obs/server/bs_dispatch
+directory=/usr/lib/obs/server/
+stdout_logfile=/srv/obs/log/dispatcher.log
+redirect_stderr=true
+autostart=True
+priority=1
+stopsignal=KILL
+killasgroup=true
+stopasgroup=true
+
diff --git a/docker/services/backend/obsdodup.conf b/docker/services/backend/obsdodup.conf
new file mode 100644
index 0000000000000000000000000000000000000000..c17e785478809b5d1f737623a0c09194b2529e67
--- /dev/null
+++ b/docker/services/backend/obsdodup.conf
@@ -0,0 +1,11 @@
+[program:dodup]
+command=/usr/lib/obs/server/bs_dodup
+directory=/usr/lib/obs/server/
+stdout_logfile=/srv/obs/log/dodup.log
+redirect_stderr=true
+autostart=True
+priority=1
+stopsignal=KILL
+killasgroup=true
+stopasgroup=true
+
diff --git a/docker/services/backend/obspublisher.conf b/docker/services/backend/obspublisher.conf
new file mode 100644
index 0000000000000000000000000000000000000000..d04b004a258916b63a84da93d445cbf1f56cdec0
--- /dev/null
+++ b/docker/services/backend/obspublisher.conf
@@ -0,0 +1,11 @@
+[program:publisher]
+command=/usr/lib/obs/server/bs_publish
+directory=/usr/lib/obs/server/
+stdout_logfile=/srv/obs/log/publisher.log
+redirect_stderr=true
+autostart=True
+priority=1
+stopsignal=KILL
+killasgroup=true
+stopasgroup=true
+
diff --git a/docker/services/backend/obsrepserver.conf b/docker/services/backend/obsrepserver.conf
new file mode 100644
index 0000000000000000000000000000000000000000..f769b8da16a90b6d0bde1a2a22e1b9a4e5d4845e
--- /dev/null
+++ b/docker/services/backend/obsrepserver.conf
@@ -0,0 +1,11 @@
+[program:repserver]
+command=/usr/lib/obs/server/bs_repserver
+directory=/usr/lib/obs/server/
+stdout_logfile=/srv/obs/log/rep_server.log
+redirect_stderr=true
+autostart=True
+priority=1
+stopsignal=KILL
+killasgroup=true
+stopasgroup=true
+
diff --git a/docker/services/backend/obsscheduler@.conf.in b/docker/services/backend/obsscheduler@.conf.in
new file mode 100644
index 0000000000000000000000000000000000000000..69727773f55197bbc2421dec84ca2a9110a3c76a
--- /dev/null
+++ b/docker/services/backend/obsscheduler@.conf.in
@@ -0,0 +1,11 @@
+[program:scheduler@@ARCH@]
+command=/usr/lib/obs/server/bs_sched @ARCH@
+directory=/usr/lib/obs/server/
+stdout_logfile=/srv/obs/log/scheduler_@ARCH@.log
+redirect_stderr=true
+autostart=True
+priority=1
+stopsignal=KILL
+killasgroup=true
+stopasgroup=true
+
diff --git a/docker/services/backend/obsservice.conf b/docker/services/backend/obsservice.conf
new file mode 100644
index 0000000000000000000000000000000000000000..4aa53500fae1d99dff5a95070ee081d09e191112
--- /dev/null
+++ b/docker/services/backend/obsservice.conf
@@ -0,0 +1,11 @@
+[program:srcservice]
+command=/usr/lib/obs/server/bs_service
+directory=/usr/lib/obs/server/
+stdout_logfile=/srv/obs/log/src_service.log
+redirect_stderr=true
+autostart=True
+priority=1
+stopsignal=KILL
+killasgroup=true
+stopasgroup=true
+
diff --git a/docker/services/backend/obsservicedispatch.conf b/docker/services/backend/obsservicedispatch.conf
new file mode 100644
index 0000000000000000000000000000000000000000..86bf22c8809e01e9ec50fd63bfabfcf9f7f3d6d4
--- /dev/null
+++ b/docker/services/backend/obsservicedispatch.conf
@@ -0,0 +1,11 @@
+[program:servicedispatch]
+command=/usr/lib/obs/server/bs_servicedispatch
+directory=/usr/lib/obs/server/
+stdout_logfile=/srv/obs/log/servicedispatch.log
+redirect_stderr=true
+autostart=True
+priority=1
+stopsignal=KILL
+killasgroup=true
+stopasgroup=true
+
diff --git a/docker/services/backend/obssigner.conf b/docker/services/backend/obssigner.conf
new file mode 100644
index 0000000000000000000000000000000000000000..6f46b23fd78fedc88e7969dd4c3d5c6b0c745153
--- /dev/null
+++ b/docker/services/backend/obssigner.conf
@@ -0,0 +1,11 @@
+[program:signer]
+command=/usr/lib/obs/server/bs_signer
+directory=/usr/lib/obs/server/
+stdout_logfile=/srv/obs/log/signer.log
+redirect_stderr=true
+autostart=True
+priority=1
+stopsignal=KILL
+killasgroup=true
+stopasgroup=true
+
diff --git a/docker/services/backend/obssrcserver.conf b/docker/services/backend/obssrcserver.conf
new file mode 100644
index 0000000000000000000000000000000000000000..ab9f7f152fe7c9e16160b95ceb4b7ff653187931
--- /dev/null
+++ b/docker/services/backend/obssrcserver.conf
@@ -0,0 +1,11 @@
+[program:srcserver]
+command=/usr/lib/obs/server/bs_srcserver
+directory=/usr/lib/obs/server/
+stdout_logfile=/srv/obs/log/src_server.log
+redirect_stderr=true
+autostart=True
+priority=1
+stopsignal=KILL
+killasgroup=true
+stopasgroup=true
+
diff --git a/docker/services/backend/obswarden.conf b/docker/services/backend/obswarden.conf
new file mode 100644
index 0000000000000000000000000000000000000000..6a08514ff843d9df4ef0e63f4929e8ec9c921b3c
--- /dev/null
+++ b/docker/services/backend/obswarden.conf
@@ -0,0 +1,11 @@
+[program:warden]
+command=/usr/lib/obs/server/bs_warden
+directory=/usr/lib/obs/server/
+stdout_logfile=/srv/obs/log/warden.log
+redirect_stderr=true
+autostart=True
+priority=1
+stopsignal=KILL
+killasgroup=true
+stopasgroup=true
+
diff --git a/docker/services/frontend/clockwork.conf b/docker/services/frontend/clockwork.conf
new file mode 100644
index 0000000000000000000000000000000000000000..82c1dd8047ec2c0028d27cb97b5738448ffe3d3b
--- /dev/null
+++ b/docker/services/frontend/clockwork.conf
@@ -0,0 +1,10 @@
+[program:clockwork]
+command=bundle exec clockworkd -c config/clock.rb run
+directory=/obs/src/api
+stdout_logfile=/obs/src/api/log/%(program_name)s.log
+redirect_stderr=true
+autostart=true
+stopsignal=KILL
+killasgroup=true
+stopasgroup=true
+user=frontend
diff --git a/docker/services/frontend/delayed-jobs.conf b/docker/services/frontend/delayed-jobs.conf
new file mode 100644
index 0000000000000000000000000000000000000000..4fd86d89922ef0d9ed9f7c0c7dd2d9dc9d69e3ce
--- /dev/null
+++ b/docker/services/frontend/delayed-jobs.conf
@@ -0,0 +1,10 @@
+[program:delayed-jobs]
+command=bundle exec script/delayed_job.api.rb run
+directory=/obs/src/api
+stdout_logfile=/obs/src/api/log/%(program_name)s-stdout.log
+redirect_stderr=true
+autostart=true
+stopsignal=KILL
+killasgroup=true
+stopasgroup=true
+user=frontend
diff --git a/docker/services/frontend/search.conf b/docker/services/frontend/search.conf
new file mode 100644
index 0000000000000000000000000000000000000000..46fc79a76d6ae6e0ee7d8e09c1a94cb254770583
--- /dev/null
+++ b/docker/services/frontend/search.conf
@@ -0,0 +1,10 @@
+[program:search]
+command=bundle exec rake ts:rebuild NODETACH=true
+directory=/obs/src/api
+stdout_logfile=/obs/src/api/log/%(program_name)s-stdout.log
+redirect_stderr=true
+autostart=true
+stopsignal=KILL
+killasgroup=true
+stopasgroup=true
+user=frontend
diff --git a/docker/services/frontend/web.conf b/docker/services/frontend/web.conf
new file mode 100644
index 0000000000000000000000000000000000000000..249d519a847eff37a62a93b756406323065490e8
--- /dev/null
+++ b/docker/services/frontend/web.conf
@@ -0,0 +1,10 @@
+[program:web]
+command=bundle exec puma -p 3000 -w %(ENV_OBS_FRONTEND_WORKERS)s
+directory=/obs/src/api
+stdout_logfile=/obs/src/api/log/%(program_name)s-stdout.log
+redirect_stderr=true
+autostart=true
+stopsignal=KILL
+killasgroup=true
+stopasgroup=true
+user=frontend
diff --git a/docker/services/worker/obsworker.conf b/docker/services/worker/obsworker.conf
new file mode 100644
index 0000000000000000000000000000000000000000..1129707f4e11a0ee3879fa0ca1c0d56049fa2ed6
--- /dev/null
+++ b/docker/services/worker/obsworker.conf
@@ -0,0 +1,13 @@
+[program:obsworker]
+command=%(ENV_OBS_WORKER_PATH)s/bs_worker --hardstatus --root /var/cache/build/root_%(process_num)d --statedir /var/cache/build/state_%(process_num)d --id %(ENV_OBS_WORKER_NAME)s:%(process_num)d %(ENV_OBS_WORKER_OPT)s
+process_name=%(program_name)s_%(process_num)d
+directory=%(ENV_OBS_WORKER_PATH)s
+stdout_logfile=/srv/obs/log/worker_%(process_num)d.log
+redirect_stderr=true
+autostart=True
+priority=1
+stopsignal=KILL
+killasgroup=true
+stopasgroup=true
+numprocs=%(ENV_OBS_WORKER_INSTANCES)s
+numprocs_start=1
diff --git a/docker/worker-docker-entrypoint.sh b/docker/worker-docker-entrypoint.sh
new file mode 100755
index 0000000000000000000000000000000000000000..f873c026658e567f2a7f5690c375537b321a5b5e
--- /dev/null
+++ b/docker/worker-docker-entrypoint.sh
@@ -0,0 +1,74 @@
+#!/bin/sh -e
+
+obsrundir=/run/obs
+workerdir=/var/cache/build
+workerbootdir="$workerdir/boot"
+obslogdir=/var/log/obs
+
+mkdir -p "$obsrundir"
+
+: ${OBS_REPO_SERVERS:=obs-server:5252}
+
+repo_param=
+for i in $OBS_REPO_SERVERS
+do
+    repo_param="$repo_param --reposerver http://$i"
+    WORKER_CODE="http://$i"
+done
+
+: ${OBS_WORKER_NICE_LEVEL:=18}
+
+OBS_WORKER_OPT="--hardstatus $repo_param ${OBS_WORKER_JOBS:+--jobs $OBS_WORKER_JOBS}\
+ ${OBS_WORKER_CLEANUP_CHROOT:+--cleanup-chroot}\
+ ${OBS_WORKER_WIPE_AFTER_BUILD:+--wipeafterbuild}\
+ ${OBS_SRC_SERVER:+--srcserver $OBS_SRC_SERVER}\
+ ${OBS_ARCH:+--arch $OBS_ARCH} ${OBS_WORKER_OPT}"
+
+export OBS_WORKER_OPT
+
+: ${OBS_WORKER_NAME:=$(hostname)}
+
+export OBS_WORKER_NAME
+
+: ${OBS_WORKER_INSTANCES:=$(nproc)}
+
+export OBS_WORKER_INSTANCES
+
+OBS_WORKER_PATH=/usr/lib/obs/server
+
+update_worker() {
+    echo "Fetching initial worker code from $WORKER_CODE/getworkercode"
+    mkdir -p "$workerbootdir"
+    cd "$workerbootdir"
+    for retry in $(seq 10)
+    do
+        if curl --fail --silent --show-error "$WORKER_CODE/getworkercode" | cpio --extract
+        then
+            ln -sfn . XML
+            chmod 755 bs_worker
+            echo "Worker code downloaded." >&2
+            ls -l
+            return 0
+        fi
+        # we need to wait for rep server maybe
+        echo "WARNING: Could not reach rep server $WORKER_CODE. Trying again." >&2
+        sleep 10
+    done
+    echo "ERROR: Unable to reach rep server $WORKER_CODE!" >&2
+    return 1
+}
+
+if [ -n "$WORKER_CODE" ]
+then
+    update_worker
+    OBS_WORKER_PATH="$workerbootdir"
+fi
+
+export OBS_WORKER_PATH
+
+for i in $(seq 1 $OBS_WORKER_INSTANCES)
+do
+    mkdir -p $workerdir/root_$i $workerdir/state_$i
+done
+
+exec nice -n "$OBS_WORKER_NICE_LEVEL" /usr/bin/supervisord -n
diff --git a/docs/api/api/obs.rng b/docs/api/api/obs.rng
index b491ee0b6d17ac47a36d8b2f8f2b191e2672082e..40acb41b32509e632134d214b9cbd677de3bba9f 100644
--- a/docs/api/api/obs.rng
+++ b/docs/api/api/obs.rng
@@ -151,6 +151,14 @@
     </element>
   </define>
 
+  <define ns="" name="allowbuilddep-element">
+    <element name="allowbuilddep">
+      <attribute name="name">
+        <data type="string" />
+      </attribute>
+    </element>
+  </define>
+
   <define ns="" name="group-element">
     <element name="group">
       <attribute name="groupid">
diff --git a/docs/api/api/project.rng b/docs/api/api/project.rng
index bff66d3f7e1b2a8e38a40218005930e5b81abaac..ef101dbf23991b04991ad9d9f6acaf1791bb44f6 100644
--- a/docs/api/api/project.rng
+++ b/docs/api/api/project.rng
@@ -109,6 +109,10 @@
           </element>
         </optional>
 
+        <zeroOrMore>
+          <ref name="allowbuilddep-element"/>
+        </zeroOrMore>
+
         <zeroOrMore>
           <ref name="person-element"/>
         </zeroOrMore>
diff --git a/src/api/Gemfile b/src/api/Gemfile
index 5b38df49d810849fb2409641ba176127ebf60178..c1a70df6ec04e6f64eacae1b19a5d48167a11c0d 100644
--- a/src/api/Gemfile
+++ b/src/api/Gemfile
@@ -98,6 +98,8 @@ group :development, :production do
   gem 'ruby-ldap', require: false
   # to have better logs
   gem 'lograge'
+  # Use Puma as the app server (rails 5 default)
+  gem 'puma', '~> 4.0'
 end
 
 group :production do
@@ -175,8 +177,6 @@ group :development, :test do
   gem 'single_test'
   # to find n+1 queries
   gem 'bullet'
-  # Use Puma as the app server (rails 5 default)
-  gem 'puma', '~> 4.0'
   # to drive headless chrome
   gem 'selenium-webdriver'
   # scan for security vulnerability (circleci only, do not touch)
diff --git a/src/api/app/assets/javascripts/webui/application.js.erb b/src/api/app/assets/javascripts/webui/application.js.erb
index 7e72e1475359bdd20d2015057ded445c4fa997a1..1c048d52f9f1f1f1dbcaca2a4e2953c1c80a5c6d 100644
--- a/src/api/app/assets/javascripts/webui/application.js.erb
+++ b/src/api/app/assets/javascripts/webui/application.js.erb
@@ -164,19 +164,6 @@ function resizeMonitorBoxes() {
 }
 
 function callPiwik() {
-    var u = (("https:" == document.location.protocol) ? "https://beans.opensuse.org/piwik/" : "http://beans.opensuse.org/piwik/");
-    _paq.push(['setSiteId', 8]);
-    _paq.push(['setTrackerUrl', u + 'piwik.php']);
-    _paq.push(['trackPageView']);
-    _paq.push(['setDomains', ["*.opensuse.org"]]);
-    var d = document,
-        g = d.createElement('script'),
-        s = d.getElementsByTagName('script')[0];
-    g.type = 'text/javascript';
-    g.defer = true;
-    g.async = true;
-    g.src = u + 'piwik.js';
-    s.parentNode.insertBefore(g, s);
 }
 
 $(document).ajaxSend(function (event, request, settings) {
diff --git a/src/api/app/models/allowbuilddep.rb b/src/api/app/models/allowbuilddep.rb
new file mode 100644
index 0000000000000000000000000000000000000000..c774b5ecf84b0cfc83d2029dfb823c5b57bdc624
--- /dev/null
+++ b/src/api/app/models/allowbuilddep.rb
@@ -0,0 +1,3 @@
+class Allowbuilddep < ApplicationRecord
+  belongs_to :project, foreign_key: :db_project_id, inverse_of: :allowbuilddeps
+end
diff --git a/src/api/app/models/project.rb b/src/api/app/models/project.rb
index 3c49ce72878dc22584e2289183463c63f5969d79..64d0e9138df00e41a943ff60ac3dbc7091e073d2 100644
--- a/src/api/app/models/project.rb
+++ b/src/api/app/models/project.rb
@@ -43,6 +43,8 @@ class Project < ApplicationRecord
   has_many :issues, through: :packages
   has_many :attribs, dependent: :destroy
 
+  has_many :allowbuilddeps, dependent: :destroy, foreign_key: :db_project_id
+
   has_many :repositories, dependent: :destroy, foreign_key: :db_project_id
   has_many :release_targets, through: :repositories
   has_many :target_repositories, through: :release_targets
@@ -573,6 +575,12 @@ class Project < ApplicationRecord
     !errors.any?
   end
 
+  def builddep_allowed?(project_name)
+    allowbuilddeps.any? do |abd|
+      abd.name == project_name
+    end
+  end
+
   def update_from_xml!(xmlhash, force = nil)
     Project::UpdateFromXmlCommand.new(self).run(xmlhash, force)
   end
@@ -1428,9 +1436,10 @@ class Project < ApplicationRecord
       # ignore this for remote targets
       if target_project.class == Project &&
          target_project.disabled_for?('access', nil, nil) &&
-         !FlagHelper.xml_disabled_for?(request_data, 'access')
+         !FlagHelper.xml_disabled_for?(request_data, 'access') &&
+         !target_project.builddep_allowed?(project_name)
         return {
-          error: "Project links work only when both projects have same read access protection level: #{project_name} -> #{target_project_name}"
+          error: "Project #{project_name} depends on restricted project #{target_project_name}"
         }
       end
       logger.debug "Project #{project_name} link checked against #{target_project_name} projects permission"
@@ -1448,8 +1457,10 @@ class Project < ApplicationRecord
           begin
             target_project = Project.get_by_name(target_project_name)
             # user can access tprj, but backend would refuse to take binaries from there
-            if target_project.class == Project && target_project.disabled_for?('access', nil, nil)
-              return { error: "The current backend implementation is not using binaries from read access protected projects #{target_project_name}" }
+            if target_project.class == Project &&
+               target_project.disabled_for?('access', nil, nil) &&
+               !target_project.builddep_allowed?(project_name)
+              return { error: "Trying to use binaries from read access protected project #{target_project_name}" }
             end
           rescue UnknownObjectError
             return { error: "A project with the name #{target_project_name} does not exist. Please update the repository path elements." }
diff --git a/src/api/app/models/project/update_from_xml_command.rb b/src/api/app/models/project/update_from_xml_command.rb
index a566a69082ada3ab4e0e74ca97f2affdab9d82d9..8cecadee24f1afe144820e8320355ddff5966b06 100644
--- a/src/api/app/models/project/update_from_xml_command.rb
+++ b/src/api/app/models/project/update_from_xml_command.rb
@@ -56,6 +56,7 @@ class Project
       end
 
       update_repositories(xmlhash, force)
+      update_allowbuilddep_from_xml(xmlhash)
     end
 
     private
@@ -142,6 +143,13 @@ class Project
       project.maintained_projects.delete(olds.values)
     end
 
+    def update_allowbuilddep_from_xml(xmlhash)
+      project.allowbuilddeps.destroy_all
+      xmlhash.elements('allowbuilddep') do |abd|
+        project.allowbuilddeps.create(name: abd['name'])
+      end
+    end
+
     def update_repositories(xmlhash, force)
       fill_repo_cache
 
diff --git a/src/api/app/views/models/_project.xml.builder b/src/api/app/views/models/_project.xml.builder
index 75b41d237842d90bdd209aae4d56019622bfb5d5..c71ab54a1687c86948218ec5cd0f726c568c4b35 100644
--- a/src/api/app/views/models/_project.xml.builder
+++ b/src/api/app/views/models/_project.xml.builder
@@ -19,6 +19,11 @@ xml.project(project_attributes) do
 
   my_model.render_relationships(xml)
 
+  adbs = my_model.allowbuilddeps.sort { |a, b| b.name <=> a.name }
+  adbs.each do |adb|
+    xml.allowbuilddep(name: adb.name)
+  end
+
   repos = my_model.repositories.preload(:download_repositories, :release_targets, :hostsystem, path_elements: :link).not_remote.order(name: :desc)
   FlagHelper.render(my_model, xml)
 
@@ -54,10 +59,13 @@ xml.project(project_attributes) do
       repo.path_elements.includes(:link).each do |pe|
         if pe.link.remote_project_name.present?
           project_name = pe.link.project.name + ':' + pe.link.remote_project_name
-        else
+          xml_repository.path(:project => project_name, :repository => pe.link.name)
+        elsif pe.link.project
           project_name = pe.link.project.name
+          xml_repository.path(project: project_name, repository: pe.link.name)
+        else
+          xml_repository.path(project: "HIDDEN", repository: pe.link.name)
         end
-        xml_repository.path(project: project_name, repository: pe.link.name)
       end
       repo.repository_architectures.joins(:architecture).pluck('architectures.name').each do |arch|
         xml_repository.arch arch
diff --git a/src/api/config/environments/production.rb b/src/api/config/environments/production.rb
index bf5bc55254076e42c2acb015f24d15cb566f2357..72d510d2097ed7313c1255b9ecd279830e434434 100644
--- a/src/api/config/environments/production.rb
+++ b/src/api/config/environments/production.rb
@@ -37,7 +37,7 @@ OBSApi::Application.configure do
   config.action_controller.perform_caching = true
 
   # Disable Rails's static asset server (Apache or nginx will already do this)
-  config.public_file_server.enabled = false
+  config.public_file_server.enabled = true
 
   # Compress JavaScripts and CSS
   config.assets.compress = true
diff --git a/src/api/config/options.yml.example b/src/api/config/options.yml.example
index bc98f94f9801f532b1b75db8bcf970e5ca6b26d8..1a13f782fceccf5a9854ab25008b06a3fcd18d9a 100644
--- a/src/api/config/options.yml.example
+++ b/src/api/config/options.yml.example
@@ -199,6 +199,7 @@ default: &default
 
 production:
   <<: *default
+  memcached_host: cache
 
 test:
   <<: *default
@@ -209,4 +210,3 @@ development:
   <<: *default
   source_host: backend
   memcached_host: cache
-
diff --git a/src/api/db/migrate/20191011000000_create_allowbuilddeps.rb b/src/api/db/migrate/20191011000000_create_allowbuilddeps.rb
new file mode 100644
index 0000000000000000000000000000000000000000..acf2f28a567163a4e748d8e248441eba9c184df5
--- /dev/null
+++ b/src/api/db/migrate/20191011000000_create_allowbuilddeps.rb
@@ -0,0 +1,13 @@
+class CreateAllowbuilddeps < ActiveRecord::Migration[4.2]
+  def self.up
+    create_table :allowbuilddeps do |t|
+      t.integer :db_project_id, :null => false
+      t.string  :name, :null => false
+      t.index [ :db_project_id, :name ]
+    end
+  end
+
+  def self.down
+    drop_table :allowbuilddeps
+  end
+end
diff --git a/src/api/db/structure.sql b/src/api/db/structure.sql
index 05c22c6a82f18334dee1d12a20161075328b82a9..c5f668f909d0f5d1285222c5ee6435c31d9fae5e 100644
--- a/src/api/db/structure.sql
+++ b/src/api/db/structure.sql
@@ -24,6 +24,14 @@ CREATE TABLE `ar_internal_metadata` (
   PRIMARY KEY (`key`)
 ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci ROW_FORMAT=DYNAMIC;
 
+CREATE TABLE `allowbuilddeps` (
+  `id` int(11) NOT NULL AUTO_INCREMENT,
+  `db_project_id` int(11) NOT NULL,
+  `name` varchar(255) NOT NULL,
+  PRIMARY KEY (`id`),
+  KEY `index_allowbuilddeps_on_db_project_id_and_name` (`db_project_id`,`name`(191))
+) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
+
 CREATE TABLE `architectures` (
   `id` int(11) NOT NULL AUTO_INCREMENT,
   `name` varchar(255) CHARACTER SET utf8 NOT NULL,
@@ -1456,6 +1464,7 @@ INSERT INTO `schema_migrations` (version) VALUES
 ('20190215131711'),
 ('20190328131711'),
 ('20190412130831'),
-('20190520130009');
+('20190520130009'),
+('20191011000000');
 
 
diff --git a/src/api/public/402.html b/src/api/public/402.html
index 6f3891bfdf8c997dd0ba37b9ad7c936fb2e75921..b1accb27384b415f60dd7974bacdb665d8cfbb4b 100644
--- a/src/api/public/402.html
+++ b/src/api/public/402.html
@@ -5,7 +5,7 @@
   <meta name="viewport" content="width=device-width, initial-scale=1">
   <title>Error 402</title>
   
-  <link rel="stylesheet" href="//netdna.bootstrapcdn.com/bootstrap/3.1.0/css/bootstrap.min.css">
+  <link rel="stylesheet" href="/css/bootstrap.min.css">
   <style type="text/css">
     html,
     body {
diff --git a/src/api/public/403.html b/src/api/public/403.html
index eb732a2b69088592a54b5af1f56a0057007bfcac..17e9c41b95b0cb47f7116ae146a85afdde5e63c9 100644
--- a/src/api/public/403.html
+++ b/src/api/public/403.html
@@ -5,7 +5,7 @@
   <meta name="viewport" content="width=device-width, initial-scale=1">
   <title>Error 403</title>
   
-  <link rel="stylesheet" href="//netdna.bootstrapcdn.com/bootstrap/3.1.0/css/bootstrap.min.css">
+  <link rel="stylesheet" href="/css/bootstrap.min.css">
   <style type="text/css">
     html,
     body {
diff --git a/src/api/public/404.html b/src/api/public/404.html
index ea6d8b89455699c22afb3f4d9626c1c0269e688f..d0d4c591dd0b65e4833a52d6cb07e69d2b33f32a 100644
--- a/src/api/public/404.html
+++ b/src/api/public/404.html
@@ -5,7 +5,7 @@
   <meta name="viewport" content="width=device-width, initial-scale=1">
   <title>Error 404</title>
   
-  <link rel="stylesheet" href="//netdna.bootstrapcdn.com/bootstrap/3.1.0/css/bootstrap.min.css">
+  <link rel="stylesheet" href="/css/bootstrap.min.css">
   <style type="text/css">
     html,
     body {
diff --git a/src/api/public/408.html b/src/api/public/408.html
index 94bd91d920ec953fdea1521a66191e7f1d16feab..9ba919dfc086f4d010b41f0442de571982c2ca39 100644
--- a/src/api/public/408.html
+++ b/src/api/public/408.html
@@ -5,7 +5,7 @@
   <meta name="viewport" content="width=device-width, initial-scale=1">
   <title>Error 408</title>
   
-  <link rel="stylesheet" href="//netdna.bootstrapcdn.com/bootstrap/3.1.0/css/bootstrap.min.css">
+  <link rel="stylesheet" href="/css/bootstrap.min.css">
   <style type="text/css">
     html,
     body {
diff --git a/src/api/public/500.html b/src/api/public/500.html
index 056fe80c587c56b1a40f943127509b1bf2bc0c03..d2ba2a012d37fb0b1368a7d23ebccd480ca3b2a6 100644
--- a/src/api/public/500.html
+++ b/src/api/public/500.html
@@ -5,7 +5,7 @@
   <meta name="viewport" content="width=device-width, initial-scale=1">
   <title>Error 500</title>
   
-  <link rel="stylesheet" href="//netdna.bootstrapcdn.com/bootstrap/3.1.0/css/bootstrap.min.css">
+  <link rel="stylesheet" href="/css/bootstrap.min.css">
   <style type="text/css">
     html,
     body {
diff --git a/src/api/test/unit/project_test.rb b/src/api/test/unit/project_test.rb
index d257a0bbce6dcd6c595c57f28a72d4342a2ba0cc..ff58d60bf2e55138a53d797a48694cb0d2b8180c 100644
--- a/src/api/test/unit/project_test.rb
+++ b/src/api/test/unit/project_test.rb
@@ -939,9 +939,9 @@ class ProjectTest < ActiveSupport::TestCase
       </project>
     XML
 
-    actual = Project.validate_maintenance_xml_attribute(Xmlhash.parse(xml))
     expected = { error: 'No write access to maintained project Apache' }
-    assert_equal actual, expected
+    actual = Project.validate_maintenance_xml_attribute(Xmlhash.parse(xml))
+    assert_equal expected, actual
   end
 
   test 'validate_maintenance_xml_attribute returns no error if User can modify target project' do
@@ -956,7 +956,7 @@ class ProjectTest < ActiveSupport::TestCase
     XML
 
     actual = Project.validate_maintenance_xml_attribute(Xmlhash.parse(xml))
-    assert_equal actual, {}
+    assert_equal({}, actual)
   end
 
   test 'validate_link_xml_attribute returns no error if target project is not disabled' do
@@ -972,7 +972,7 @@ class ProjectTest < ActiveSupport::TestCase
     XML
 
     actual = Project.validate_link_xml_attribute(Xmlhash.parse(xml), project.name)
-    assert_equal actual, {}
+    assert_equal({}, actual)
   end
 
   test 'validate_link_xml_attribute returns an error if target project access is disabled' do
@@ -990,9 +990,9 @@ class ProjectTest < ActiveSupport::TestCase
     flag = project.add_flag('access', 'disable')
     flag.save
 
+    expected = { error: 'Project the_project depends on restricted project home:Iggy' }
     actual = Project.validate_link_xml_attribute(Xmlhash.parse(xml), 'the_project')
-    expected = { error: 'Project links work only when both projects have same read access protection level: the_project -> home:Iggy' }
-    assert_equal actual, expected
+    assert_equal expected, actual
   end
 
   test 'validate_repository_xml_attribute returns no error if project access is not disabled' do
@@ -1007,8 +1007,7 @@ class ProjectTest < ActiveSupport::TestCase
     XML
 
     actual = Project.validate_repository_xml_attribute(Xmlhash.parse(xml), 'other_project')
-    expected = {}
-    assert_equal actual, expected
+    assert_equal({}, actual)
   end
 
   test 'returns an error if repository access is disabled' do
@@ -1025,9 +1024,29 @@ class ProjectTest < ActiveSupport::TestCase
       </project>
     XML
 
+    expected = { error: 'Trying to use binaries from read access protected project home:Iggy' }
     actual = Project.validate_repository_xml_attribute(Xmlhash.parse(xml), 'other_project')
-    expected = { error: 'The current backend implementation is not using binaries from read access protected projects home:Iggy' }
-    assert_equal actual, expected
+    assert_equal expected, actual
+  end
+
+  test 'returns no error if repository access is disabled but project is allowed to build-depend' do
+    User.session = users(:Iggy)
+    project = projects(:home_Iggy)
+    project.allowbuilddeps.create(name: 'allowed_project')
+    flag = project.add_flag('access', 'disable')
+    flag.save
+    project.save
+
+    xml = <<-XML.strip_heredoc
+      <project name='allowed_project'>
+        <title>Up-to-date project</title>
+        <description>the description</description>
+        <repository><path project='home:Iggy'></path></repository>
+      </project>
+    XML
+
+    actual = Project.validate_repository_xml_attribute(Xmlhash.parse(xml), 'allowed_project')
+    assert_equal({}, actual)
   end
 
   test 'returns no error if target project equals project' do
@@ -1045,8 +1064,7 @@ class ProjectTest < ActiveSupport::TestCase
     XML
 
     actual = Project.validate_repository_xml_attribute(Xmlhash.parse(xml), 'home:Iggy')
-    expected = {}
-    assert_equal actual, expected
+    assert_equal({}, actual)
   end
 
   test 'get_removed_repositories returns all repositories if new_repositories does not contain the old repositories' do
@@ -1064,7 +1082,7 @@ class ProjectTest < ActiveSupport::TestCase
     XML
 
     actual = project.get_removed_repositories(Xmlhash.parse(xml))
-    assert_equal actual, project.repositories.to_a
+    assert_equal project.repositories.to_a, actual
   end
 
   test 'get_removed_repositories returns the repository if new_repositories does not include it' do
@@ -1082,7 +1100,7 @@ class ProjectTest < ActiveSupport::TestCase
     XML
 
     actual = project.get_removed_repositories(Xmlhash.parse(xml))
-    assert_equal actual, [repositories(:repositories_96)]
+    assert_equal [repositories(:repositories_96)], actual
   end
 
   test 'get_removed_repositories returns no repository if new_repositories matches old_repositories' do
@@ -1100,7 +1118,7 @@ class ProjectTest < ActiveSupport::TestCase
     XML
 
     actual = project.get_removed_repositories(Xmlhash.parse(xml))
-    assert_equal actual, []
+    assert_equal [], actual
   end
 
   test 'get_removed_repositories returns all repositories if new_repositories is empty' do
@@ -1116,7 +1134,7 @@ class ProjectTest < ActiveSupport::TestCase
     XML
 
     actual = project.get_removed_repositories(Xmlhash.parse(xml))
-    assert_equal actual, project.repositories.to_a
+    assert_equal project.repositories.to_a, actual
   end
 
   test 'get_removed_repositories returns nothing if repositories is empty' do
@@ -1134,7 +1152,7 @@ class ProjectTest < ActiveSupport::TestCase
     XML
 
     actual = project.get_removed_repositories(Xmlhash.parse(xml))
-    assert_equal actual, []
+    assert_equal [], actual
   end
 
   test 'get_removed_repositories does not include repositories which belong to a remote project' do
@@ -1155,13 +1173,13 @@ class ProjectTest < ActiveSupport::TestCase
     XML
 
     actual = project.get_removed_repositories(Xmlhash.parse(xml))
-    assert_equal actual, [first_repository]
+    assert_equal [first_repository], actual
   end
 
   test 'check repositories returns no error if no linking and no linking taget repository exists' do
     User.session = users(:Iggy)
     actual = Project.check_repositories(@project.repositories)
-    assert_equal actual, {}
+    assert_equal({}, actual)
   end
 
   test 'check repositories returns an error if a linking repository exists' do
@@ -1171,12 +1189,12 @@ class ProjectTest < ActiveSupport::TestCase
     repository = @project.repositories.first
     repository.links << path
 
-    actual = Project.check_repositories(@project.repositories)
     expected = {
       error: "Unable to delete repository; following repositories depend on this project:\nhome:tom/home_coolo_standard"
     }
+    actual = Project.check_repositories(@project.repositories)
 
-    assert_equal actual, expected
+    assert_equal expected, actual
   end
 
   test 'check repositories returns an error if a linking target repository exists' do
@@ -1186,12 +1204,12 @@ class ProjectTest < ActiveSupport::TestCase
     repository = @project.repositories.first
     repository.targetlinks << release_target
 
-    actual = Project.check_repositories(@project.repositories)
     expected = {
       error: "Unable to delete repository; following target repositories depend on this project:\nhome:Iggy/10.2"
     }
+    actual = Project.check_repositories(@project.repositories)
 
-    assert_equal actual, expected
+    assert_equal expected, actual
   end
 
   test 'linked_packages returns all packages from projects inherited by one level' do
@@ -1292,7 +1310,7 @@ class ProjectTest < ActiveSupport::TestCase
   end
 
   test 'config file exists and have the right content' do
-    assert_equal @project.config.content.strip, File.read('test/fixtures/files/home_iggy_project_config.txt').strip
+    assert_equal File.read('test/fixtures/files/home_iggy_project_config.txt').strip, @project.config.content.strip
   end
 
   test 'update config file and reload it, it also should have the right content' do
@@ -1302,17 +1320,17 @@ class ProjectTest < ActiveSupport::TestCase
     User.session = users(:Iggy)
     query_params = { user: User.session!.login, comment: 'Updated by test' }
     assert @project.config.save(query_params, new_project_config)
-    assert_equal @project.config.content, new_project_config
+    assert_equal new_project_config, @project.config.content
 
     # Leave the backend file as it was
     assert @project.config.save(query_params, project_config)
   end
 
   def test_open_requests
-    apache = projects(:Apache)
-    assert_equal apache.open_requests, reviews: [1000, 10, 4], targets: [5], incidents: [], maintenance_release: []
+    expected = { reviews: [1000, 10, 4], targets: [5], incidents: [], maintenance_release: [] }
+    assert_equal expected, projects(:Apache).open_requests
 
-    maintenance = projects(:My_Maintenance)
-    assert_equal maintenance.open_requests, reviews: [], targets: [6], incidents: [6], maintenance_release: [7]
+    expected = { reviews: [], targets: [6], incidents: [6], maintenance_release: [7] }
+    assert_equal expected, projects(:My_Maintenance).open_requests
   end
 end
diff --git a/src/backend/BSConfig.pm.template b/src/backend/BSConfig.pm.template
index 09a0244083c391bc8f7b25625bb07540dae1aca3..7510f9d39b006a3d1a24d80618160696d43e8758 100644
--- a/src/backend/BSConfig.pm.template
+++ b/src/backend/BSConfig.pm.template
@@ -124,28 +124,28 @@ our $packtrack = [];
 our $relsync_pool = {
  "local" => "local",
  "i586" => "i586",
- "x86_64" => "i586",
+ "x86_64" => "x86_64",
  "ppc" => "ppc",
- "ppc64" => "ppc",
- "ppc64le" => "ppc",
+ "ppc64" => "ppc64",
+ "ppc64le" => "ppc64le",
  "mips" => "mips",
- "mips64" => "mips",
+ "mips64" => "mips64",
  "mipsel" => "mipsel",
- "mips64el" => "mipsel",
- "aarch64"  => "arm",
- "aarch64_ilp32"  => "arm",
- "armv4l"  => "arm",
- "armv5l"  => "arm",
- "armv6l"  => "arm",
- "armv6hl" => "arm",
- "armv7l"  => "arm",
- "armv7hl" => "arm",
+ "mips64el" => "mips64el",
+ "aarch64"  => "aarch64",
+ "aarch64_ilp32"  => "aarch64_ilp32",
+ "armv4l"  => "armv4l",
+ "armv5l"  => "armv5l",
+ "armv6l"  => "armv6l",
+ "armv6hl" => "armv6hl",
+ "armv7l"  => "armv7l",
+ "armv7hl" => "armv7hl",
  "armv5el" => "armv5el", # they do not exist
  "armv6el" => "armv6el",
  "armv7el" => "armv7el",
  "armv8el" => "armv8el",
  "sparcv9" => "sparcv9",
- "sparc64" => "sparcv9",
+ "sparc64" => "sparc64",
 };
 
 #No extra stage server sync
@@ -204,6 +204,25 @@ our $relsync_pool = {
 #                           'main' => 'http://main-backend-server:5252',
 #                         };
 
+# Manage these projects' repositories with reprepro instead of using
+# dpkg-scanpackages/dpkg-scansources. Before enabling this, you will
+# need to create /srv/obs/repos/shared/exampledistro/
+# with a paragraph in its conf/distributions file containing
+# "Codename: examplesuite" and "Components: examplecomponent othercomponent"
+#
+#our $reprepository = {
+#  "exampledistro:examplesuite:examplecomponent/debian_wheezy_main_main" => {
+#    "repository" => "shared/exampledistro",
+#    "codename" => "examplesuite",
+#    "component" => "examplecomponent",
+#  },
+#  "exampledistro:examplesuite:othercomponent/debian_wheezy_main_main" => {
+#    "repository" => "shared/exampledistro",
+#    "codename" => "examplesuite",
+#    "component" => "othercomponent",
+#  },
+#};
+
 # host specific configs
 my $hostconfig = __FILE__;
 $hostconfig =~ s/[^\/]*$/bsconfig.$hostname/;
@@ -276,4 +295,10 @@ our $publish_containers = [
 # public cloud uploader configuration
 # our $cloudupload_pubkey = "/etc/obs/cloudupload/_pubkey"; # default setting
 
+my $localconfig = "/etc/obs/BSConfig.local.pm";
+if (-r $localconfig) {
+  print STDERR "reading $localconfig...\n";
+  require $localconfig;
+}
+
 1;
diff --git a/src/backend/BSSched/Access.pm b/src/backend/BSSched/Access.pm
index d4133b362bfc1eb044d45ff7d16316eb2cd5548c..b8667fa29b54f38625c96d1cfcfe4c5cca36a641 100644
--- a/src/backend/BSSched/Access.pm
+++ b/src/backend/BSSched/Access.pm
@@ -42,6 +42,21 @@ sub checkaccess {
   return $access;
 }
 
+sub checkbuilddepok {
+  my ($gctx, $projid, $aprojid) = @_;
+
+  my $adata = $gctx->{projpacks}->{$aprojid} || {};
+  my $allow = $adata->{allowbuilddep} || [];
+
+  foreach my $a (grep { ref($_) eq 'HASH' } @$allow) {
+    if ($a->{name} eq $projid) {
+      return 1;
+    }
+  }
+
+  return 0;
+}
+
 # check if every user from oprojid may access projid
 sub checkroles {
   my ($gctx, $type, $projid, $packid, $oprojid, $opackid) = @_;
@@ -101,6 +116,10 @@ sub checkprpaccess {
   # ok if aprp is not protected
   return 1 if checkaccess($gctx, 'access', $aprojid, undef, $arepoid);
   my ($projid, $repoid) = split('/', $prp, 2);
+
+  # ok if prp has access to aprp (via allowbuilddep in project meta):
+  return 1 if checkbuilddepok($gctx, $projid, $aprojid);
+
   # not ok if prp is unprotected
   return 0 if checkaccess($gctx, 'access', $projid, undef, $repoid);
   # both prp and aprp are proteced.
diff --git a/src/backend/BSXML.pm b/src/backend/BSXML.pm
index 4281b6304b961b22b5c838b8c0bb6b58735805e2..9d759126e6bc6bbe430ad57b482d613ece981a45 100644
--- a/src/backend/BSXML.pm
+++ b/src/backend/BSXML.pm
@@ -115,6 +115,10 @@ our @flags = (
       [ 'access' => @disableenable ],
 );
 
+our @allowbuilddep = (
+      [[ 'allowbuilddep' => 'name' ]],
+);
+
 our @roles = (
      [[ 'person' =>
             'userid',
@@ -153,6 +157,7 @@ our $proj = [
 	    'project',
       ],
 	@roles,
+	@allowbuilddep,
 	$maintenance,
 	@flags,
       [ $repo ],
@@ -302,6 +307,7 @@ our $projpack = [
 	     [],
 	    'title',
 	    'description',
+	     @allowbuilddep,
 	    'config',
 	    'patternmd5',
 	 [[ 'link' =>
diff --git a/src/backend/bs_publish b/src/backend/bs_publish
index bb67bdb32036fe5a0e37dccda731521e9e2333fb..4776ef5e9a290196ce589a5931b0259369305335 100755
--- a/src/backend/bs_publish
+++ b/src/backend/bs_publish
@@ -866,6 +866,75 @@ sub deleterepo_susetags {
   qsystem('rm', '-rf', "$extrep/descr") if -d "$extrep/descr";
 }
 
+sub updaterepo_reprepro {
+  my ($prp, $extrep, @changed) = @_;
+
+  my $repo = "$extrepodir/$BSConfig::reprepository->{$prp}{'repository'}";
+  my $codename = $BSConfig::reprepository->{$prp}{'codename'};
+  my $component = $BSConfig::reprepository->{$prp}{'component'};
+
+  for my $f (@changed) {
+      if ($f =~ /\.changes$/) {
+        print " Updated changes file => $f\n" ;
+        my %types;
+        if (open (my $fh, '<', "$extrep/$f")) {
+          # Read the .changes file looking for binary packages
+          # (.deb, .udeb, .ddeb)
+          my $in_files = 0;
+          while (my $line = <$fh>) {
+            print "    $line";
+
+            if ($in_files) {
+              if ($line =~ /^\s/) {
+                if ($line =~ /\.((?:|u|d)deb)$/) {
+                  $types{$1} = 1;
+                }
+              } else {
+                $in_files = 0;
+              }
+            } elsif ($line =~ /^Files\s*:/) {
+              $in_files = 1;
+            }
+          }
+        } else {
+          print "    unable to open $extrep/$f: $!\n";
+        }
+
+        if (!%types) {
+          # no binary packages at all? assume we must have mis-parsed it,
+          # and run reprepro for the .deb files so we get a better
+          # error report
+          print "  warning: no .deb/.udeb/.ddeb found in .changes, assuming .deb only\n";
+          %types = (deb => 1);
+        }
+
+        foreach my $type (keys %types) {
+
+          my @args = ('reprepro', '-b', $repo,
+            '--ignore=wrongdistribution',
+            '--ignore=conflictingarchall',
+            '--ignore=unusedarch',
+            '--ignore=surprisingbinary',
+            '-T', $type,
+            '-C', $component,
+            'include', $codename,
+            "$extrep/$f");
+          print("  importing .$type binaries: ", join(' ', @args), "\n");
+          qsystem(@args);
+         }
+      } elsif ($f =~ /\.dsc$/) {
+        print " Updated dsc file => $f\n" ;
+        my @args = ('reprepro', '-b', $repo,
+          '-C', $component,
+          '-P', 'standard',
+          '-S', 'main',
+          'includedsc', $codename, "$extrep/$f");
+        print("  importing sources: ", join(' ', @args), "\n");
+        qsystem(@args);
+      }
+  }
+}
+
 sub compress_and_rename {
   my ($tmpfile, $file) =@_;
   if (-s $tmpfile) {
@@ -1851,7 +1920,7 @@ sub publish {
 	$p = "$1/$bin";
 	$p = $1 eq 'src' || $1 eq 'nosrc' ? "SRPMS/$bin" : "RPMS/$bin" if $repotype{'resarchhack'};
       } elsif ($bin =~ /^.+_[^_]+_([^_\.]+)\.[ud]?deb$/) {
-	$p = "$1/$bin";
+	$p = "$arch/$bin";
       } elsif ($bin =~ /\.(?:AppImage|AppImage.zsync|snap|exe)?$/) {
 	$p = "$bin";
       } elsif ($bin =~ /\.d?rpm$/) {
@@ -1862,7 +1931,7 @@ sub publish {
       } elsif ($bin =~ /\.deb$/) {
 	# legacy format XXX no udeb handling
 	my $q = Build::query("$r/$rbin", 'evra' => 1);
-	$p = "$q->{'arch'}/$q->{'name'}_$q->{'version'}";
+	$p = "$arch/$q->{'name'}_$q->{'version'}";
 	$p .= "-$q->{'release'}" if defined $q->{'release'};
 	$p .= "_$q->{'arch'}.deb";
       } elsif ($bin =~ /\.pkg\.tar\.(?:gz|xz|zst)(?:\.sig)?$/) {
@@ -1945,6 +2014,8 @@ sub publish {
         } elsif ($bin =~ /\.(?:tgz|zip)?(?:\.sha256)?$/) {
           # esp. for Go builds
           $p = "$bin";
+	} elsif ($bin =~ /\.changes(?:\.sha256)?$/) {
+	  $p = "$arch/$bin";
         } elsif ($bin =~ /\.squashfs$/) {
 	  $p = "$bin";	# for simpleimage builds
 	} elsif ($bin =~ /\.diff\.(?:gz)(?:\.sha256)?$/) {
@@ -2143,6 +2214,7 @@ sub publish {
   # now update external repository
   my @db_deleted;  	# for published db update
   my @db_changed;	# for published db update
+  my @deleted;		# All deleted files for hooks.
 
   my %bins_done;
   @archs = sort(ls($extrep));
@@ -2166,6 +2238,7 @@ sub publish {
         unlink("$r/$bin") || die("unlink $r/$bin: $!\n");
         push @db_deleted, $p if $p =~ /\.(?:$binsufsre)$/;
         $changed = 1;
+	push @deleted, $p;
 	next;
       }
       if ("$s[9]/$s[7]/$s[1]" ne $bins_id{$p}) {
@@ -2656,7 +2729,11 @@ sub publish {
     deleterepo_hdlist2($extrep, $projid, $xrepoid, $data);
   }
   if ($repotype{'debian'}) {
-    createrepo_debian($extrep, $projid, $xrepoid, $data, $repotype{'debian'});
+    if ($BSConfig::reprepository && $BSConfig::reprepository->{$prp}) {
+	updaterepo_reprepro($prp, $extrep, @changed);
+    } else {
+	createrepo_debian($extrep, $projid, $xrepoid, $data, $repotype{'debian'});
+    }
   } else {
     deleterepo_debian($extrep, $projid, $xrepoid, $data);
   }
diff --git a/src/backend/bs_srcserver b/src/backend/bs_srcserver
index 8dc99de5213a923a16a2c380355c43448b652e8d..0274de67c889fadc508fc4d3dd55fa0e8a5a1057 100755
--- a/src/backend/bs_srcserver
+++ b/src/backend/bs_srcserver
@@ -1174,7 +1174,7 @@ sub getprojpack {
     }
     next if $repoids && !grep {$repoids->{$_->{'name'}}} @{$proj->{'repository'} || []};
     next if $packids && !grep {$packids->{$_}} @packages;
-    for (qw{title description build publish debuginfo useforbuild remoteurl remoteproject download link sourceaccess privacy access lock}) {
+    for (qw{title description build publish debuginfo useforbuild remoteurl remoteproject download link sourceaccess privacy access lock allowbuilddep}) {
       $jinfo->{$_} = $proj->{$_} if exists $proj->{$_};
     }
     if ($proj->{'access'}) {
diff --git a/src/backend/build b/src/backend/build
index 83be5905c8a3daf49f1ae8849ae6fceb56242cd3..67fe8315699b2f5850ce5075f57d51ac9944f492 160000
--- a/src/backend/build
+++ b/src/backend/build
@@ -1 +1 @@
-Subproject commit 83be5905c8a3daf49f1ae8849ae6fceb56242cd3
+Subproject commit 67fe8315699b2f5850ce5075f57d51ac9944f492
diff --git a/tests/00-prepare-sources b/tests/00-prepare-sources
new file mode 100755
index 0000000000000000000000000000000000000000..3952105bb7ec5a1eaf6c7005cab7ac440c0ba829
--- /dev/null
+++ b/tests/00-prepare-sources
@@ -0,0 +1,5 @@
+#!/bin/sh -e
+
+sed 's/^deb /deb-src /' /etc/apt/sources.list > /etc/apt/sources.list.d/sources.list
+
+apt-get update
diff --git a/tests/01-build-dash b/tests/01-build-dash
new file mode 100755
index 0000000000000000000000000000000000000000..32f4bba3debe1395985943d9bf03999e9c8227a9
--- /dev/null
+++ b/tests/01-build-dash
@@ -0,0 +1,22 @@
+#!/bin/sh -e
+
+create-debian-project bullseye
+
+create-project bullseye bullseye-derivative
+
+dir="$(mktemp -d)"
+
+cd "$dir"
+
+apt-get source --download-only dash
+
+osc dput bullseye-derivative *.dsc
+
+cd "$OLDPWD"
+
+if ! wait-for-pkg bullseye-derivative dash main
+then
+	# DoD didn't wake up, restart the backend and try again
+	docker-compose exec -T backend supervisorctl restart all
+	wait-for-pkg bullseye-derivative dash main
+fi
diff --git a/tests/scripts/create-debian-project b/tests/scripts/create-debian-project
new file mode 100755
index 0000000000000000000000000000000000000000..30f9a0565af0d577956295e76302af001d3a4d15
--- /dev/null
+++ b/tests/scripts/create-debian-project
@@ -0,0 +1,69 @@
+#!/bin/sh
+
+debian_release=${1:-bullseye}
+
+debian_prj_name="Debian:$debian_release:main"
+debian_repo="main"
+debian_url="http://deb.debian.org/debian/$debian_release/main"
+prj_arch=x86_64
+
+get_debianmeta() {
+	cat << EOF
+<project name="$debian_prj_name">
+  <repository name="$debian_repo">
+    <download arch="$prj_arch" url="$debian_url" repotype="deb"/>
+    <arch>$prj_arch</arch>
+  </repository>
+</project>
+EOF
+}
+
+get_debianconf() {
+	cat << EOF
+Repotype: debian
+type: dsc
+buildengine: debootstrap
+
+Order: base-passwd:base-files
+
+Preinstall: dash bash sed grep coreutils debianutils
+Preinstall: libc6 libncurses5 libacl1 libattr1 libpcre3
+Preinstall: libpcre2-8-0 libcrypt1
+Preinstall: diffutils tar dpkg libc-bin
+Preinstall: gzip base-files base-passwd
+Preinstall: libselinux1 libsepol1
+Preinstall: libgcc-s1 util-linux debconf tzdata findutils libdbus-1-3
+Preinstall: liblzma5 xz-utils libstdc++6 passwd
+Preinstall: login zlib1g libbz2-1.0 libtinfo5 libsigsegv2
+Preinstall: dash insserv libgmp10 libdebconfclient0
+Preinstall: perl-base perl libperl-dev mawk init-system-helpers
+
+Required: build-essential apt mount fakeroot dpkg-dev ncurses-base hostname
+Required: libtool
+
+# Work around package looking up variations of localhost e.g. glibc tries to look up localhost.
+Support: libnss-myhostname
+
+Prefer: mawk
+Prefer: cvs libesd0 libfam0 libfam-dev expect
+Prefer: locales default-jdk
+Prefer: xorg-x11-libs libpng fam mozilla mozilla-nss xorg-x11-Mesa
+Prefer: unixODBC libsoup glitz java-1_4_2-sun gnome-panel
+Prefer: desktop-data-SuSE gnome2-SuSE mono-nunit gecko-sharp2
+Prefer: apache2-prefork openmotif-libs ghostscript-mini gtk-sharp
+Prefer: glib-sharp libzypp-zmd-backend
+Prefer: sysv-rc make
+Prefer: libjack-jackd2-dev libsndio-dev
+Prefer: pkg-config
+Prefer: texlive-htmlxml libncurses-dev
+Prefer: libavcodec58
+Prefer: libsystemd0
+Prefer: libtinfo-dev
+Prefer: libavfilter7
+Prefer: libfontconfig-dev
+EOF
+}
+
+echo "Creating OBS project: $debian_prj_name"
+get_debianmeta | osc meta prj "$debian_prj_name" -F -
+get_debianconf | osc meta prjconf "$debian_prj_name" -F -
diff --git a/tests/scripts/create-project b/tests/scripts/create-project
new file mode 100755
index 0000000000000000000000000000000000000000..ada642d88c5a42c0b4b07847f641b2a96ce58153
--- /dev/null
+++ b/tests/scripts/create-project
@@ -0,0 +1,30 @@
+#!/bin/sh
+
+src_prj_name=$1
+dst_prj_name=$2
+prj_repo=${3:-main}
+prj_arch=${4:-x86_64}
+
+get_prjmeta() {
+	cat << EOF
+<project name="$dst_prj_name">
+  <repository name="$prj_repo">
+    <path project="Debian:$src_prj_name:main" repository="$prj_repo"/>
+    <arch>$prj_arch</arch>
+  </repository>
+</project>
+EOF
+}
+
+get_prjconf() {
+	cat << EOF
+Repotype: debian
+type: dsc
+release: b<B_CNT>
+buildengine: debootstrap
+EOF
+}
+
+echo "Creating OBS Project: $dst_prj_name"
+get_prjmeta | osc meta prj "$dst_prj_name" -F -
+get_prjconf | osc meta prjconf "$dst_prj_name" -F -
diff --git a/tests/scripts/wait-for-pkg b/tests/scripts/wait-for-pkg
new file mode 100755
index 0000000000000000000000000000000000000000..5105f6ab79d7f069e620265d544a63c2f9134f68
--- /dev/null
+++ b/tests/scripts/wait-for-pkg
@@ -0,0 +1,51 @@
+#!/bin/sh
+
+project=$1
+pkg=$2
+prj_repo=${3:-main}
+prj_arch=${4:-x86_64}
+iterations=30
+wait=30
+
+echo "Checking build result: $project $pkg $prj_repo $prj_arch"
+
+echo "Waiting for the build to start"
+
+for i in $(seq $iterations)
+do
+	result=$(osc results "$project" "$pkg" -r "$prj_repo" -a "$prj_arch" 2>/dev/null)
+	echo "$result"
+	if [ "$(echo $result | cut -d' ' -f 3)" = "building" ]
+	then
+		echo "Build started."
+		break
+	fi
+	if [ $i -eq $iterations ]
+	then
+		echo "ERROR: package hasn't started to build!" >&2
+		exit 1
+	fi
+	echo "Sleeping ${wait}s (iteration $i/$iterations)"
+	sleep $wait
+done
+
+for i in $(seq $iterations)
+do
+	osc remotebuildlog "$project" "$pkg" "$prj_repo" "$prj_arch" 2>/dev/null
+
+	result=$(osc results "$project" "$pkg" -r "$prj_repo" -a "$prj_arch" 2>/dev/null)
+	echo "$result"
+	if [ "$(echo $result | cut -d' ' -f 3)" = "succeeded" ]
+	then
+		echo "SUCCESS: package built without an error" >&2
+		break
+	fi
+	if [ $i -eq $iterations ]
+	then
+		echo "ERROR: package not built" >&2
+		exit 1
+	fi
+	echo "Sleeping ${wait}s (iteration $i/$iterations)"
+	sleep $wait
+done
+