Skip to content
Snippets Groups Projects

Compare revisions

Changes are shown as if the source revision was being merged into the target revision. Learn more about comparing revisions.

Source

Select target project
No results found

Target

Select target project
  • RT-PROOFS/rt-proofs
  • sophie/rt-proofs
  • fstutz/rt-proofs
  • sbozhko/rt-proofs
  • fradet/rt-proofs
  • mlesourd/rt-proofs
  • xiaojie/rt-proofs
  • LailaElbeheiry/rt-proofs
  • ptorrx/rt-proofs
  • proux/rt-proofs
  • LasseBlaauwbroek/rt-proofs
  • krathul/rt-proofs
12 results
Show changes
Commits on Source (888)
Showing
with 3208 additions and 2328 deletions
# Configuration file for the 'ack' search utility
# See https://beyondgrep.com/ for details.
# Ignore misc files generated by the build process
--ignore-file=ext:glob,aux,d,conf,orig
--ignore-dir=html
*.v gitlab-language=coq
*.d
*.glob
*.vo
*.vos
*.vok
*.html
/html
*.aux
Makefile.coq*
Makefile.conf
*.crashcoqide
*.v#
*.cache
*~
*.swp
*.orig
*/.#*
#*#
.#*
*.DS_Store
/proof-state
/with-proof-state
/html-alectryon
_esy
esy.lock/
_CoqProject
###################### Global Config ######################

# Run pipelines for merge requests and for branch pushes, but avoid
# duplicate pipelines when an open MR already covers the branch.
workflow:
  rules:
    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
    - if: '$CI_COMMIT_BRANCH && $CI_OPEN_MERGE_REQUESTS'
      when: never
    - if: '$CI_COMMIT_BRANCH'

stages:
  - build

###################### Rules and Scripts ##################

# Template: run only outside of "wip-" branches (or in MR pipelines).
.not_in_wip_branches:
  rules:
    - if: ($CI_COMMIT_BRANCH !~ /^wip-.*$/) || ($CI_PIPELINE_SOURCE == "merge_request_event")

# Template: run only in "wip-" branch pipelines (not MR pipelines).
.only_in_wip_branches:
  rules:
    - if: ($CI_COMMIT_BRANCH =~ /^wip-.*$/) && ($CI_PIPELINE_SOURCE != "merge_request_event")

# Template: build and install both opam packages from the local checkout.
.build-common:
  stage: build
  script:
    - opam pin add -n -y -k path coq-prosa .
    - opam install -y -v -j ${NJOBS} coq-prosa
    - opam pin add -n -y -k path coq-prosa-refinements .
    - opam install -y -v -j ${NJOBS} coq-prosa-refinements

# Template: pin the preferred stable Coq/mathcomp toolchain image.
.preferred-stable-version:
  image: registry.mpi-sws.org/mathcomp-for-prosa:2.3.0-coq-8.20

# Template: derive the toolchain image tag from the job name
# (e.g., job "2.3.0-coq-8.20" uses image tag "2.3.0-coq-8.20").
.build:
  image: registry.mpi-sws.org/mathcomp-for-prosa:${CI_JOB_NAME}
  extends: .build-common

# Template: build Prosa, then run POET on its bundled example workloads.
.build-and-test-POET:
  extends: .build
  script:
    - git clone https://gitlab.mpi-sws.org/RT-PROOFS/POET.git -b POET-for-Prosa-CI-next /tmp/POET
    - !reference [.build-common, script]
    - mv /tmp/POET .
    - cd POET
    - pip3 install --break-system-packages -r requirements.txt
    - >-
      for WORKLOAD in examples/*.yaml;
      do
      CERT=certs/cert-for-`basename $WORKLOAD`;
      echo; echo; echo;
      echo "==================";
      echo "Testing $WORKLOAD:";
      mkdir -p $CERT;
      time ./poet -v -o $CERT -j ${NJOBS} -s $WORKLOAD;
      done;

# Template: generate all three documentation flavors.
.make-html:
  script:
    - make html -j ${NJOBS}
    - mv html with-proofs
    - make gallinahtml -j ${NJOBS}
    - mv html without-proofs
    - make htmlpretty -j ${NJOBS}
    - mv html pretty

###################### The Jobs ######################

2.2.0-coq-8.19:
  extends:
    - .build
    - .not_in_wip_branches

2.3.0-coq-8.20:
  extends:
    - .build
    - .not_in_wip_branches

POET:
  extends:
    - .build-and-test-POET
    - .preferred-stable-version
    - .not_in_wip_branches

compile-and-doc:
  stage: build
  extends:
    - .only_in_wip_branches
    - .preferred-stable-version
  script:
    - make -j ${NJOBS} all
    - !reference [.make-html, script]
  artifacts:
    name: "prosa-spec-$CI_COMMIT_REF_NAME"
    paths:
      - "with-proofs/"
      - "without-proofs/"
      - "pretty/"
    expire_in: 1 week

compile-and-doc-and-validate:
  stage: build
  extends:
    - .not_in_wip_branches
    - .preferred-stable-version
  script:
    - make -j ${NJOBS} all 2>&1 | tee coqc-messages.txt
    - if grep -q Warning coqc-messages.txt; then (echo "Please fix all Coq warnings:"; echo "--"; grep -B1 Warning coqc-messages.txt; exit 1); fi
    - !reference [.make-html, script]
    - scripts/check-axiom-free.sh results
    ##### `make validate` disabled because of https://github.com/LPCIC/coq-elpi/issues/677 and https://github.com/coq/coq/issues/17345
    # - make validate 2>&1 | tee validation-results.txt
    # - scripts/check-validation-output.sh --accept-proof-irrelevance validation-results.txt
  artifacts:
    name: "prosa-spec-$CI_COMMIT_REF_NAME"
    paths:
      - "with-proofs/"
      - "without-proofs/"
      - "pretty/"
    expire_in: 1 week

mangle-names:
  stage: build
  extends:
    - .not_in_wip_branches
    - .preferred-stable-version
  script:
    - make -j ${NJOBS} mangle-names

# Check that proof irrelevance shows up only due to refinements.
compile-and-validate:
  stage: build
  extends:
    - .not_in_wip_branches
    - .preferred-stable-version
  script:
    - make -j ${NJOBS}
    - scripts/check-axiom-free.sh results
    ##### `make validate` disabled because of https://github.com/LPCIC/coq-elpi/issues/677 and https://github.com/coq/coq/issues/17345
    # - make validate 2>&1 | tee validation-results.txt
    # - scripts/check-validation-output.sh validation-results.txt

proof-length:
  stage: build
  image: python:3
  script:
    - make proof-length

spell-check:
  extends:
    - .not_in_wip_branches
  stage: build
  image: registry.mpi-sws.org/aspell-ci:2023-03
  script:
    - make spell

# mathcomp-dev with stable Coq
#coq-8.17:
#  extends:
#    - .build-dev
#    - .not_in_wip_branches
#  # it's ok to fail with an unreleased version of ssreflect
#  allow_failure: true

# mathcomp-dev with coq-dev
#coq-dev:
#  extends:
#    - .build-dev
#    - .not_in_wip_branches
#  # it's ok to fail with an unreleased version of ssreflect and Coq
#  allow_failure: true

proof-state:
  extends:
    - .not_in_wip_branches
  stage: build
  image: registry.mpi-sws.org/alectryon-ci:2.3.0-coq-8.20
  script:
    - eval $(opam env)
    - make -j ${NJOBS} all
    - make -j ${NJOBS} alectryon
  artifacts:
    name: "prosa-proof-state-$CI_COMMIT_REF_NAME"
    paths:
      - "html-alectryon/"
    expire_in: 1 week
Björn Brandenburg <bbb@mpi-sws.org> Björn Brandenburg <bbb@mpi-sws.org>
Sergey Bozhko <sbozhko@mpi-sws.org> Sergei Bozhko <sbozhko@mpi-sws.org>
Sergey Bozhko <sbozhko@mpi-sws.org> Sergey Bozhko <gkerfimf@gmail.com>
Sergey Bozhko <sbozhko@mpi-sws.org> Sergey Bozhko <sbozhko@sws-mpi.org>
Kimaya Bedarkar <kbedarka@mpi-sws.org> Kimaya <f20171026@goa.bits-pilani.ac.in>
Kimaya Bedarkar <kbedarka@mpi-sws.org> Kimaya Bedarkar <f20171026@goa.bits-pilani.ac.in>
Kimaya Bedarkar <kbedarka@mpi-sws.org> kbedarka <kbedarka@mpi-sws.org>
Kimaya Bedarkar <kbedarka@mpi-sws.org> kimaya <f20171026@goa.bits-pilani.ac.in>
Kimaya Bedarkar <kbedarka@mpi-sws.org> Kimaya <kbedarka@mpi-sws.org>
Marco Maida <mmaida@mpi-sws.org> Marco Maida <>
Marco Maida <mmaida@mpi-sws.org> mmaida <mmaida@mpi-sws.org>
Mariam Vardishvili <mariamvardishvili@gmail.com> mariamvardishvili <mariamvardishvili@gmail.com>
Mariam Vardishvili <mariamvardishvili@gmail.com> mariamvardishvili <mvardishvili@mpi-sws.org>
Mariam Vardishvili <mariamvardishvili@gmail.com> Mariam Vardishvili <mvardishvili@mpi-sws.org>
Maxime Lesourd <maxime.lesourd@inria.fr> Maxime Lesourd <maxime.lesourd@gmail.com>
Martin Bodin <martin.bodin@inria.fr> Martin Constantino–Bodin <martin.bodin@ens-lyon.org>
Martin Bodin <martin.bodin@inria.fr> Martin Constantino–Bodin <martin.bodin@inria.fr>
Pierre Roux <pierre.roux@onera.fr> Pierre Roux <pierre@roux01.fr>
Pierre Roux <pierre.roux@onera.fr> Pierre Roux <proux@mpi-sws.org>
Sophie Quinton <sophie.quinton@inria.fr> sophie quinton <squinton@inrialpes.fr>
Sophie Quinton <sophie.quinton@inria.fr> sophie quinton <sophie.quinton@inria.fr>
Xiaojie GUO <xiaojie.guo@inria.fr> Xiaojie GUO <gxj0303@loic.inrialpes.fr>
Meenal Gupta <meenalg727@gmail.com> Meenal-27 <80755301+Meenal-27@users.noreply.github.com>
BSD 2-Clause License
Copyright (c) 2014–2022, The Prosa Project & Contributors
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#############################################################################
## v # The Coq Proof Assistant ##
## <O___,, # INRIA - CNRS - LIX - LRI - PPS ##
## \VV/ # ##
## // # Makefile automagically generated by coq_makefile V8.4pl4 ##
#############################################################################
# WARNING
#
# This Makefile has been automagically generated
# Edit at your own risks !
#
# END OF WARNING
#
# This Makefile was generated by the command line :
# coq_makefile arrival_sequence.v bertogna_edf_comp.v bertogna_edf_theory.v bertogna_fp_comp.v bertogna_fp_theory.v interference_bound_edf.v interference.v job.v platform.v priority.v response_time.v schedulability.v schedule.v ssromega.v task_arrival.v task.v util_divround.v util_lemmas.v Vbase.v workload_bound.v workload.v
#
.DEFAULT_GOAL := all
#
# This Makefile may take arguments passed as environment variables:
# COQBIN to specify the directory where Coq binaries resides;
# ZDEBUG/COQDEBUG to specify debug flags for ocamlc&ocamlopt/coqc;
# DSTROOT to specify a prefix to install path.
# Here is a hack to make $(eval $(shell works:
define donewline
endef
includecmdwithout@ = $(eval $(subst @,$(donewline),$(shell { $(1) | tr -d '\r' | tr '\n' '@'; })))
$(call includecmdwithout@,$(COQBIN)coqtop -config)
##########################
# #
# Libraries definitions. #
# #
##########################
COQLIBS?=-I .
COQDOCLIBS?=
##########################
# #
# Variables definitions. #
# #
##########################
OPT?=
COQDEP?=$(COQBIN)coqdep -c
COQFLAGS?=-q $(OPT) $(COQLIBS) $(OTHERFLAGS) $(COQ_XML)
COQCHKFLAGS?=-silent -o
COQDOCFLAGS?=-interpolate -utf8
COQC?=$(COQBIN)coqc
GALLINA?=$(COQBIN)gallina
COQDOC?=$(COQBIN)coqdoc
COQCHK?=$(COQBIN)coqchk
##################
# #
# Install Paths. #
# #
##################
ifdef USERINSTALL
XDG_DATA_HOME?=$(HOME)/.local/share
COQLIBINSTALL=$(XDG_DATA_HOME)/coq
COQDOCINSTALL=$(XDG_DATA_HOME)/doc/coq
else
COQLIBINSTALL=${COQLIB}user-contrib
COQDOCINSTALL=${DOCDIR}user-contrib
endif
######################
# #
# Files dispatching. #
# #
######################
VFILES:=arrival_sequence.v\
bertogna_edf_comp.v\
bertogna_edf_theory.v\
bertogna_fp_comp.v\
bertogna_fp_theory.v\
interference_bound_edf.v\
interference.v\
interference_edf.v\
job.v\
platform.v\
platform_edf.v\
priority.v\
response_time.v\
schedulability.v\
schedule.v\
ssromega.v\
task_arrival.v\
task.v\
util_divround.v\
util_lemmas.v\
Vbase.v\
workload_bound.v\
workload.v
-include $(addsuffix .d,$(VFILES))
.SECONDARY: $(addsuffix .d,$(VFILES))
VOFILES:=$(VFILES:.v=.vo)
VOFILESINC=$(filter $(wildcard ./*),$(VOFILES))
GLOBFILES:=$(VFILES:.v=.glob)
VIFILES:=$(VFILES:.v=.vi)
GFILES:=$(VFILES:.v=.g)
HTMLFILES:=$(VFILES:.v=.html)
GHTMLFILES:=$(VFILES:.v=.g.html)
ifeq '$(HASNATDYNLINK)' 'true'
HASNATDYNLINK_OR_EMPTY := yes
else
HASNATDYNLINK_OR_EMPTY :=
endif
#######################################
# #
# Definition of the toplevel targets. #
# #
#######################################
all: $(VOFILES)
spec: $(VIFILES)
gallina: $(GFILES)
html: $(GLOBFILES) $(VFILES)
- mkdir -p html
$(COQDOC) -toc $(COQDOCFLAGS) -html $(COQDOCLIBS) -d html $(VFILES)
gallinahtml: $(GLOBFILES) $(VFILES)
- mkdir -p html
$(COQDOC) -toc $(COQDOCFLAGS) -html -g $(COQDOCLIBS) -d html $(VFILES)
all.ps: $(VFILES)
$(COQDOC) -toc $(COQDOCFLAGS) -ps $(COQDOCLIBS) -o $@ `$(COQDEP) -sort -suffix .v $^`
all-gal.ps: $(VFILES)
$(COQDOC) -toc $(COQDOCFLAGS) -ps -g $(COQDOCLIBS) -o $@ `$(COQDEP) -sort -suffix .v $^`
all.pdf: $(VFILES)
$(COQDOC) -toc $(COQDOCFLAGS) -pdf $(COQDOCLIBS) -o $@ `$(COQDEP) -sort -suffix .v $^`
all-gal.pdf: $(VFILES)
$(COQDOC) -toc $(COQDOCFLAGS) -pdf -g $(COQDOCLIBS) -o $@ `$(COQDEP) -sort -suffix .v $^`
validate: $(VOFILES)
$(COQCHK) $(COQCHKFLAGS) $(COQLIBS) $(notdir $(^:.vo=))
beautify: $(VFILES:=.beautified)
for file in $^; do mv $${file%.beautified} $${file%beautified}old && mv $${file} $${file%.beautified}; done
@echo 'Do not do "make clean" until you are sure that everything went well!'
@echo 'If there were a problem, execute "for file in $$(find . -name \*.v.old -print); do mv $${file} $${file%.old}; done" in your shell/'
.PHONY: all opt byte archclean clean install userinstall depend html validate
####################
# #
# Special targets. #
# #
####################
byte:
$(MAKE) all "OPT:=-byte"
opt:
$(MAKE) all "OPT:=-opt"
userinstall:
+$(MAKE) USERINSTALL=true install
install:
install -d $(DSTROOT)$(COQLIBINSTALL)/$(INSTALLDEFAULTROOT); \
for i in $(VOFILESINC); do \
install -m 0644 $$i $(DSTROOT)$(COQLIBINSTALL)/$(INSTALLDEFAULTROOT)/`basename $$i`; \
done
install-doc:
install -d $(DSTROOT)$(COQDOCINSTALL)/$(INSTALLDEFAULTROOT)/html
for i in html/*; do \
install -m 0644 $$i $(DSTROOT)$(COQDOCINSTALL)/$(INSTALLDEFAULTROOT)/$$i;\
done
clean:
rm -f $(VOFILES) $(VIFILES) $(GFILES) $(VFILES:.v=.v.d) $(VFILES:=.beautified) $(VFILES:=.old)
rm -f all.ps all-gal.ps all.pdf all-gal.pdf all.glob $(VFILES:.v=.glob) $(VFILES:.v=.tex) $(VFILES:.v=.g.tex) all-mli.tex
- rm -rf html mlihtml
archclean:
rm -f *.cmx *.o
printenv:
@$(COQBIN)coqtop -config
@echo CAMLC = $(CAMLC)
@echo CAMLOPTC = $(CAMLOPTC)
@echo PP = $(PP)
@echo COQFLAGS = $(COQFLAGS)
@echo COQLIBINSTALL = $(COQLIBINSTALL)
@echo COQDOCINSTALL = $(COQDOCINSTALL)
###################
# #
# Implicit rules. #
# #
###################
%.vo %.glob: %.v
$(COQC) $(COQDEBUG) $(COQFLAGS) $*
%.vi: %.v
$(COQC) -i $(COQDEBUG) $(COQFLAGS) $*
%.g: %.v
$(GALLINA) $<
%.tex: %.v
$(COQDOC) $(COQDOCFLAGS) -latex $< -o $@
%.html: %.v %.glob
$(COQDOC) $(COQDOCFLAGS) -html $< -o $@
%.g.tex: %.v
$(COQDOC) $(COQDOCFLAGS) -latex -g $< -o $@
# Per-file Gallina-only HTML; fixed missing space between $(COQDOC) and
# $(COQDOCFLAGS), which concatenated the binary path with its flags into
# one unrunnable command word (compare the %.html and %.g.tex rules).
%.g.html: %.v %.glob
	$(COQDOC) $(COQDOCFLAGS) -html -g $< -o $@
%.v.d: %.v
$(COQDEP) -slash $(COQLIBS) "$<" > "$@" || ( RV=$$?; rm -f "$@"; exit $${RV} )
%.v.beautified:
$(COQC) $(COQDEBUG) $(COQFLAGS) -beautify $*
# WARNING
#
# This Makefile has been automagically generated
# Edit at your own risks !
#
# END OF WARNING
# Makefile for Prosa
COQ_PROJ := _CoqProject
COQ_MAKEFILE := Makefile.coq
FIND_OPTS := . -name '*.v' ! -name '*\#*' ! -path './.git/*' ! -path './with-proof-state/*'
default: prosa
all: allCoqProject
$(MAKE) $(COQ_MAKEFILE)
$(MAKE) -f $(COQ_MAKEFILE)
prosa: prosaCoqProject
$(MAKE) $(COQ_MAKEFILE)
$(MAKE) -f $(COQ_MAKEFILE)
refinements: refinementsCoqProject
$(MAKE) $(COQ_MAKEFILE)
$(MAKE) -f $(COQ_MAKEFILE)
mangle-names: mangle-namesCoqProject
$(MAKE) $(COQ_MAKEFILE)
$(MAKE) -f $(COQ_MAKEFILE)
commonCoqProject:
@$(RM) -f $(COQ_PROJ)
@echo "# Automatically created by make, do not edit" > $(COQ_PROJ)
@echo "# (edit Makefile instead)" >> $(COQ_PROJ)
@echo "" >> $(COQ_PROJ)
@echo "-arg \"-w -notation-overriden,-parsing,-projection-no-head-constant,-ambiguous-paths\"" >> $(COQ_PROJ)
@echo "" >> $(COQ_PROJ)
allCoqProject: commonCoqProject
@echo "-R . prosa" >> $(COQ_PROJ)
@echo "" >> $(COQ_PROJ)
@find $(FIND_OPTS) \
-print | scripts/module-toc-order.py >> $(COQ_PROJ)
prosaCoqProject: commonCoqProject
@echo "-R . prosa" >> $(COQ_PROJ)
@echo "" >> $(COQ_PROJ)
@find $(FIND_OPTS) \
! -path './implementation/refinements/*' \
-print | scripts/module-toc-order.py >> $(COQ_PROJ)
refinementsCoqProject: commonCoqProject
@echo "-R implementation/refinements prosa.implementation.refinements" >> $(COQ_PROJ)
@echo "" >> $(COQ_PROJ)
@find $(FIND_OPTS) \
-path './implementation/refinements/*' \
-print | scripts/module-toc-order.py >> $(COQ_PROJ)
mangle-namesCoqProject: commonCoqProject
@echo "-arg \"-mangle-names __\"" >> $(COQ_PROJ)
@echo "" >> $(COQ_PROJ)
@echo "-R . prosa" >> $(COQ_PROJ)
@echo "" >> $(COQ_PROJ)
@find $(FIND_OPTS) \
-print | scripts/module-toc-order.py >> $(COQ_PROJ)
$(COQ_MAKEFILE): $(COQ_PROJ)
@coq_makefile -f $< -o $@ COQDOCEXTRAFLAGS = "--parse-comments --external https://math-comp.github.io/htmldoc/ mathcomp --external mathcomp https://math-comp.github.io/htmldoc/"
install htmlpretty clean cleanall validate alectryon: $(COQ_MAKEFILE)
$(MAKE) -f $(COQ_MAKEFILE) $@
html gallinahtml: $(COQ_MAKEFILE)
$(MAKE) -f $(COQ_MAKEFILE) $@
@# Prosa hack: let's tweak the style a bit
@sed -i.bak "s/#90bdff/#eeeeee/" html/coqdoc.css
@rm html/coqdoc.css.bak
%.vo: %.v
$(MAKE) -f $(COQ_MAKEFILE) $@
vacuum: allCoqProject cleanall
@echo 'VACUUMING *.vo *.glob .*.aux <empty directories>'
@find . -depth \( -iname '*.vo' -or -iname '*.glob' -or -iname '.*.aux' \) ! -path './.git/*' -delete
@find . -depth -type d -empty ! -path './.git/*' -delete
macos-clean:
@echo 'CLEAN .DS_Store'
@find . -depth -iname '.DS_Store' ! -path './.git/*' -delete
spell:
./scripts/flag-typos-in-comments.sh `find . -iname '*.v'`
./scripts/flag-typos-in-Markdown.sh `find . -iname '*.md'`
proof-length:
./scripts/proofloc.py --check `find . -iname '*.v'`
distclean: cleanall
$(RM) $(COQ_PROJ)
# Print a summary of the available top-level targets.
# (Fixed typo: 'make vacumm' -> 'make vacuum', matching the actual target.)
help:
	@echo "You can run:"
	@echo
	@echo "'make' or 'make prosa' to build Prosa main development"
	@echo "'make refinements' to build the refinement part only"
	@echo " (requires the main development to be installed)"
	@echo "'make all' to build everything"
	@echo
	@echo "'make install' to install previously compiled files"
	@echo "'make validate' to check and verify all proofs"
	@echo
	@echo "'make htmlpretty' to build the documentation based on CoqdocJS"
	@echo " (can hide/show proofs)"
	@echo "'make gallinahtml' to build the documentation without proofs"
	@echo "'make html' to build the documentation with all proofs"
	@echo
	@echo "'make clean' to remove generated files"
	@echo "'make vacuum' to clean .vo .glob .aux files and empty dirs"
	@echo "'make macos-clean' to clean macos' .DS_Store dirs"
	@echo "'make distclean' to remove all generated files"
	@echo
	@echo "'make mangle-names' to compile with mangle-names option"
	@echo "'make spell' to run a spell checker on comments and Markdown files"
	@echo "'make proof-length' to flag too-long proofs"
	@echo
.PHONY: all prosa refinements mangle-names mangle-namesCoqProject
.PHONY: commonCoqProject allCoqProject prosaCoqProject refinementsCoqProject
.PHONY: install html gallinahtml htmlpretty clean cleanall validate alectryon
.PHONY: vacuum macos-clean spell proof-length distclean help
# Prosa: include pretty documentation targets
include scripts/coqdocjs/Makefile.coqdocjs
include scripts/Makefile.alectryon
# Hack to enable esy support:
#
# In the esy container, $HOME is hidden, but this causes Coq to complain.
# We thus include a dummy $HOME value.
HOME ?= $(DESTDIR)
export HOME
# RT-PROOFS
# Prosa: Formally Proven Schedulability Analysis
This repository contains the main Coq proof spec & proof development of the RT-PROOFS project.
This repository contains the main Coq specification & proof development of the [Prosa open-source project](https://prosa.mpi-sws.org), which was launched in 2016.
From 2018–2021, Prosa was further developed in the context of the [RT-Proofs research project](https://rt-proofs.inria.fr/) (funded jointly by ANR and DFG, projects ANR-17-CE25-0016 and DFG-391919384, respectively).
## Plan
<center><img alt="RT-Proofs logo" src="http://prosa.mpi-sws.org/figures/rt-proofs-logo.png" width="300px"></center>
For now, this is more or less just a "code dump" with a flat hierarchy to get things started.
Prosa continues to be maintained and developed by the MPI-SWS Real-Time Systems group and contributors. Patches and merge requests are very welcome!
Going forward, the goal is to both
## Documentation
- restructure the repository as it grows in scope, and to
Up-to-date documentation for all branches of the main Prosa repository is available on the Prosa homepage:
- add significant documentation to make it easier to bring new collaborators who are not yet familiar with Coq into the project.
- <https://prosa.mpi-sws.org/branches>
## Publications
Please see the [list of publications](https://prosa.mpi-sws.org/publications.html) on the Prosa project's homepage.
### Classic Prosa
All results published prior to 2020 used the "classic" version of Prosa as first presented at ECRTS'16. Classic Prosa has been superseded by this development and is no longer being maintained. Refer to [the `classic-prosa` branch](https://gitlab.mpi-sws.org/RT-PROOFS/rt-proofs/-/tree/classic-prosa) to find this original version of Prosa.
## Directory and Module Structure
The directory and module structure is organized as follows. First, the main parts, of which there are currently six.
- [`behavior`](behavior/): The `behavior` namespace collects basic definitions and properties of system behavior (i.e., it defines Prosa's **trace-based semantics**). There are *no* proofs here. This module is mandatory: *all* results in Prosa rely on the basic trace-based semantics defined in this module.
- [`model`](model/): The `model` namespace collects all definitions and basic properties of various **system models** (e.g., sporadic tasks, arrival curves, various scheduling policies, etc.). There are only few proofs here. This module contains multiple, mutually exclusive alternatives (e.g., periodic vs. sporadic tasks, uni- vs. multiprocessor models, constrained vs. arbitrary deadlines, etc.), and higher-level results are expected to "pick and choose" whatever definitions and assumptions are appropriate. These models are *axiomatic* in the sense that they are collections of hypotheses and not necessarily backed by concrete implementations (but see below).
- [`analysis`](analysis/): The `analysis` namespace collects all definitions and proof libraries needed to establish **system properties** (e.g., schedulability, response time, etc.). This includes a substantial library of *basic facts* that follow directly from the trace-based semantics or specific modelling assumptions. Virtually all intermediate steps and low-level proofs will be found here.
- [`results`](results/): The `results` namespace contains all **high-level analysis results**.
- [`implementation`](implementation/): This module collects concrete implementations of some of the axiomatic models and scheduling policies to which the results apply. These are used to instantiate (i.e., apply) high-level results in an assumption-free environment for concrete job and task types, which establishes the absence of contradictions in the axiomatic models refined by the implementations.
- [`implementation.refinements`](implementation/refinements/): The refinements needed by [POET](https://drops.dagstuhl.de/opus/volltexte/2022/16336/) to compute with large numbers, based on [CoqEAL](https://github.com/coq-community/CoqEAL).
Furthermore, there are a couple of additional folders and modules.
- [`util`](util/): A collection of miscellaneous "helper" lemmas and tactics. Used throughout the rest of Prosa.
- [`scripts/`](scripts/): Scripts and supporting resources required for continuous integration and documentation generation.
## Side-effects
Importing Prosa changes the behavior of ssreflect's `done` tactic by adding basic lemmas about real-time systems.
```
Ltac done := solve [ ssreflect.done | eauto 4 with basic_rt_facts ].
```
Prosa overwrites the default obligation tactic of Coq with `idtac`. Refer to [`util.tactics`](util/tactics.v) for details.
## Installation
### With the OCaml Package Manager (`opam`)
Prosa can be installed using [`opam`](https://opam.ocaml.org/) (>= 2.0).
```bash
opam repo add coq-released https://coq.inria.fr/opam/released
# or for the dev version (git master): https://coq.inria.fr/opam/extra-dev
opam update
opam install coq-prosa
```
### From Sources with `opam`
OPAM can also be used to install a local checkout. For example, this is done in the CI setup (see `.gitlab-ci.yaml`).
```bash
opam repo add coq-released https://coq.inria.fr/opam/released
opam update
opam pin add -n -y -k path coq-prosa .
opam install coq-prosa
```
### From Sources With `esy`
Prosa can be installed using the [`esy`](https://esy.sh/) package manager.
#### Installing `esy`
`esy` itself can typically be installed through `npm`.
It should look something like this on most `apt`-based systems:
```bash
sudo apt install npm
sudo npm install --global esy@latest
```
#### Installing Prosa
With `esy` in place, it is easy to compile Prosa in one go. To download and compile all of Prosa's dependencies (including Coq), and then to compile Prosa itself, simply issue the command:
```bash
esy
```
Note that `esy` uses an internal compilation environment, which is not exported to the current shell.
To work within this environment, prefix any command with `esy`: for instance `esy coqide` to run your system’s CoqIDE within the right environment.
Alternatively, `esy shell` opens a shell within its environment.
### Manually From Sources
#### Dependencies
Besides Coq itself, Prosa depends on
1. the `ssreflect` library of the [Mathematical Components project](https://math-comp.github.io),
2. the [Micromega support for the Mathematical Components library](https://github.com/math-comp/mczify) provided by `mczify`, and
3. the [The Coq Effective Algebra Library](https://github.com/coq-community/coqeal) (optional, needed only for POET-related refinements).
These dependencies can be easily installed with OPAM.
```bash
opam install -y coq-mathcomp-ssreflect coq-mathcomp-zify coq-coqeal
```
Prosa always tracks the latest stable versions of Coq and ssreflect. We do not maintain compatibility with older versions of either Coq or ssreflect.
#### Compiling Prosa
Assuming all dependencies are available (either via OPAM or compiled from source, see the [Prosa setup instructions](http://prosa.mpi-sws.org/setup-instructions.html)), compiling Prosa can be done by just typing:
```bash
make -j
```
Depending on how powerful your machine is, this will take a few minutes.
The library can then be installed with:
```bash
make install
```
To compile the POET-related refinements (which require CoqEAL to be installed and inject a dependency on the *proof irrelevance* axiom), first install the main Prosa library with the two commands above, then type:
```bash
make refinements
```
The newly compiled files can then be installed with:
```bash
make install
```
## Generating HTML Documentation
Once the library has been compiled, the `coqdoc` documentation (as shown on the [web page](http://prosa.mpi-sws.org/documentation.html)) can be easily generated with:
- `make htmlpretty -j` --- pretty documentation based on CoqdocJS (can hide/show proofs),
- `make gallinahtml -j` --- just the specification, without proofs,
- `make html -j` --- full specification with all proofs.
Since `coqdoc` requires object files (`*.vo`) as input, please make sure that the code compiles.
## Commit and Development Rules
We very much welcome external contributions. Please don't hesitate to [get in touch](http://prosa.mpi-sws.org/get-in-touch.html)!
To make things as smooth as possible, here are a couple of rules and guidelines that we seek to abide by.
1. Always follow the project [coding and writing guidelines](doc/guidelines.md).
2. Make sure the master branch "compiles" at each commit. This is not true for the early history of the repository, but going forward we should strive to keep it working at all times.
2. Make sure the master branch "compiles" at each commit. This is not true for the early history of the repository, and during certain stretches of heavy refactoring, but going forward we should strive to keep it working at all times.
3. It's okay (and even recommended) to develop in a (private) dirty branch, but please clean up and re-base your branch (i.e., `git-rebase -i`) on top of the current master branch before opening a merge request.
4. Create merge requests liberally. No improvement is too small or too insignificant for a merge request. This applies to documentation fixes (e.g., typo fixes, grammar fixes, clarifications, etc.) as well.
5. If you are unsure whether a proposed change is even acceptable or the right way to go about things, create a work-in-progress (WIP) merge request as a basis for discussion. A WIP merge request is prefixed by "WIP:" or "Draft:".
3. It's ok to develop in a (private) dirty branch, but then clean up and `git-rebase -i` on top of the current master before merging your work into master.
6. We strive to have only "fast-forward merges" without merge commits, so always re-base your branch to linearize the history before merging. (WIP branches do not need to be linear.)
4. It's usually a good idea to ask first on the mailing list before merging a substantial change.
7. Recommendation: Document the tactics that you use in the [list of tactics](doc/tactics.md), especially when introducing/using non-standard tactics.
5. Pushing fixes, small improvements, etc. is always ok.
8. If something seems confusing, please help with improving the documentation. :-)
6. Document the tactics that you use in the [list of tactics](doc/tactics.md).
\ No newline at end of file
9. If you don't know how to fix or improve something, or if you have an open-ended suggestion in need of discussion, please file a ticket.
This diff is collapsed.
# Prosa Analysis Library
This module provides virtually all **analysis concepts and definitions** and encapsulates the bulk of the **intermediate proofs** in Prosa.
More precisely, this module collects all definitions that are not needed to define the behavioral semantics (found in [`prosa.behavior`](../behavior)) and that also do not conceptually form part of the task and system model (found in [`prosa.model`](../model)). For example, precise notions of "schedulability," "busy window," or "response time" are clearly analysis concepts and consequently defined here.
The proofs found in this module are "intermediate proofs" in the sense that they are not an end in themselves; rather, they are a means to enable the proofs of the high-level results provided in the [`prosa.results`](../results) module.
## Structure
The Prosa analysis library is currently organized as follows:
- [`abstract`](./abstract): This provides the **abstract RTA** analysis framework, a general and extensible approach to response-time analysis (RTA).
- [`definitions`](./definitions): This folder currently collects all major and minor **analysis definitions** (such as schedulability, priority inversion, etc.).
- [`transform`](./transform): This folder contains procedures for transforming schedules, to be used in proofs that rely on modifying a given reference schedule in some way (e.g., the EDF "swapping argument").
- [`facts`](./facts): Currently the home of virtually all **intermediate proofs**. In particular, [facts.behavior](./facts/behavior) provides a library of basic facts that follow (mostly) directly from Prosa's trace semantics (as given in [`prosa.behavior`](../behavior)).
**NB**: It is expected that the analysis module will be further (re)organized and (re)structured in future versions of Prosa.
## Guidelines
- As a general rule, keep definitions and proofs separate, so that casual readers may read and understand the Prosa specification without encountering intermediate lemmas and proofs.
- Prefer folders with many short files over fewer, longer files.
- In each file that is specific to some system model, explicitly `Require` the specific modules that jointly constitute the assumed system model (e.g., `Require prosa.model.processor.ideal` to express that the results in a file depend on the ideal-uniprocessor assumption).
\ No newline at end of file
Require Export prosa.analysis.abstract.abstract_rta.
(** In this module, we define _intra-supply interference_ as a kind of
conditional interference and use it to define an _intra-supply_
interference-bound function ([intra_IBF]). Recall that an
interference-bound function is a function that bounds the
interference experienced by a job of a task under analysis. This
way, the function [intra_IBF] bounds the cumulative interference
ignoring the interference due to a lack of supply.
Here, the term "intra-supply" is inspired by (but not limited to)
reservation-based schedulers, where a job only receives service
within a corresponding reservation. Similarly, intra-supply
interference refers to interference that occurs "inside" the time
instants where supply is present. *)
Section IntraInterferenceBound.
(** Consider any type of job associated with any type of tasks ... *)
Context {Job : JobType}.
Context {Task : TaskType}.
Context `{JobTask Job Task}.
(** ... with arrival times and costs. *)
Context `{JobArrival Job}.
Context `{JobCost Job}.
(** Consider any kind of processor state model. *)
Context {PState : ProcessorState Job}.
(** Consider any arrival sequence ... *)
Variable arr_seq : arrival_sequence Job.
(** ... and any schedule of this arrival sequence. *)
Variable sched : schedule PState.
(** Let [tsk] be any task that is to be analyzed. *)
Variable tsk : Task.
(** Assume we are provided with abstract functions for interference
and interfering workload. *)
Context `{Interference Job}.
Context `{InterferingWorkload Job}.
(** Next, we introduce the notions of _intra-supply_ interference
and intra-supply IBF. Using these notions, one can separate the
interference due to the lack of supply from all the other
sources of interference. *)
(** We define intra-supply interference as interference conditioned
on the fact that there is supply. That is, a job [j] experiences
intra-supply interference at a time instant [t] if [j]
experiences interference at time [t] _and_ there is supply at
[t]. Note that the condition [fun j t => has_supply sched t]
ignores its job argument: only the presence of supply at [t]
matters. *)
Definition intra_interference (j : Job) (t : instant) :=
cond_interference (fun j t => has_supply sched t) j t.
(** We define the cumulative intra-supply interference, i.e., the sum
of [intra_interference] over the interval <<[t1, t2)>>. *)
Definition cumul_intra_interference j t1 t2 :=
cumul_cond_interference (fun j t => has_supply sched t) j t1 t2.
(** Consider an interference bound function [intra_IBF]. *)
Variable intra_IBF : duration -> duration -> work.
(** We say that intra-supply interference is bounded by [intra_IBF]
iff, for any job [j] of task [tsk], cumulative _intra-supply_
interference within the interval <<[t1, t1 + R)>> is bounded by
[intra_IBF(A, R)], where [A] is the job's relative arrival time. *)
Definition intra_interference_is_bounded_by :=
cond_interference_is_bounded_by
arr_seq sched tsk intra_IBF
(relative_arrival_time_of_job_is_A sched) (fun j t => has_supply sched t).
End IntraInterferenceBound.
Require Export prosa.analysis.abstract.abstract_rta.
Require Export prosa.analysis.abstract.IBF.supply.
Require Export prosa.analysis.abstract.IBF.task.
(** In this module, we define a _task-intra-supply_-interference-bound
function ([task_intra_IBF]). Function [task_intra_IBF] bounds
interference ignoring interference due to lack of supply _and_ due
to self-interference. *)
Section TaskIntraInterferenceBound.
  (** Consider any type of jobs associated with any type of tasks ... *)
  Context {Job : JobType}.
  Context {Task : TaskType}.
  Context `{JobTask Job Task}.
  (** ... with arrival times and costs. *)
  Context `{JobArrival Job}.
  Context `{JobCost Job}.
  (** Consider any kind of processor state model. *)
  Context {PState : ProcessorState Job}.
  (** Consider any arrival sequence ... *)
  Variable arr_seq : arrival_sequence Job.
  (** ... and any schedule of this arrival sequence. *)
  Variable sched : schedule PState.
  (** Let [tsk] be any task that is to be analyzed. *)
  Variable tsk : Task.
  (** Assume we are provided with abstract functions characterizing
      interference and interfering workload. *)
  Context `{Interference Job}.
  Context `{InterferingWorkload Job}.
  (** Next, we introduce the notions of "task intra-supply
      interference" and "task intra-supply IBF". Using these notions,
      one can separate the interference due to the lack of supply or
      due to self-interference from all the other sources of
      interference. *)
  (** Let us define a predicate stating that (1) there is supply at
      time [t] and (2) the task of a job [j] is _not_ scheduled at
      time instant [t] (i.e., no self-interference at [t]). *)
  Definition nonself_intra (j : Job) (t : instant) :=
    (nonself arr_seq sched j t) && has_supply sched t.
  (** We define _task intra-supply interference_ as interference
      conditioned on the fact that there is supply and there is no
      self-interference. *)
  Definition task_intra_interference (j : Job) (t : instant) :=
    cond_interference nonself_intra j t.
  (** Consider an interference bound function [task_intra_IBF]. *)
  Variable task_intra_IBF : duration -> duration -> work.
  (** We say that task intra-supply interference is bounded by
      [task_intra_IBF] iff, for any job [j] of task [tsk], the
      cumulative task intra-supply interference within the interval
      <<[t1, t1 + R)>> is bounded by [task_intra_IBF(A, R)]. *)
  Definition task_intra_interference_is_bounded_by :=
    cond_interference_is_bounded_by
      arr_seq sched tsk task_intra_IBF
      (relative_arrival_time_of_job_is_A sched) nonself_intra.
End TaskIntraInterferenceBound.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
Require Export prosa.analysis.facts.model.ideal.schedule.
Require Export prosa.analysis.definitions.schedulability.
Require Export prosa.analysis.abstract.search_space.
Require Export prosa.analysis.abstract.abstract_rta.
Require Export prosa.analysis.abstract.iw_auxiliary.
(** * Abstract Response-Time Analysis For Processor State with Ideal Uni-Service Progress Model *)
(** In this module, we present an instantiation of the general
response-time analysis framework for models that satisfy the ideal
uni-service progress assumptions. *)
(** We prove that the maximum (with respect to the set of offsets)
among the solutions of the response-time bound recurrence is a
response-time bound for [tsk]. Note that in this section we add
additional restrictions on the processor state. These assumptions
allow us to eliminate the second equation from [aRTA+]'s
recurrence since jobs experience no delays while executing
non-preemptively. *)
Section AbstractRTAIdeal.
  (** Consider any type of tasks ... *)
  Context {Task : TaskType}.
  Context `{TaskCost Task}.
  Context `{TaskRunToCompletionThreshold Task}.
  (** ... and any type of jobs associated with these tasks. *)
  Context {Job : JobType}.
  Context `{JobTask Job Task}.
  Context `{JobArrival Job}.
  Context `{JobCost Job}.
  Context `{JobPreemptable Job}.
  (** Consider any kind of uni-service ideal processor state model. *)
  Context `{PState : ProcessorState Job}.
  Hypothesis H_ideal_progress_proc_model : ideal_progress_proc_model PState.
  Hypothesis H_unit_service_proc_model : unit_service_proc_model PState.
  (** Consider any arrival sequence with consistent, non-duplicate arrivals. *)
  Variable arr_seq : arrival_sequence Job.
  Hypothesis H_valid_arrival_sequence : valid_arrival_sequence arr_seq.
  (** Next, consider any schedule of this arrival sequence... *)
  Variable sched : schedule PState.
  Hypothesis H_jobs_come_from_arrival_sequence:
    jobs_come_from_arrival_sequence sched arr_seq.
  (** ... where jobs do not execute before their arrival nor after completion. *)
  Hypothesis H_jobs_must_arrive_to_execute : jobs_must_arrive_to_execute sched.
  Hypothesis H_completed_jobs_dont_execute : completed_jobs_dont_execute sched.
  (** Assume that the job costs are no larger than the task costs. *)
  Hypothesis H_valid_job_cost : arrivals_have_valid_job_costs arr_seq.
  (** Consider a task set [ts]... *)
  Variable ts : list Task.
  (** ... and a task [tsk] of [ts] that is to be analyzed. *)
  Variable tsk : Task.
  Hypothesis H_tsk_in_ts : tsk \in ts.
  (** Consider a valid preemption model... *)
  Hypothesis H_valid_preemption_model :
    valid_preemption_model arr_seq sched.
  (** ...and a valid task run-to-completion threshold function. That
      is, [task_rtct tsk] is (1) no bigger than [tsk]'s cost and (2)
      for any job [j] of task [tsk] [job_rtct j] is bounded by
      [task_rtct tsk]. *)
  Hypothesis H_valid_run_to_completion_threshold :
    valid_task_run_to_completion_threshold arr_seq tsk.
  (** Assume we are provided with abstract functions for Interference
      and Interfering Workload. *)
  Context `{Interference Job}.
  Context `{InterferingWorkload Job}.
  (** We assume that the scheduler is work-conserving. *)
  Hypothesis H_work_conserving : work_conserving arr_seq sched.
  (** Let [L] be a constant that bounds any busy interval of task [tsk]. *)
  Variable L : duration.
  Hypothesis H_busy_interval_exists :
    busy_intervals_are_bounded_by arr_seq sched tsk L.
  (** Next, assume that [interference_bound_function] is a bound on
      the interference incurred by jobs of task [tsk] parametrized by
      the relative arrival time [A]. *)
  Variable interference_bound_function : (* A *) duration -> (* Δ *) duration -> duration.
  Hypothesis H_job_interference_is_bounded :
    job_interference_is_bounded_by
      arr_seq sched tsk interference_bound_function (relative_arrival_time_of_job_is_A sched).
  (** To apply the generalized aRTA, we have to instantiate [IBF_P]
      and [IBF_NP]. In this file, we assume we are given a generic function
      [interference_bound_function] that bounds interference of jobs
      of tasks in [ts] and which has to be instantiated in subsequent
      files. We use this function to instantiate [IBF_P]. *)
  (** However, we still have to instantiate function [IBF_NP], which
      is a function that bounds interference in a non-preemptive stage
      of execution. We prove that this function can be instantiated
      with a constant function [λ F Δ ⟹ F - task_rtct tsk]. *)
  Let IBF_NP (F : duration) (Δ : duration) := F - task_rtct tsk.
  (** Let us re-iterate on the intuitive interpretation of this
      function. Since [F] is a solution to the first equation
      [task_rtct tsk + IBF_P tsk A F <= F], we know that by time
      instant [t1 + F] a job receives [task_rtct tsk] units of service
      and, hence, it becomes non-preemptive. Given this information,
      how can we bound the job's interference in an interval <<[t1, t1
      + R)>>? Note that this interval starts with the beginning of the
      busy interval. We know that the job receives [F - task_rtct tsk]
      units of interference, and there will be no more
      interference. Hence, [IBF_NP tsk F Δ := F - task_rtct tsk]. *)
  Lemma nonpreemptive_interference_is_bounded :
    job_interference_is_bounded_by
      arr_seq sched tsk IBF_NP (relative_time_to_reach_rtct sched tsk interference_bound_function).
  Proof.
    move => t1 t2 Δ j ARR TSK BUSY LT NCOM F RTC; specialize (RTC _ _ BUSY).
    (* A job that is not yet completed necessarily has positive cost. *)
    have POS : 0 < job_cost j.
    { move_neq_up ZE; move: ZE; rewrite leqn0; move => /eqP ZE.
      move: NCOM => /negP NCOM; apply: NCOM.
      by rewrite /completed_by ZE.
    }
    move: (BUSY) => [PREF _].
    move : (TSK) (BUSY) RTC => /eqP TSKeq [[/andP [LE1 LE2] [QT1 AQT]] QT2] [LEQ RTC].
    rewrite leq_subRL_impl // addnC -/cumulative_interference.
    (* Case analysis: either the busy interval ends no later than [t1 + F],
       or it extends past [t1 + F]; in the latter case we further
       distinguish [F <= Δ] and [Δ < F]. *)
    destruct (leqP t2 (t1 + F)) as [NEQ1|NEQ1]; last destruct (leqP F Δ) as [NEQ2|NEQ2].
    { rewrite -leq_subLR in NEQ1.
      rewrite -(leqRW NEQ1) (leqRW RTC) (leqRW (service_at_most_cost _ _ _ _ _)) //
              (leqRW (service_within_busy_interval_ge_job_cost _ _ _ _ _ _ _)) //.
      rewrite (leqRW (cumulative_interference_sub _ _ t1 _ t1 t2 _ _)) //.
      have LLL : (t1 < t2) = true by apply leq_ltn_trans with (t1 + Δ); lia.
      interval_to_duration t1 t2 k.
      eapply leq_trans.
      - by rewrite addnC; (eapply service_and_interference_bounded with (t := t1) (t3 := t1+k)
                           || eapply service_and_interference_bounded with (t := t1) (t2 := t1+k)).
      - lia.
    }
    { (* Past [t1 + F] the job is non-preemptive, so, by ideal progress
         and work conservation, it experiences no further interference. *)
      have NoInterference: cumulative_interference j (t1 + F) (t1 + Δ) = 0.
      { rewrite /cumulative_interference /cumul_cond_interference big_nat.
        apply big1; move => t /andP [GE__t LT__t].
        apply/eqP; rewrite eqb0; apply/negP; eapply H_work_conserving => //.
        { by apply/andP; split; lia. }
        apply: H_ideal_progress_proc_model.
        apply: job_nonpreemptive_after_run_to_completion_threshold GE__t _ _ => //.
        - by rewrite -(leqRW RTC); apply H_valid_run_to_completion_threshold.
        - by move: NCOM; apply contra; apply completion_monotonic; lia.
      }
      rewrite (leqRW RTC); erewrite cumulative_interference_cat with (t := t1 + F); last by lia.
      rewrite /cumulative_interference in NoInterference.
      rewrite NoInterference addn0; erewrite no_service_before_busy_interval => //.
      by rewrite addnC; apply: service_and_interference_bounded.
    }
    { rewrite (leqRW RTC) (leqRW (cumulative_interference_sub _ _ t1 _ t1 (t1 + F) _ _ )); try lia.
      erewrite no_service_before_busy_interval => //.
      by rewrite addnC; eapply service_and_interference_bounded. }
  Qed.
  (** For simplicity, let's define a local name for the search space. *)
  Let is_in_search_space A := is_in_search_space L interference_bound_function A.
  (** Consider any value [R] that upper-bounds the solution of each
      response-time recurrence, i.e., for any relative arrival time [A]
      in the search space, there exists a corresponding solution [F]
      such that [F + (task_cost tsk - task_rtct tsk) <= R]. *)
  Variable R : duration.
  Hypothesis H_R_is_maximum_ideal :
    forall A,
      is_in_search_space A ->
      exists F,
        task_rtct tsk + interference_bound_function A (A + F) <= A + F /\
        F + (task_cost tsk - task_rtct tsk) <= R.
  (** Using the lemma about [IBF_NP], we instantiate the general RTA
      theorem's result to the ideal uniprocessor's case to prove that
      [R] is a response-time bound. *)
  Theorem uniprocessor_response_time_bound_ideal :
    task_response_time_bound arr_seq sched tsk R.
  Proof.
    eapply uniprocessor_response_time_bound with (IBF_NP := fun F Δ => F - task_rtct tsk) => //.
    { by apply nonpreemptive_interference_is_bounded. }
    { move => F _.
      destruct (leqP F (task_rtct tsk)) as [NEQ|NEQ].
      - eapply leq_trans; [apply NEQ | ].
        by eapply leq_trans; [apply H_valid_run_to_completion_threshold | lia].
      - by rewrite addnBCA; [lia | apply H_valid_run_to_completion_threshold | lia].
    }
    { move => A SP; specialize (H_R_is_maximum_ideal A SP).
      destruct H_R_is_maximum_ideal as [F [EQ1 EQ2]].
      exists (A + F); split=> [//|].
      eapply leq_trans; [ | by erewrite leq_add2l; apply EQ2].
      by rewrite addnBCA; [lia | apply H_valid_run_to_completion_threshold | lia].
    }
  Qed.
End AbstractRTAIdeal.
This diff is collapsed.
This diff is collapsed.