Compare commits
86 Commits
| SHA1 |
|---|
| 7da64958f0 |
| 49e965fd63 |
| c09b95fafd |
| 88f09f2eac |
| 568c09ce3a |
| 79f6c538c1 |
| 3400e6d9db |
| fbea9e74c2 |
| 4d929827ac |
| 37b3641f00 |
| 3d940f1d1b |
| f0d2a3e2fe |
| 7d405a43b1 |
| 2dbe845f21 |
| 4efa225daa |
| 0ee1955d5b |
| a0eb50be3e |
| 2da9d7577f |
| d5e2a071c1 |
| 633dd420d9 |
| 780b4e08cb |
| 281185e49d |
| cb240b4f4c |
| ff5ee88ac0 |
| c3ef958116 |
| 727806f483 |
| ec6c9d3637 |
| c5e88dbbda |
| c3e4d65dd1 |
| 16207ae32f |
| dbf2ca1b0a |
| 0e204784ee |
| c44406e091 |
| 1da82633b2 |
| ad6636f09c |
| 8f4b366c0f |
| 5f8960f5ef |
| d9200eb55d |
| ccdc1d3777 |
| 197c4ddcbd |
| 7287947535 |
| b0be164419 |
| fab06842d5 |
| 9b4eafc54a |
| 9becb117b2 |
| 87ab3e4d16 |
| 9b8305cc11 |
| 728e5720cc |
| 6c11685863 |
| 0f783a4ce0 |
| 02dba17a59 |
| 860e7307bc |
| 7b087b8ac5 |
| 15f39300c0 |
| f6f69516ac |
| 5f9dd18a87 |
| 7d8639b4a8 |
| 6e723ff755 |
| bff97ed4cc |
| 97e2959452 |
| 91970658ec |
| fc2585af00 |
| f95edd3a85 |
| 2b9ed76734 |
| 5a041bff3d |
| f61664c6f8 |
| e2f05e9328 |
| ea0b53b150 |
| 56664826b2 |
| bbb49dec38 |
| 8211ebf759 |
| 1f3121b6b2 |
| d7820faf7c |
| 5d29d52445 |
| be11187e09 |
| 9044091e21 |
| 170ba8dfcb |
| e3f1b6fc54 |
| 7ac05528ca |
| 64f411f0fb |
| 2e2dd96ae4 |
| a311291294 |
| 5221787303 |
| 63d0c7fcae |
| 8abef50e97 |
| 0306e75a5f |

.github/workflows/backport.yml (2 changes)

@@ -9,7 +9,7 @@ on:
 jobs:
   backport:
-    runs-on: ubuntu-18.04
+    runs-on: ubuntu-20.04
     name: Backport
     steps:
       - name: Backport

.github/workflows/backport_issue_check.yml (4 changes)

@@ -8,11 +8,11 @@ on:
 jobs:
   backport:
     name: Backport Issue Check
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-20.04

     steps:
       - name: Check out source code
-        uses: actions/checkout@v2
+        uses: actions/checkout@v3

       - name: Install Python dependencies
         run: |

@@ -8,7 +8,7 @@ on:
 jobs:
   bluetooth-test-results:
     name: "Publish Bluetooth Test Results"
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-20.04
     if: github.event.workflow_run.conclusion != 'skipped'

     steps:

.github/workflows/bluetooth-tests.yaml (22 changes)

@@ -10,17 +10,13 @@ on:
     - "soc/posix/**"
     - "arch/posix/**"

+concurrency:
+  group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.head_ref || github.ref }}
+  cancel-in-progress: true
+
 jobs:
-  bluetooth-test-prep:
-    runs-on: ubuntu-latest
-    steps:
-      - name: Cancel Previous Runs
-        uses: styfle/cancel-workflow-action@0.6.0
-        with:
-          access_token: ${{ github.token }}
-  bluetooth-test-build:
-    runs-on: ubuntu-latest
-    needs: bluetooth-test-prep
+  bluetooth-test:
+    runs-on: ubuntu-20.04
     container:
       image: zephyrprojectrtos/ci:v0.18.4
       options: '--entrypoint /bin/bash'

@@ -38,7 +34,7 @@ jobs:
          echo "$HOME/.local/bin" >> $GITHUB_PATH

      - name: checkout
-       uses: actions/checkout@v2
+       uses: actions/checkout@v3

      - name: west setup
        run: |

@@ -55,7 +51,7 @@ jobs:

      - name: Upload Test Results
        if: always()
-       uses: actions/upload-artifact@v2
+       uses: actions/upload-artifact@v3
        with:
          name: bluetooth-test-results
          path: |

@@ -64,7 +60,7 @@ jobs:

      - name: Upload Event Details
        if: always()
-       uses: actions/upload-artifact@v2
+       uses: actions/upload-artifact@v3
        with:
          name: event
          path: |

.github/workflows/clang.yaml (34 changes)

@@ -2,22 +2,18 @@ name: Build with Clang/LLVM

 on: pull_request_target

+concurrency:
+  group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.head_ref || github.ref }}
+  cancel-in-progress: true
+
 jobs:
-  clang-build-prep:
-    runs-on: ubuntu-latest
-    steps:
-      - name: Cancel Previous Runs
-        uses: styfle/cancel-workflow-action@0.6.0
-        with:
-          access_token: ${{ github.token }}
   clang-build:
-    runs-on: zephyr_runner
-    needs: clang-build-prep
+    runs-on: zephyr-runner-linux-x64-4xlarge
     container:
       image: zephyrprojectrtos/ci:v0.18.4
       options: '--entrypoint /bin/bash'
     volumes:
-      - /home/runners/zephyrproject:/github/cache/zephyrproject
+      - /repo-cache/zephyrproject:/github/cache/zephyrproject
     strategy:
       fail-fast: false
       matrix:

@@ -30,12 +26,14 @@ jobs:
     outputs:
       report_needed: ${{ steps.twister.outputs.report_needed }}
     steps:
-      - name: Cleanup
+      - name: Clone cached Zephyr repository
+        continue-on-error: true
         run: |
-          # hotfix, until we have a better way to deal with existing data
-          rm -rf zephyr zephyr-testing
+          git clone --shared /github/cache/zephyrproject/zephyr .
+          git remote set-url origin ${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}

      - name: Checkout
-       uses: actions/checkout@v2
+       uses: actions/checkout@v3
        with:
          ref: ${{ github.event.pull_request.head.sha }}
          fetch-depth: 0

@@ -72,7 +70,7 @@ jobs:
          string(TIMESTAMP current_date "%Y-%m-%d-%H;%M;%S" UTC)
          string(REPLACE "/" "_" repo ${{github.repository}})
          string(REPLACE "-" "_" repo2 ${repo})
-         message("::set-output name=repo::${repo2}")
+         file(APPEND $ENV{GITHUB_OUTPUT} "repo=${repo2}\n")
      - name: use cache
        id: cache-ccache
        uses: nashif/action-s3-cache@master

@@ -100,12 +98,12 @@ jobs:

          # We can limit scope to just what has changed
          if [ -s testplan.csv ]; then
-           echo "::set-output name=report_needed::1";
+           echo "report_needed=1" >> $GITHUB_OUTPUT
            # Full twister but with options based on changes
            ./scripts/twister --inline-logs -M -N -v --load-tests testplan.csv --retry-failed 2
          else
            # if nothing is run, skip reporting step
-           echo "::set-output name=report_needed::0";
+           echo "report_needed=0" >> $GITHUB_OUTPUT
          fi

      - name: ccache stats post

@@ -114,7 +112,7 @@ jobs:

      - name: Upload Unit Test Results
        if: always() && steps.twister.outputs.report_needed != 0
-       uses: actions/upload-artifact@v2
+       uses: actions/upload-artifact@v3
        with:
          name: Unit Test Results (Subset ${{ matrix.platform }})
          path: twister-out/twister.xml
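
A recurring change in clang.yaml (and repeated in codecov.yaml, twister.yaml, and release.yml below) is the migration away from GitHub Actions' deprecated `::set-output` command to appending `key=value` lines to the file named by `$GITHUB_OUTPUT`. A minimal bash sketch of the pattern; the `report_needed` key matches the workflow above, while the fallback scratch file is purely illustrative for trying it outside of Actions:

    #!/usr/bin/env bash
    # Deprecated form: emit a workflow command on stdout.
    echo "::set-output name=report_needed::1"

    # Current form: append key=value to the file GitHub names in GITHUB_OUTPUT.
    # Outside of Actions, default the variable to a scratch file for testing.
    : "${GITHUB_OUTPUT:=/tmp/github_output}"
    echo "report_needed=1" >> "$GITHUB_OUTPUT"

    # A later step then reads the value as ${{ steps.<step-id>.outputs.report_needed }}.
    cat "$GITHUB_OUTPUT"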

.github/workflows/codecov.yaml (36 changes)

@@ -4,22 +4,18 @@ on:
   schedule:
     - cron: '25 */3 * * 1-5'

-jobs:
-  codecov-prep:
-    runs-on: ubuntu-latest
-    if: github.repository == 'zephyrproject-rtos/zephyr'
-    steps:
-      - name: Cancel Previous Runs
-        uses: styfle/cancel-workflow-action@0.6.0
-        with:
-          access_token: ${{ github.token }}
+concurrency:
+  group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.head_ref || github.ref }}
+  cancel-in-progress: true
+
+jobs:
   codecov:
-    runs-on: zephyr_runner
-    needs: codecov-prep
+    runs-on: zephyr-runner-linux-x64-4xlarge
     container:
       image: zephyrprojectrtos/ci:v0.18.4
       options: '--entrypoint /bin/bash'
     volumes:
       - /repo-cache/zephyrproject:/github/cache/zephyrproject
     strategy:
       fail-fast: false
       matrix:

@@ -32,8 +28,14 @@ jobs:
        run: |
          echo "$HOME/.local/bin" >> $GITHUB_PATH

+     - name: Clone cached Zephyr repository
+       continue-on-error: true
+       run: |
+         git clone --shared /github/cache/zephyrproject/zephyr .
+         git remote set-url origin ${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}
+
      - name: checkout
-       uses: actions/checkout@v2
+       uses: actions/checkout@v3
        with:
          fetch-depth: 0

@@ -54,7 +56,7 @@ jobs:
        run: |
          string(REPLACE "/" "_" repo ${{github.repository}})
          string(REPLACE "-" "_" repo2 ${repo})
-         message("::set-output name=repo::${repo2}")
+         file(APPEND $ENV{GITHUB_OUTPUT} "repo=${repo2}\n")

      - name: use cache
        id: cache-ccache

@@ -94,7 +96,7 @@ jobs:

      - name: Upload Coverage Results
        if: always()
-       uses: actions/upload-artifact@v2
+       uses: actions/upload-artifact@v3
        with:
          name: Coverage Data (Subset ${{ matrix.platform }})
          path: coverage/reports/${{ matrix.platform }}.info

@@ -108,7 +110,7 @@ jobs:

    steps:
      - name: checkout
-       uses: actions/checkout@v2
+       uses: actions/checkout@v3
        with:
          fetch-depth: 0
      - name: Download Artifacts

@@ -144,8 +146,8 @@ jobs:
            set(MERGELIST "${MERGELIST} -a ${f}")
          endif()
        endforeach()
-       message("::set-output name=mergefiles::${MERGELIST}")
-       message("::set-output name=covfiles::${FILELIST}")
+       file(APPEND $ENV{GITHUB_OUTPUT} "mergefiles=${MERGELIST}\n")
+       file(APPEND $ENV{GITHUB_OUTPUT} "covfiles=${FILELIST}\n")

      - name: Merge coverage files
        run: |

.github/workflows/coding_guidelines.yml (6 changes)

@@ -4,17 +4,17 @@ on: pull_request

 jobs:
   compliance_job:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-20.04
     name: Run coding guidelines checks on patch series (PR)
     steps:
      - name: Checkout the code
-       uses: actions/checkout@v2
+       uses: actions/checkout@v3
        with:
          ref: ${{ github.event.pull_request.head.sha }}
          fetch-depth: 0

      - name: cache-pip
-       uses: actions/cache@v1
+       uses: actions/cache@v3
        with:
          path: ~/.cache/pip
          key: ${{ runner.os }}-doc-pip

.github/workflows/compliance.yml (12 changes)

@@ -4,11 +4,11 @@ on: pull_request

 jobs:
   maintainer_check:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-20.04
     name: Check MAINTAINERS file
     steps:
      - name: Checkout the code
-       uses: actions/checkout@v2
+       uses: actions/checkout@v3
        with:
          ref: ${{ github.event.pull_request.head.sha }}
          fetch-depth: 0

@@ -20,7 +20,7 @@ jobs:
          python3 ./scripts/get_maintainer.py path CMakeLists.txt

   check_compliance:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-20.04
     name: Run compliance checks on patch series (PR)
     steps:
      - name: Update PATH for west

@@ -28,13 +28,13 @@ jobs:
          echo "$HOME/.local/bin" >> $GITHUB_PATH

      - name: Checkout the code
-       uses: actions/checkout@v2
+       uses: actions/checkout@v3
        with:
          ref: ${{ github.event.pull_request.head.sha }}
          fetch-depth: 0

      - name: cache-pip
-       uses: actions/cache@v1
+       uses: actions/cache@v3
        with:
          path: ~/.cache/pip
          key: ${{ runner.os }}-doc-pip

@@ -72,7 +72,7 @@ jobs:
          ./scripts/ci/check_compliance.py -m Codeowners -m Devicetree -m Gitlint -m Identity -m Nits -m pylint -m checkpatch -m Kconfig -c origin/${BASE_REF}..

      - name: upload-results
-       uses: actions/upload-artifact@master
+       uses: actions/upload-artifact@v3
        continue-on-error: True
        with:
          name: compliance.xml

.github/workflows/daily_test_version.yml (4 changes)

@@ -12,7 +12,7 @@ on:

 jobs:
   get_version:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-20.04
     if: github.repository == 'zephyrproject-rtos/zephyr'

     steps:

@@ -28,7 +28,7 @@ jobs:
          pip3 install gitpython

      - name: checkout
-       uses: actions/checkout@v2
+       uses: actions/checkout@v3
        with:
          fetch-depth: 0

.github/workflows/devicetree_checks.yml (20 changes)

@@ -6,10 +6,14 @@ name: Devicetree script tests

 on:
   push:
+    branches:
+      - v2.7-branch
     paths:
       - 'scripts/dts/**'
       - '.github/workflows/devicetree_checks.yml'
   pull_request:
+    branches:
+      - v2.7-branch
     paths:
       - 'scripts/dts/**'
       - '.github/workflows/devicetree_checks.yml'

@@ -21,20 +25,22 @@ jobs:
     strategy:
       matrix:
         python-version: [3.6, 3.7, 3.8]
-        os: [ubuntu-latest, macos-latest, windows-latest]
+        os: [ubuntu-20.04, macos-11, windows-2022]
         exclude:
-          - os: macos-latest
+          - os: macos-11
             python-version: 3.6
+          - os: windows-2022
+            python-version: 3.6
     steps:
     - name: checkout
-      uses: actions/checkout@v2
+      uses: actions/checkout@v3
     - name: Set up Python ${{ matrix.python-version }}
-      uses: actions/setup-python@v1
+      uses: actions/setup-python@v4
      with:
        python-version: ${{ matrix.python-version }}
     - name: cache-pip-linux
      if: startsWith(runner.os, 'Linux')
-     uses: actions/cache@v1
+     uses: actions/cache@v3
      with:
        path: ~/.cache/pip
        key: ${{ runner.os }}-pip-${{ matrix.python-version }}

@@ -42,7 +48,7 @@ jobs:
        ${{ runner.os }}-pip-${{ matrix.python-version }}
     - name: cache-pip-mac
      if: startsWith(runner.os, 'macOS')
-     uses: actions/cache@v1
+     uses: actions/cache@v3
      with:
        path: ~/Library/Caches/pip
        # Trailing '-' was just to get a different cache name

@@ -51,7 +57,7 @@ jobs:
        ${{ runner.os }}-pip-${{ matrix.python-version }}-
     - name: cache-pip-win
      if: startsWith(runner.os, 'Windows')
-     uses: actions/cache@v1
+     uses: actions/cache@v3
      with:
        path: ~\AppData\Local\pip\Cache
        key: ${{ runner.os }}-pip-${{ matrix.python-version }}

.github/workflows/doc-build.yml (54 changes)

@@ -5,10 +5,10 @@ name: Documentation Build

 on:
   schedule:
-  - cron: '0 */3 * * *'
+    - cron: '0 */3 * * *'
   push:
     tags:
-    - v*
+      - v*
   pull_request:
     paths:
       - 'doc/**'

@@ -34,18 +34,23 @@ env:
 jobs:
   doc-build-html:
     name: "Documentation Build (HTML)"
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-20.04
     timeout-minutes: 30
+    concurrency:
+      group: doc-build-html-${{ github.ref }}
+      cancel-in-progress: true

    steps:
      - name: checkout
-       uses: actions/checkout@v2
+       uses: actions/checkout@v3

      - name: install-pkgs
        run: |
          sudo apt-get install -y ninja-build doxygen graphviz

      - name: cache-pip
-       uses: actions/cache@v1
+       uses: actions/cache@v3
        with:
          path: ~/.cache/pip
          key: pip-${{ hashFiles('scripts/requirements-doc.txt') }}

@@ -69,26 +74,53 @@ jobs:
            DOC_TAG="development"
          fi

-         DOC_TAG=${DOC_TAG} SPHINXOPTS="-q -W -j auto" make -C doc html
+         if [[ "${{ github.event_name }}" == "pull_request" ]]; then
+           DOC_TARGET="html-fast"
+         else
+           DOC_TARGET="html"
+         fi
+
+         DOC_TAG=${DOC_TAG} SPHINXOPTS="-q -W" make -C doc ${DOC_TARGET}

+     - name: compress-docs
+       run: |
+         tar cfJ html-output.tar.xz --directory=doc/_build html
+
      - name: upload-build
-       uses: actions/upload-artifact@master
+       uses: actions/upload-artifact@v3
        with:
          name: html-output
          path: html-output.tar.xz

+     - name: process-pr
+       if: github.event_name == 'pull_request'
+       run: |
+         REPO_NAME="${{ github.event.repository.name }}"
+         PR_NUM="${{ github.event.pull_request.number }}"
+         DOC_URL="https://builds.zephyrproject.io/${REPO_NAME}/pr/${PR_NUM}/docs/"
+
+         echo "${PR_NUM}" > pr_num
+         echo "::notice:: Documentation will be available shortly at: ${DOC_URL}"
+
+     - name: upload-pr-number
+       uses: actions/upload-artifact@v3
+       if: github.event_name == 'pull_request'
+       with:
+         name: pr_num
+         path: pr_num
+
   doc-build-pdf:
     name: "Documentation Build (PDF)"
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-20.04
     container: texlive/texlive:latest
     timeout-minutes: 30
+    concurrency:
+      group: doc-build-pdf-${{ github.ref }}
+      cancel-in-progress: true

    steps:
      - name: checkout
-       uses: actions/checkout@v2
+       uses: actions/checkout@v3

      - name: install-pkgs
        run: |

@@ -96,7 +128,7 @@ jobs:
          apt-get install -y python3-pip ninja-build doxygen graphviz librsvg2-bin

      - name: cache-pip
-       uses: actions/cache@v1
+       uses: actions/cache@v3
        with:
          path: ~/.cache/pip
          key: pip-${{ hashFiles('scripts/requirements-doc.txt') }}

@@ -123,7 +155,7 @@ jobs:
          DOC_TAG=${DOC_TAG} SPHINXOPTS="-q -j auto" LATEXMKOPTS="-quiet -halt-on-error" make -C doc pdf

      - name: upload-build
-       uses: actions/upload-artifact@master
+       uses: actions/upload-artifact@v3
        with:
          name: pdf-output
          path: doc/_build/latex/zephyr.pdf
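
doc-build.yml now hands the HTML output to the publish workflows as a single xz-compressed tarball rather than a tree of files. A runnable bash sketch of the pack/unpack round trip, using the same tar invocations as the workflows and assuming a doc/_build/html tree already exists; the cp is a stand-in for the artifact upload/download:

    #!/usr/bin/env bash
    set -e
    # Build side: pack doc/_build/html into one artifact file.
    tar cfJ html-output.tar.xz --directory=doc/_build html

    # Publish side: the artifact lands in html-output/ and is unpacked in place.
    mkdir -p html-output
    cp html-output.tar.xz html-output/   # stand-in for the artifact transfer
    tar xf html-output/html-output.tar.xz -C html-output
    ls html-output/html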

.github/workflows/doc-publish-pr.yml (new file, 63 lines)

@@ -0,0 +1,63 @@
+# Copyright (c) 2020 Linaro Limited.
+# Copyright (c) 2021 Nordic Semiconductor ASA
+# SPDX-License-Identifier: Apache-2.0
+
+name: Documentation Publish (Pull Request)
+
+on:
+  workflow_run:
+    workflows: ["Documentation Build"]
+    types:
+      - completed
+
+jobs:
+  doc-publish:
+    name: Publish Documentation
+    runs-on: ubuntu-20.04
+    if: |
+      github.event.workflow_run.event == 'pull_request' &&
+      github.event.workflow_run.conclusion == 'success' &&
+      github.repository == 'zephyrproject-rtos/zephyr'
+
+    steps:
+      - name: Download artifacts
+        uses: dawidd6/action-download-artifact@v2
+        with:
+          workflow: doc-build.yml
+          run_id: ${{ github.event.workflow_run.id }}
+
+      - name: Load PR number
+        run: |
+          echo "PR_NUM=$(<pr_num/pr_num)" >> $GITHUB_ENV
+
+      - name: Check PR number
+        id: check-pr
+        uses: carpentries/actions/check-valid-pr@v0.8
+        with:
+          pr: ${{ env.PR_NUM }}
+          sha: ${{ github.event.workflow_run.head_sha }}
+
+      - name: Validate PR number
+        if: steps.check-pr.outputs.VALID != 'true'
+        run: |
+          echo "ABORT: PR number validation failed!"
+          exit 1
+
+      - name: Uncompress HTML docs
+        run: |
+          tar xf html-output/html-output.tar.xz -C html-output
+
+      - name: Configure AWS Credentials
+        uses: aws-actions/configure-aws-credentials@v1
+        with:
+          aws-access-key-id: ${{ secrets.AWS_BUILDS_ZEPHYR_PR_ACCESS_KEY_ID }}
+          aws-secret-access-key: ${{ secrets.AWS_BUILDS_ZEPHYR_PR_SECRET_ACCESS_KEY }}
+          aws-region: us-east-1
+
+      - name: Upload to AWS S3
+        env:
+          HEAD_BRANCH: ${{ github.event.workflow_run.head_branch }}
+        run: |
+          aws s3 sync --quiet html-output/html \
+            s3://builds.zephyrproject.org/${{ github.event.repository.name }}/pr/${PR_NUM}/docs \
+            --delete

.github/workflows/doc-publish.yml (12 changes)

@@ -2,23 +2,21 @@
 # Copyright (c) 2021 Nordic Semiconductor ASA
 # SPDX-License-Identifier: Apache-2.0

-name: Publish Documentation
+name: Documentation Publish

 on:
   workflow_run:
     workflows: ["Documentation Build"]
     branches:
-    - main
-    - v*
-    tags:
-    - v*
+      - main
+      - v*
     types:
-    - completed
+      - completed

 jobs:
   doc-publish:
     name: Publish Documentation
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-20.04
     if: ${{ github.event.workflow_run.conclusion == 'success' }}

     steps:

.github/workflows/errno.yml (4 changes)

@@ -6,13 +6,13 @@ on:

 jobs:
   check-errno:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-20.04
     container:
       image: zephyrprojectrtos/ci:v0.18.4

    steps:
      - name: checkout
-       uses: actions/checkout@v2
+       uses: actions/checkout@v3

      - name: Run errno.py
        run: |

.github/workflows/footprint-tracking.yml (17 changes)

@@ -13,19 +13,14 @@ on:
       # same commit
       - 'v*'

+concurrency:
+  group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.head_ref || github.ref }}
+  cancel-in-progress: true
+
 jobs:
-  footprint-tracking-cancel:
-    runs-on: ubuntu-latest
-    if: github.repository == 'zephyrproject-rtos/zephyr'
-    steps:
-      - name: Cancel Previous Runs
-        uses: styfle/cancel-workflow-action@0.6.0
-        with:
-          access_token: ${{ github.token }}
   footprint-tracking:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-20.04
     if: github.repository == 'zephyrproject-rtos/zephyr'
-    needs: footprint-tracking-cancel
     container:
       image: zephyrprojectrtos/ci:v0.18.4
       options: '--entrypoint /bin/bash'

@@ -44,7 +39,7 @@ jobs:
          sudo pip3 install -U setuptools wheel pip gitpython

      - name: checkout
-       uses: actions/checkout@v2
+       uses: actions/checkout@v3
        with:
          ref: ${{ github.event.pull_request.head.sha }}
          fetch-depth: 0

.github/workflows/footprint.yml (21 changes)

@@ -2,19 +2,14 @@ name: Footprint Delta

 on: pull_request

+concurrency:
+  group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.head_ref || github.ref }}
+  cancel-in-progress: true
+
 jobs:
-  footprint-cancel:
-    runs-on: ubuntu-latest
-    if: github.repository == 'zephyrproject-rtos/zephyr'
-    steps:
-      - name: Cancel Previous Runs
-        uses: styfle/cancel-workflow-action@0.6.0
-        with:
-          access_token: ${{ github.token }}
   footprint-delta:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-20.04
     if: github.repository == 'zephyrproject-rtos/zephyr'
-    needs: footprint-cancel
     container:
       image: zephyrprojectrtos/ci:v0.18.4
       options: '--entrypoint /bin/bash'

@@ -25,16 +20,12 @@ jobs:
       CLANG_ROOT_DIR: /usr/lib/llvm-12
       ZEPHYR_TOOLCHAIN_VARIANT: zephyr
    steps:
-     - name: Cancel Previous Runs
-       uses: styfle/cancel-workflow-action@0.6.0
-       with:
-         access_token: ${{ github.token }}
      - name: Update PATH for west
        run: |
          echo "$HOME/.local/bin" >> $GITHUB_PATH

      - name: checkout
-       uses: actions/checkout@v2
+       uses: actions/checkout@v3
        with:
          ref: ${{ github.event.pull_request.head.sha }}
          fetch-depth: 0

.github/workflows/issue_count.yml (6 changes)

@@ -14,13 +14,13 @@ env:
 jobs:
   track-issues:
     name: "Collect Issue Stats"
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-20.04
     if: github.repository == 'zephyrproject-rtos/zephyr'

    steps:
      - name: Download configuration file
        run: |
-         wget -q https://raw.githubusercontent.com/$GITHUB_REPOSITORY/master/.github/workflows/issues-report-config.json
+         wget -q https://raw.githubusercontent.com/$GITHUB_REPOSITORY/main/.github/workflows/issues-report-config.json

      - name: install-packages
        run: |

@@ -34,7 +34,7 @@ jobs:
          token: ${{ secrets.GITHUB_TOKEN }}

      - name: upload-stats
-       uses: actions/upload-artifact@master
+       uses: actions/upload-artifact@v3
        continue-on-error: True
        with:
          name: ${{ env.OUTPUT_FILE_NAME }}

.github/workflows/license_check.yml (4 changes)

@@ -4,7 +4,7 @@ on: [pull_request]

 jobs:
   scancode_job:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-20.04
     name: Scan code for licenses
     steps:
      - name: Checkout the code

@@ -15,7 +15,7 @@ jobs:
        with:
          directory-to-scan: 'scan/'
      - name: Artifact Upload
-       uses: actions/upload-artifact@v1
+       uses: actions/upload-artifact@v3
        with:
          name: scancode
          path: ./artifacts

.github/workflows/manifest.yml (4 changes)

@@ -6,11 +6,11 @@ on:

 jobs:
   contribs:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-20.04
     name: Manifest
     steps:
      - name: Checkout the code
-       uses: actions/checkout@v2
+       uses: actions/checkout@v3
        with:
          path: zephyrproject/zephyr
          ref: ${{ github.event.pull_request.head.sha }}

.github/workflows/release.yml (9 changes)

@@ -7,15 +7,16 @@ on:

 jobs:
   release:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-20.04
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3
        with:
          fetch-depth: 0

      - name: Get the version
        id: get_version
-       run: echo ::set-output name=VERSION::${GITHUB_REF#refs/tags/}
+       run: |
+         echo "VERSION=${GITHUB_REF#refs/tags/}" >> $GITHUB_OUTPUT

      - name: REUSE Compliance Check
        uses: fsfe/reuse-action@v1

@@ -23,7 +24,7 @@ jobs:
          args: spdx -o zephyr-${{ steps.get_version.outputs.VERSION }}.spdx

      - name: upload-results
-       uses: actions/upload-artifact@master
+       uses: actions/upload-artifact@v3
        continue-on-error: True
        with:
          name: zephyr-${{ steps.get_version.outputs.VERSION }}.spdx

.github/workflows/stale_issue.yml (2 changes)

@@ -6,7 +6,7 @@ on:
 jobs:
   stale:
     name: Find Stale issues and PRs
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-20.04
     if: github.repository == 'zephyrproject-rtos/zephyr'
     steps:
       - uses: actions/stale@v3

.github/workflows/twister.yaml (55 changes)

@@ -2,29 +2,27 @@ name: Run tests with twister

 on:
   push:
     branches:
       - v2.7-branch
   pull_request_target:
     branches:
       - v2.7-branch
   schedule:
     # Run at 00:00 on Saturday
     - cron: '20 0 * * 6'

+concurrency:
+  group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.head_ref || github.ref }}
+  cancel-in-progress: true
+
 jobs:
-  twister-build-cleanup:
-    runs-on: ubuntu-latest
-    steps:
-      - name: Cancel Previous Runs
-        uses: styfle/cancel-workflow-action@0.6.0
-        with:
-          access_token: ${{ github.token }}
-
   twister-build-prep:
-    runs-on: zephyr_runner
-    needs: twister-build-cleanup
+    runs-on: zephyr-runner-linux-x64-4xlarge
     container:
       image: zephyrprojectrtos/ci:v0.18.4
       options: '--entrypoint /bin/bash'
     volumes:
-      - /home/runners/zephyrproject:/github/cache/zephyrproject
+      - /repo-cache/zephyrproject:/github/cache/zephyrproject
     outputs:
       subset: ${{ steps.output-services.outputs.subset }}
       size: ${{ steps.output-services.outputs.size }}

@@ -38,14 +36,16 @@ jobs:
       COMMIT_RANGE: ${{ github.event.pull_request.base.sha }}..${{ github.event.pull_request.head.sha }}
       BASE_REF: ${{ github.base_ref }}
    steps:
-     - name: Cleanup
+     - name: Clone cached Zephyr repository
        if: github.event_name == 'pull_request_target'
+       continue-on-error: true
        run: |
-         # hotfix, until we have a better way to deal with existing data
-         rm -rf zephyr zephyr-testing
+         git clone --shared /github/cache/zephyrproject/zephyr .
+         git remote set-url origin ${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}

      - name: Checkout
        if: github.event_name == 'pull_request_target'
-       uses: actions/checkout@v2
+       uses: actions/checkout@v3
        with:
          ref: ${{ github.event.pull_request.head.sha }}
          fetch-depth: 0

@@ -102,18 +102,18 @@ jobs:
          else
            size=0
          fi
-         echo "::set-output name=subset::${subset}";
-         echo "::set-output name=size::${size}";
+         echo "subset=${subset}" >> $GITHUB_OUTPUT
+         echo "size=${size}" >> $GITHUB_OUTPUT

   twister-build:
-    runs-on: zephyr_runner
+    runs-on: zephyr-runner-linux-x64-4xlarge
     needs: twister-build-prep
     if: needs.twister-build-prep.outputs.size != 0
     container:
       image: zephyrprojectrtos/ci:v0.18.4
       options: '--entrypoint /bin/bash'
     volumes:
-      - /home/runners/zephyrproject:/github/cache/zephyrproject
+      - /repo-cache/zephyrproject:/github/cache/zephyrproject
     strategy:
       fail-fast: false
       matrix:

@@ -128,13 +128,14 @@ jobs:
       COMMIT_RANGE: ${{ github.event.pull_request.base.sha }}..${{ github.event.pull_request.head.sha }}
       BASE_REF: ${{ github.base_ref }}
    steps:
-     - name: Cleanup
+     - name: Clone cached Zephyr repository
+       continue-on-error: true
        run: |
-         # hotfix, until we have a better way to deal with existing data
-         rm -rf zephyr zephyr-testing
+         git clone --shared /github/cache/zephyrproject/zephyr .
+         git remote set-url origin ${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}

      - name: Checkout
-       uses: actions/checkout@v2
+       uses: actions/checkout@v3
        with:
          ref: ${{ github.event.pull_request.head.sha }}
          fetch-depth: 0

@@ -173,7 +174,7 @@ jobs:
          string(TIMESTAMP current_date "%Y-%m-%d-%H;%M;%S" UTC)
          string(REPLACE "/" "_" repo ${{github.repository}})
          string(REPLACE "-" "_" repo2 ${repo})
-         message("::set-output name=repo::${repo2}")
+         file(APPEND $ENV{GITHUB_OUTPUT} "repo=${repo2}\n")

      - name: use cache
        id: cache-ccache

@@ -220,7 +221,7 @@ jobs:

      - name: Upload Unit Test Results
        if: always()
-       uses: actions/upload-artifact@v2
+       uses: actions/upload-artifact@v3
        with:
          name: Unit Test Results (Subset ${{ matrix.subset }})
          if-no-files-found: ignore

@@ -231,7 +232,7 @@ jobs:
   twister-test-results:
     name: "Publish Unit Tests Results"
     needs: twister-build
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-20.04
     # the build-and-test job might be skipped, we don't need to run this job then
     if: success() || failure()

.github/workflows/twister_tests.yml (12 changes)

@@ -5,12 +5,16 @@ name: Twister TestSuite

 on:
   push:
+    branches:
+      - v2.7-branch
     paths:
       - 'scripts/pylib/twister/**'
       - 'scripts/twister'
       - 'scripts/tests/twister/**'
       - '.github/workflows/twister_tests.yml'
   pull_request:
+    branches:
+      - v2.7-branch
     paths:
       - 'scripts/pylib/twister/**'
       - 'scripts/twister'

@@ -24,17 +28,17 @@ jobs:
     strategy:
       matrix:
         python-version: [3.6, 3.7, 3.8]
-        os: [ubuntu-latest]
+        os: [ubuntu-20.04]
     steps:
     - name: checkout
-      uses: actions/checkout@v2
+      uses: actions/checkout@v3
     - name: Set up Python ${{ matrix.python-version }}
-      uses: actions/setup-python@v1
+      uses: actions/setup-python@v4
      with:
        python-version: ${{ matrix.python-version }}
     - name: cache-pip-linux
      if: startsWith(runner.os, 'Linux')
-     uses: actions/cache@v1
+     uses: actions/cache@v3
      with:
        path: ~/.cache/pip
        key: ${{ runner.os }}-pip-${{ matrix.python-version }}

.github/workflows/west_cmds.yml (20 changes)

@@ -5,11 +5,15 @@ name: Zephyr West Command Tests

 on:
   push:
+    branches:
+      - v2.7-branch
     paths:
       - 'scripts/west-commands.yml'
       - 'scripts/west_commands/**'
       - '.github/workflows/west_cmds.yml'
   pull_request:
+    branches:
+      - v2.7-branch
     paths:
       - 'scripts/west-commands.yml'
       - 'scripts/west_commands/**'

@@ -22,20 +26,22 @@ jobs:
     strategy:
       matrix:
         python-version: [3.6, 3.7, 3.8]
-        os: [ubuntu-latest, macos-latest, windows-latest]
+        os: [ubuntu-20.04, macos-11, windows-2022]
         exclude:
-          - os: macos-latest
+          - os: macos-11
             python-version: 3.6
+          - os: windows-2022
+            python-version: 3.6
     steps:
     - name: checkout
-      uses: actions/checkout@v2
+      uses: actions/checkout@v3
     - name: Set up Python ${{ matrix.python-version }}
-      uses: actions/setup-python@v1
+      uses: actions/setup-python@v4
      with:
        python-version: ${{ matrix.python-version }}
     - name: cache-pip-linux
      if: startsWith(runner.os, 'Linux')
-     uses: actions/cache@v1
+     uses: actions/cache@v3
      with:
        path: ~/.cache/pip
        key: ${{ runner.os }}-pip-${{ matrix.python-version }}

@@ -43,7 +49,7 @@ jobs:
        ${{ runner.os }}-pip-${{ matrix.python-version }}
     - name: cache-pip-mac
      if: startsWith(runner.os, 'macOS')
-     uses: actions/cache@v1
+     uses: actions/cache@v3
      with:
        path: ~/Library/Caches/pip
        # Trailing '-' was just to get a different cache name

@@ -52,7 +58,7 @@ jobs:
        ${{ runner.os }}-pip-${{ matrix.python-version }}-
     - name: cache-pip-win
      if: startsWith(runner.os, 'Windows')
-     uses: actions/cache@v1
+     uses: actions/cache@v3
      with:
        path: ~\AppData\Local\pip\Cache
        key: ${{ runner.os }}-pip-${{ matrix.python-version }}

VERSION (2 changes)

@@ -1,5 +1,5 @@
 VERSION_MAJOR = 2
 VERSION_MINOR = 7
-PATCHLEVEL = 3
+PATCHLEVEL = 4
 VERSION_TWEAK = 0
 EXTRAVERSION =

@@ -56,7 +56,7 @@ void arch_start_cpu(int cpu_num, k_thread_stack_t *stack, int sz,
	 * arc_cpu_wake_flag will protect arc_cpu_sp that
	 * only one slave cpu can read it per time
	 */
-	arc_cpu_sp = Z_THREAD_STACK_BUFFER(stack) + sz;
+	arc_cpu_sp = Z_KERNEL_STACK_BUFFER(stack) + sz;

	arc_cpu_wake_flag = cpu_num;

@@ -163,12 +163,23 @@ endforeach()
 unset(EXTRA_KCONFIG_OPTIONS)
 get_cmake_property(cache_variable_names CACHE_VARIABLES)
 foreach (name ${cache_variable_names})
-  if("${name}" MATCHES "^CONFIG_")
+  if("${name}" MATCHES "^CLI_CONFIG_")
+    # Variable was set by user in earlier invocation, let's append to extra
+    # config unless a new value has been given.
+    string(REGEX REPLACE "^CLI_" "" org_name ${name})
+    if(NOT DEFINED ${org_name})
+      set(EXTRA_KCONFIG_OPTIONS
+        "${EXTRA_KCONFIG_OPTIONS}\n${org_name}=${${name}}"
+      )
+    endif()
+  elseif("${name}" MATCHES "^CONFIG_")
     # When a cache variable starts with 'CONFIG_', it is assumed to be
     # a Kconfig symbol assignment from the CMake command line.
     set(EXTRA_KCONFIG_OPTIONS
       "${EXTRA_KCONFIG_OPTIONS}\n${name}=${${name}}"
     )
+    set(CLI_${name} "${${name}}")
+    list(APPEND cli_config_list ${name})
   endif()
 endforeach()

@@ -296,21 +307,20 @@ add_custom_target(config-twister DEPENDS ${DOTCONFIG})
 # Remove the CLI Kconfig symbols from the namespace and
 # CMakeCache.txt. If the symbols end up in DOTCONFIG they will be
 # re-introduced to the namespace through 'import_kconfig'.
-foreach (name ${cache_variable_names})
-  if("${name}" MATCHES "^CONFIG_")
-    unset(${name})
-    unset(${name} CACHE)
-  endif()
+foreach (name ${cli_config_list})
+  unset(${name})
+  unset(${name} CACHE)
 endforeach()

 # Parse the lines prefixed with CONFIG_ in the .config file from Kconfig
 import_kconfig(CONFIG_ ${DOTCONFIG})

-# Re-introduce the CLI Kconfig symbols that survived
-foreach (name ${cache_variable_names})
-  if("${name}" MATCHES "^CONFIG_")
-    if(DEFINED ${name})
-      set(${name} ${${name}} CACHE STRING "")
-    endif()
+# Cache the CLI Kconfig symbols that survived through Kconfig, prefixed with CLI_.
+# Remove those that might have changed compared to earlier runs, if they no longer appear.
+foreach (name ${cli_config_list})
+  if(DEFINED ${name})
+    set(CLI_${name} ${CLI_${name}} CACHE INTERNAL "")
+  else()
+    unset(CLI_${name} CACHE)
   endif()
 endforeach()
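
The effect of the CLI_ cache prefix above is that a Kconfig symbol passed on the CMake command line is now remembered across CMake re-runs instead of silently dropping out of the cache (the fix tracked by issue 49569). A hedged usage sketch in shell; the board and symbol are illustrative, not taken from the diff:

    # First configure: CONFIG_ASSERT=y is applied and cached as CLI_CONFIG_ASSERT.
    west build -b qemu_x86 samples/hello_world -- -DCONFIG_ASSERT=y

    # A later CMake re-run (e.g. after touching a CMakeLists.txt) re-applies the
    # cached value unless a new -DCONFIG_ASSERT=... is passed explicitly.
    west build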

@@ -2,6 +2,60 @@

 .. _zephyr_2.7:

+.. _zephyr_2.7.4:
+
+Zephyr 2.7.4
+####################
+
+This is an LTS maintenance release with fixes.
+
+Issues Fixed
+************
+
+These GitHub issues were addressed since the previous 2.7.3 tagged
+release:
+
+.. comment List derived from GitHub Issue query: ...
+   * :github:`issuenumber` - issue title
+
+* :github:`25417` - net: socket: socketpair: check for ISR context
+* :github:`41012` - irq_enable() doesn't support enabling NVIC IRQ number more than 127
+* :github:`44070` - west spdx TypeError: 'NoneType' object is not iterable
+* :github:`46072` - subsys/hawkBit: Debug log error in hawkbit example "CONFIG_LOG_STRDUP_MAX_STRING"
+* :github:`48056` - Possible null pointer dereference after k_mutex_lock times out
+* :github:`49102` - hawkbit - dns name randomly not resolved
+* :github:`49139` - can't run west or DT tests on windows / py 3.6
+* :github:`49564` - Newer versions of pylink are not supported in latest zephyr 2.7 release
+* :github:`49569` - Backport cmake string cache fix to v2.7 branch
+* :github:`50221` - tests: debug: test case subsys/debug/coredump failed on acrn_ehl_crb on branch v2.7
+* :github:`50467` - Possible memory corruption on ARC when userspace is enabled
+* :github:`50468` - Incorrect Z_THREAD_STACK_BUFFER in arch_start_cpu for Xtensa
+* :github:`50961` - drivers: counter: Update counter_set_channel_alarm documentation
+* :github:`51714` - Bluetooth: Application with buffer that cannot unref it in disconnect handler leads to advertising issues
+* :github:`51776` - POSIX API is not portable across arches
+* :github:`52247` - mgmt: mcumgr: image upload, then image erase, then image upload does not restart upload from start
+* :github:`52517` - lib: posix: sleep() does not return the number of seconds left if interrupted
+* :github:`52518` - lib: posix: usleep() does not follow the POSIX spec
+* :github:`52542` - lib: posix: make sleep() and usleep() standards-compliant
+* :github:`52591` - mcumgr user data size out of sync with net buffer user data size
+* :github:`52829` - kernel/sched: Fix SMP race on pend
+* :github:`53088` - Unable to change initialization priority of logging subsys
+
+Security Vulnerability Related
+******************************
+
+The following security vulnerabilities (CVEs) were addressed in this
+release:
+
+* CVE-2022-2741: `Zephyr project bug tracker GHSA-hx5v-j59q-c3j8
+  <https://github.com/zephyrproject-rtos/zephyr/security/advisories/GHSA-hx5v-j59q-c3j8>`_
+
+* CVE-2022-1841: `Zephyr project bug tracker GHSA-5c3j-p8cr-2pgh
+  <https://github.com/zephyrproject-rtos/zephyr/security/advisories/GHSA-5c3j-p8cr-2pgh>`_
+
+More detailed information can be found in:
+https://docs.zephyrproject.org/latest/security/vulnerabilities.html
+
 .. _zephyr_2.7.3:

 Zephyr 2.7.3

@@ -385,6 +385,7 @@ static inline int z_impl_counter_get_value(const struct device *dev,
  *          interrupts or requested channel).
  * @retval -EINVAL if alarm settings are invalid.
  * @retval -ETIME if absolute alarm was set too late.
+ * @retval -EBUSY if alarm is already active.
  */
 __syscall int counter_set_channel_alarm(const struct device *dev,
					uint8_t chan_id,

@@ -161,15 +161,21 @@ int z_impl_k_mutex_lock(struct k_mutex *mutex, k_timeout_t timeout)

	key = k_spin_lock(&lock);

-	struct k_thread *waiter = z_waitq_head(&mutex->wait_q);
+	/*
+	 * Check if mutex was unlocked after this thread was unpended.
+	 * If so, skip adjusting owner's priority down.
+	 */
+	if (likely(mutex->owner != NULL)) {
+		struct k_thread *waiter = z_waitq_head(&mutex->wait_q);

-	new_prio = (waiter != NULL) ?
-		new_prio_for_inheritance(waiter->base.prio, mutex->owner_orig_prio) :
-		mutex->owner_orig_prio;
+		new_prio = (waiter != NULL) ?
+			new_prio_for_inheritance(waiter->base.prio, mutex->owner_orig_prio) :
+			mutex->owner_orig_prio;

-	LOG_DBG("adjusting prio down on mutex %p", mutex);
+		LOG_DBG("adjusting prio down on mutex %p", mutex);

-	resched = adjust_owner_prio(mutex, new_prio) || resched;
+		resched = adjust_owner_prio(mutex, new_prio) || resched;
+	}

	if (resched) {
		z_reschedule(&lock, key);

@@ -626,17 +626,13 @@ static void add_thread_timeout(struct k_thread *thread, k_timeout_t timeout)
	}
 }

-static void pend(struct k_thread *thread, _wait_q_t *wait_q,
-		 k_timeout_t timeout)
+static void pend_locked(struct k_thread *thread, _wait_q_t *wait_q,
+			k_timeout_t timeout)
 {
 #ifdef CONFIG_KERNEL_COHERENCE
	__ASSERT_NO_MSG(wait_q == NULL || arch_mem_coherent(wait_q));
 #endif

-	LOCKED(&sched_spinlock) {
-		add_to_waitq_locked(thread, wait_q);
-	}
-
+	add_to_waitq_locked(thread, wait_q);
	add_thread_timeout(thread, timeout);
 }

@@ -644,7 +640,9 @@ void z_pend_thread(struct k_thread *thread, _wait_q_t *wait_q,
		   k_timeout_t timeout)
 {
	__ASSERT_NO_MSG(thread == _current || is_thread_dummy(thread));
-	pend(thread, wait_q, timeout);
+	LOCKED(&sched_spinlock) {
+		pend_locked(thread, wait_q, timeout);
+	}
 }

 static inline void unpend_thread_no_timeout(struct k_thread *thread)

@@ -686,7 +684,12 @@ void z_thread_timeout(struct _timeout *timeout)

 int z_pend_curr_irqlock(uint32_t key, _wait_q_t *wait_q, k_timeout_t timeout)
 {
-	pend(_current, wait_q, timeout);
+	/* This is a legacy API for pre-switch architectures and isn't
+	 * correctly synchronized for multi-cpu use
+	 */
+	__ASSERT_NO_MSG(!IS_ENABLED(CONFIG_SMP));
+
+	pend_locked(_current, wait_q, timeout);

 #if defined(CONFIG_TIMESLICING) && defined(CONFIG_SWAP_NONATOMIC)
	pending_current = _current;

@@ -709,8 +712,20 @@ int z_pend_curr(struct k_spinlock *lock, k_spinlock_key_t key,
 #if defined(CONFIG_TIMESLICING) && defined(CONFIG_SWAP_NONATOMIC)
	pending_current = _current;
 #endif
-	pend(_current, wait_q, timeout);
-	return z_swap(lock, key);
+	__ASSERT_NO_MSG(sizeof(sched_spinlock) == 0 || lock != &sched_spinlock);
+
+	/* We do a "lock swap" prior to calling z_swap(), such that
+	 * the caller's lock gets released as desired.  But we ensure
+	 * that we hold the scheduler lock and leave local interrupts
+	 * masked until we reach the context switch.  z_swap() itself
+	 * has similar code; the duplication is because it's a legacy
+	 * API that doesn't expect to be called with scheduler lock
+	 * held.
+	 */
+	(void) k_spin_lock(&sched_spinlock);
+	pend_locked(_current, wait_q, timeout);
+	k_spin_release(lock);
+	return z_swap(&sched_spinlock, key);
 }

 struct k_thread *z_unpend1_no_timeout(_wait_q_t *wait_q)

@@ -4,6 +4,8 @@
  * SPDX-License-Identifier: Apache-2.0
  */

+#include <errno.h>
+
 #include <kernel.h>
 #include <posix/unistd.h>

@@ -14,8 +16,12 @@
  */
 unsigned sleep(unsigned int seconds)
 {
-	k_sleep(K_SECONDS(seconds));
-	return 0;
+	int rem;
+
+	rem = k_sleep(K_SECONDS(seconds));
+	__ASSERT_NO_MSG(rem >= 0);
+
+	return rem / MSEC_PER_SEC;
 }
 /**
  * @brief Suspend execution for microsecond intervals.

@@ -24,10 +30,19 @@ unsigned sleep(unsigned int seconds)
  */
 int usleep(useconds_t useconds)
 {
-	if (useconds < USEC_PER_MSEC) {
-		k_busy_wait(useconds);
-	} else {
-		k_msleep(useconds / USEC_PER_MSEC);
+	int32_t rem;
+
+	if (useconds >= USEC_PER_SEC) {
+		errno = EINVAL;
+		return -1;
	}

+	rem = k_usleep(useconds);
+	__ASSERT_NO_MSG(rem >= 0);
+	if (rem > 0) {
+		/* sleep was interrupted by a call to k_wakeup() */
+		errno = EINTR;
+		return -1;
+	}
+
	return 0;

@@ -19,3 +19,6 @@ CONFIG_STATS_NAMES=n

 # Disable Logging for footprint reduction
 CONFIG_LOG=n
+
+# Network settings
+CONFIG_NET_BUF_USER_DATA_SIZE=8

@@ -16,3 +16,6 @@ CONFIG_SYSTEM_WORKQUEUE_STACK_SIZE=2304

 # Enable file system commands
 CONFIG_MCUMGR_CMD_FS_MGMT=y
+
+# Network settings
+CONFIG_NET_BUF_USER_DATA_SIZE=8

@@ -2769,18 +2769,6 @@ class _BindingLoader(Loader):
 # Add legacy '!include foo.yaml' handling
 _BindingLoader.add_constructor("!include", _binding_include)

-# Use OrderedDict instead of plain dict for YAML mappings, to preserve
-# insertion order on Python 3.5 and earlier (plain dicts only preserve
-# insertion order on Python 3.6+). This makes testing easier and avoids
-# surprises.
-#
-# Adapted from
-# https://stackoverflow.com/questions/5121931/in-python-how-can-you-load-yaml-mappings-as-ordereddicts.
-# Hopefully this API stays stable.
-_BindingLoader.add_constructor(
-    yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
-    lambda loader, node: OrderedDict(loader.construct_pairs(node)))
-
 #
 # "Default" binding for properties which are defined by the spec.
 #

@@ -5,7 +5,7 @@ envlist=py3
 deps =
     setuptools-scm
     pytest
-    types-PyYAML
+    types-PyYAML==6.0.7
     mypy
 setenv =
     TOXTEMPDIR={envtmpdir}

@@ -16,6 +16,7 @@ import tempfile

 from runners.core import ZephyrBinaryRunner, RunnerCaps

 try:
     import pylink
+    from pylink.library import Library
     MISSING_REQUIREMENTS = False
 except ImportError:

@@ -141,16 +142,23 @@ class JLinkBinaryRunner(ZephyrBinaryRunner):
         # to load the shared library distributed with the tools, which
         # provides an API call for getting the version.
         if not hasattr(self, '_jlink_version'):
+            # pylink 0.14.0/0.14.1 exposes JLink SDK DLL (libjlinkarm) in
+            # JLINK_SDK_STARTS_WITH, while other versions use JLINK_SDK_NAME
+            if pylink.__version__ in ('0.14.0', '0.14.1'):
+                sdk = Library.JLINK_SDK_STARTS_WITH
+            else:
+                sdk = Library.JLINK_SDK_NAME
+
             plat = sys.platform
             if plat.startswith('win32'):
                 libname = Library.get_appropriate_windows_sdk_name() + '.dll'
             elif plat.startswith('linux'):
-                libname = Library.JLINK_SDK_NAME + '.so'
+                libname = sdk + '.so'
             elif plat.startswith('darwin'):
-                libname = Library.JLINK_SDK_NAME + '.dylib'
+                libname = sdk + '.dylib'
             else:
                 self.logger.warning(f'unknown platform {plat}; assuming UNIX')
-                libname = Library.JLINK_SDK_NAME + '.so'
+                libname = sdk + '.so'

             lib = Library(dllpath=os.fspath(Path(self.commander).parent /
                                             libname))
|
||||
|
||||
# write other license info, if any
|
||||
if len(doc.customLicenseIDs) > 0:
|
||||
for lic in list(doc.customLicenseIDs).sort():
|
||||
for lic in sorted(list(doc.customLicenseIDs)):
|
||||
writeOtherLicenseSPDX(f, lic)
|
||||
|
||||
# Open SPDX document file for writing, write the document, and calculate
|
||||
|
||||
@@ -24,6 +24,224 @@
|
||||
#define __FPU_PRESENT CONFIG_CPU_HAS_FPU
|
||||
#define __MPU_PRESENT CONFIG_CPU_HAS_ARM_MPU
|
||||
|
||||
#define __CM4_REV 0x0201 /*!< Core Revision r2p1 */
|
||||
|
||||
#define __VTOR_PRESENT 1 /*!< Set to 1 if VTOR is present */
|
||||
#define __NVIC_PRIO_BITS 3 /*!< Number of Bits used for Priority Levels */
|
||||
#define __Vendor_SysTickConfig 0 /*!< 0 use default SysTick HW */
|
||||
#define __FPU_DP 0 /*!< Set to 1 if FPU is double precision */
|
||||
#define __ICACHE_PRESENT 0 /*!< Set to 1 if I-Cache is present */
|
||||
#define __DCACHE_PRESENT 0 /*!< Set to 1 if D-Cache is present */
|
||||
#define __DTCM_PRESENT 0 /*!< Set to 1 if DTCM is present */
|
||||
|
||||
/** @brief ARM Cortex-M4 NVIC Interrupt Numbers
|
||||
* CM4 NVIC implements 16 internal interrupt sources. CMSIS macros use
|
||||
* negative numbers [-15, -1]. Lower numerical value indicates higher
|
||||
* priority.
|
||||
* -15 = Reset Vector invoked on POR or any CPU reset.
|
||||
* -14 = NMI
|
||||
* -13 = Hard Fault. At POR or CPU reset all faults map to Hard Fault.
|
||||
* -12 = Memory Management Fault. If enabled Hard Faults caused by access
|
||||
* violations, no address match, or MPU mismatch.
|
||||
* -11 = Bus Fault. If enabled pre-fetch, AHB access faults.
|
||||
* -10 = Usage Fault. If enabled Undefined instructions, illegal state
|
||||
* transition (Thumb -> ARM mode), unaligned, etc.
|
||||
* -9 through -6 are not implemented (reserved).
|
||||
* -5 System call via SVC instruction.
|
||||
* -4 Debug Monitor.
|
||||
* -3 not implemented (reserved).
|
||||
* -2 PendSV for system service.
|
||||
* -1 SysTick NVIC system timer.
|
||||
* Numbers >= 0 are external peripheral interrupts.
|
||||
*/
|
||||
typedef enum {
|
||||
/* ========== ARM Cortex-M4 Specific Interrupt Numbers ============ */
|
||||
|
||||
Reset_IRQn = -15, /*!< POR/CPU Reset Vector */
|
||||
NonMaskableInt_IRQn = -14, /*!< NMI */
|
||||
HardFault_IRQn = -13, /*!< Hard Faults */
|
||||
MemoryManagement_IRQn = -12, /*!< Memory Management faults */
|
||||
BusFault_IRQn = -11, /*!< Bus Access faults */
|
||||
UsageFault_IRQn = -10, /*!< Usage/instruction faults */
|
||||
SVCall_IRQn = -5, /*!< SVC */
|
||||
DebugMonitor_IRQn = -4, /*!< Debug Monitor */
|
||||
PendSV_IRQn = -2, /*!< PendSV */
|
||||
SysTick_IRQn = -1, /*!< SysTick */
|
||||
|
||||
/* ============== MEC172x Specific Interrupt Numbers ============ */
|
||||
|
||||
GIRQ08_IRQn = 0, /*!< GPIO 0140 - 0176 */
|
||||
GIRQ09_IRQn = 1, /*!< GPIO 0100 - 0136 */
|
||||
GIRQ10_IRQn = 2, /*!< GPIO 0040 - 0076 */
|
||||
GIRQ11_IRQn = 3, /*!< GPIO 0000 - 0036 */
|
||||
GIRQ12_IRQn = 4, /*!< GPIO 0200 - 0236 */
|
||||
GIRQ13_IRQn = 5, /*!< SMBus Aggregated */
|
||||
GIRQ14_IRQn = 6, /*!< DMA Aggregated */
|
||||
GIRQ15_IRQn = 7,
|
||||
GIRQ16_IRQn = 8,
|
||||
GIRQ17_IRQn = 9,
|
||||
GIRQ18_IRQn = 10,
|
||||
GIRQ19_IRQn = 11,
|
||||
GIRQ20_IRQn = 12,
|
||||
GIRQ21_IRQn = 13,
|
||||
/* GIRQ22(peripheral clock wake) is not connected to NVIC */
|
||||
GIRQ23_IRQn = 14,
|
||||
GIRQ24_IRQn = 15,
|
||||
GIRQ25_IRQn = 16,
|
||||
GIRQ26_IRQn = 17, /*!< GPIO 0240 - 0276 */
|
||||
/* Reserved 18-19 */
|
||||
/* GIRQ's 8 - 12, 24 - 26 no direct connections */
|
||||
I2C_SMB_0_IRQn = 20, /*!< GIRQ13 b[0] */
|
||||
I2C_SMB_1_IRQn = 21, /*!< GIRQ13 b[1] */
|
||||
I2C_SMB_2_IRQn = 22, /*!< GIRQ13 b[2] */
|
||||
I2C_SMB_3_IRQn = 23, /*!< GIRQ13 b[3] */
|
||||
DMA0_IRQn = 24, /*!< GIRQ14 b[0] */
|
||||
DMA1_IRQn = 25, /*!< GIRQ14 b[1] */
|
||||
DMA2_IRQn = 26, /*!< GIRQ14 b[2] */
|
||||
DMA3_IRQn = 27, /*!< GIRQ14 b[3] */
|
||||
DMA4_IRQn = 28, /*!< GIRQ14 b[4] */
|
||||
DMA5_IRQn = 29, /*!< GIRQ14 b[5] */
|
||||
DMA6_IRQn = 30, /*!< GIRQ14 b[6] */
|
||||
DMA7_IRQn = 31, /*!< GIRQ14 b[7] */
|
||||
DMA8_IRQn = 32, /*!< GIRQ14 b[8] */
|
||||
DMA9_IRQn = 33, /*!< GIRQ14 b[9] */
|
||||
DMA10_IRQn = 34, /*!< GIRQ14 b[10] */
|
||||
DMA11_IRQn = 35, /*!< GIRQ14 b[11] */
|
||||
DMA12_IRQn = 36, /*!< GIRQ14 b[12] */
|
||||
DMA13_IRQn = 37, /*!< GIRQ14 b[13] */
|
||||
DMA14_IRQn = 38, /*!< GIRQ14 b[14] */
|
||||
DMA15_IRQn = 39, /*!< GIRQ14 b[15] */
|
||||
UART0_IRQn = 40, /*!< GIRQ15 b[0] */
|
||||
UART1_IRQn = 41, /*!< GIRQ15 b[1] */
|
||||
EMI0_IRQn = 42, /*!< GIRQ15 b[2] */
|
||||
EMI1_IRQn = 43, /*!< GIRQ15 b[3] */
|
||||
EMI2_IRQn = 44, /*!< GIRQ15 b[4] */
|
||||
ACPI_EC0_IBF_IRQn = 45, /*!< GIRQ15 b[5] */
|
||||
ACPI_EC0_OBE_IRQn = 46, /*!< GIRQ15 b[6] */
|
||||
ACPI_EC1_IBF_IRQn = 47, /*!< GIRQ15 b[7] */
|
||||
ACPI_EC1_OBE_IRQn = 48, /*!< GIRQ15 b[8] */
|
||||
ACPI_EC2_IBF_IRQn = 49, /*!< GIRQ15 b[9] */
|
||||
ACPI_EC2_OBE_IRQn = 50, /*!< GIRQ15 b[10] */
|
||||
ACPI_EC3_IBF_IRQn = 51, /*!< GIRQ15 b[11] */
|
||||
ACPI_EC3_OBE_IRQn = 52, /*!< GIRQ15 b[12] */
|
||||
ACPI_EC4_IBF_IRQn = 53, /*!< GIRQ15 b[13] */
|
||||
ACPI_EC4_OBE_IRQn = 54, /*!< GIRQ15 b[14] */
|
||||
ACPI_PM1_CTL_IRQn = 55, /*!< GIRQ15 b[15] */
|
||||
ACPI_PM1_EN_IRQn = 56, /*!< GIRQ15 b[16] */
|
||||
ACPI_PM1_STS_IRQn = 57, /*!< GIRQ15 b[17] */
|
||||
KBC_OBE_IRQn = 58, /*!< GIRQ15 b[18] */
|
||||
KBC_IBF_IRQn = 59, /*!< GIRQ15 b[19] */
|
||||
MBOX_IRQn = 60, /*!< GIRQ15 b[20] */
|
||||
/* reserved 61 */
|
||||
P80BD_0_IRQn = 62, /*!< GIRQ15 b[22] */
|
||||
/* reserved 63-64 */
|
||||
PKE_IRQn = 65, /*!< GIRQ16 b[0] */
|
||||
/* reserved 66 */
|
||||
RNG_IRQn = 67, /*!< GIRQ16 b[2] */
|
||||
AESH_IRQn = 68, /*!< GIRQ16 b[3] */
|
||||
/* reserved 69 */
|
||||
PECI_IRQn = 70, /*!< GIRQ17 b[0] */
|
||||
TACH_0_IRQn = 71, /*!< GIRQ17 b[1] */
|
||||
TACH_1_IRQn = 72, /*!< GIRQ17 b[2] */
|
||||
TACH_2_IRQn = 73, /*!< GIRQ17 b[3] */
|
||||
RPMFAN_0_FAIL_IRQn = 74, /*!< GIRQ17 b[20] */
|
||||
RPMFAN_0_STALL_IRQn = 75, /*!< GIRQ17 b[21] */
|
||||
RPMFAN_1_FAIL_IRQn = 76, /*!< GIRQ17 b[22] */
|
||||
RPMFAN_1_STALL_IRQn = 77, /*!< GIRQ17 b[23] */
|
||||
ADC_SNGL_IRQn = 78, /*!< GIRQ17 b[8] */
|
||||
ADC_RPT_IRQn = 79, /*!< GIRQ17 b[9] */
|
||||
RCID_0_IRQn = 80, /*!< GIRQ17 b[10] */
|
||||
RCID_1_IRQn = 81, /*!< GIRQ17 b[11] */
|
||||
RCID_2_IRQn = 82, /*!< GIRQ17 b[12] */
|
||||
LED_0_IRQn = 83, /*!< GIRQ17 b[13] */
|
||||
LED_1_IRQn = 84, /*!< GIRQ17 b[14] */
|
||||
LED_2_IRQn = 85, /*!< GIRQ17 b[15] */
|
||||
LED_3_IRQn = 86, /*!< GIRQ17 b[16] */
|
||||
PHOT_IRQn = 87, /*!< GIRQ17 b[17] */
|
||||
/* reserved 88-89 */
|
||||
SPIP_0_IRQn = 90, /*!< GIRQ18 b[0] */
|
||||
QMSPI_0_IRQn = 91, /*!< GIRQ18 b[1] */
|
||||
GPSPI_0_TXBE_IRQn = 92, /*!< GIRQ18 b[2] */
|
||||
GPSPI_0_RXBF_IRQn = 93, /*!< GIRQ18 b[3] */
|
||||
GPSPI_1_TXBE_IRQn = 94, /*!< GIRQ18 b[4] */
|
||||
GPSPI_1_RXBF_IRQn = 95, /*!< GIRQ18 b[5] */
|
||||
BCL_0_ERR_IRQn = 96, /*!< GIRQ18 b[7] */
|
||||
BCL_0_BCLR_IRQn = 97, /*!< GIRQ18 b[6] */
|
||||
/* reserved 98-99 */
|
||||
PS2_0_ACT_IRQn = 100, /*!< GIRQ18 b[10] */
|
||||
/* reserved 101-102 */
|
||||
ESPI_PC_IRQn = 103, /*!< GIRQ19 b[0] */
|
||||
ESPI_BM1_IRQn = 104, /*!< GIRQ19 b[1] */
|
||||
ESPI_BM2_IRQn = 105, /*!< GIRQ19 b[2] */
|
||||
ESPI_LTR_IRQn = 106, /*!< GIRQ19 b[3] */
|
||||
ESPI_OOB_UP_IRQn = 107, /*!< GIRQ19 b[4] */
|
||||
ESPI_OOB_DN_IRQn = 108, /*!< GIRQ19 b[5] */
|
||||
ESPI_FLASH_IRQn = 109, /*!< GIRQ19 b[6] */
|
||||
ESPI_RESET_IRQn = 110, /*!< GIRQ19 b[7] */
|
||||
RTMR_IRQn = 111, /*!< GIRQ23 b[10] */
|
||||
HTMR_0_IRQn = 112, /*!< GIRQ23 b[16] */
|
||||
HTMR_1_IRQn = 113, /*!< GIRQ23 b[17] */
|
||||
WK_IRQn = 114, /*!< GIRQ21 b[3] */
|
||||
WKSUB_IRQn = 115, /*!< GIRQ21 b[4] */
|
||||
WKSEC_IRQn = 116, /*!< GIRQ21 b[5] */
|
||||
WKSUBSEC_IRQn = 117, /*!< GIRQ21 b[6] */
|
||||
WKSYSPWR_IRQn = 118, /*!< GIRQ21 b[7] */
|
||||
RTC_IRQn = 119, /*!< GIRQ21 b[8] */
|
||||
RTC_ALARM_IRQn = 120, /*!< GIRQ21 b[9] */
|
||||
VCI_OVRD_IN_IRQn = 121, /*!< GIRQ21 b[10] */
|
||||
VCI_IN0_IRQn = 122, /*!< GIRQ21 b[11] */
|
||||
VCI_IN1_IRQn = 123, /*!< GIRQ21 b[12] */
|
||||
VCI_IN2_IRQn = 124, /*!< GIRQ21 b[13] */
|
||||
VCI_IN3_IRQn = 125, /*!< GIRQ21 b[14] */
|
||||
VCI_IN4_IRQn = 126, /*!< GIRQ21 b[15] */
|
||||
/* reserved 127-128 */
|
||||
PS2_0A_WAKE_IRQn = 129, /*!< GIRQ21 b[18] */
|
||||
PS2_0B_WAKE_IRQn = 130, /*!< GIRQ21 b[19] */
|
||||
/* reserved 131-134 */
|
||||
KEYSCAN_IRQn = 135, /*!< GIRQ21 b[25] */
|
||||
B16TMR_0_IRQn = 136, /*!< GIRQ23 b[0] */
|
||||
B16TMR_1_IRQn = 137, /*!< GIRQ23 b[1] */
|
||||
B16TMR_2_IRQn = 138, /*!< GIRQ23 b[2] */
|
||||
B16TMR_3_IRQn = 139, /*!< GIRQ23 b[3] */
|
||||
B32TMR_0_IRQn = 140, /*!< GIRQ23 b[4] */
|
||||
B32TMR_1_IRQn = 141, /*!< GIRQ23 b[5] */
|
||||
CTMR_0_IRQn = 142, /*!< GIRQ23 b[6] */
|
||||
CTMR_1_IRQn = 143, /*!< GIRQ23 b[7] */
|
||||
CTMR_2_IRQn = 144, /*!< GIRQ23 b[8] */
|
||||
CTMR_3_IRQn = 145, /*!< GIRQ23 b[9] */
|
||||
CCT_IRQn = 146, /*!< GIRQ18 b[20] */
|
||||
CCT_CAP0_IRQn = 147, /*!< GIRQ18 b[21] */
|
||||
CCT_CAP1_IRQn = 148, /*!< GIRQ18 b[22] */
|
||||
CCT_CAP2_IRQn = 149, /*!< GIRQ18 b[23] */
|
||||
CCT_CAP3_IRQn = 150, /*!< GIRQ18 b[24] */
|
||||
CCT_CAP4_IRQn = 151, /*!< GIRQ18 b[25] */
|
||||
CCT_CAP5_IRQn = 152, /*!< GIRQ18 b[26] */
|
||||
CCT_CMP0_IRQn = 153, /*!< GIRQ18 b[27] */
|
||||
CCT_CMP1_IRQn = 154, /*!< GIRQ18 b[28] */
|
||||
EEPROMC_IRQn = 155, /*!< GIRQ18 b[13] */
|
||||
ESPI_VWIRE_IRQn = 156, /*!< GIRQ19 b[8] */
|
||||
/* reserved 157 */
|
||||
I2C_SMB_4_IRQn = 158, /*!< GIRQ13 b[4] */
|
||||
TACH_3_IRQn = 159, /*!< GIRQ17 b[4] */
|
||||
/* reserved 160-165 */
|
||||
SAF_DONE_IRQn = 166, /*!< GIRQ19 b[9] */
|
||||
SAF_ERR_IRQn = 167, /*!< GIRQ19 b[10] */
|
||||
/* reserved 168 */
|
||||
SAF_CACHE_IRQn = 169, /*!< GIRQ19 b[11] */
|
||||
/* reserved 170 */
|
||||
WDT_0_IRQn = 171, /*!< GIRQ21 b[2] */
|
||||
GLUE_IRQn = 172, /*!< GIRQ21 b[26] */
|
||||
OTP_RDY_IRQn = 173, /*!< GIRQ20 b[3] */
|
||||
CLK32K_MON_IRQn = 174, /*!< GIRQ20 b[9] */
|
||||
ACPI_EC0_IRQn = 175, /* ACPI EC OBE and IBF combined into one */
|
||||
ACPI_EC1_IRQn = 176, /* No GIRQ connection. Status in ACPI blocks */
|
||||
ACPI_EC2_IRQn = 177, /* Code uses level bits and NVIC bits */
|
||||
ACPI_EC3_IRQn = 178,
|
||||
ACPI_EC4_IRQn = 179,
|
||||
ACPI_PM1_IRQn = 180,
|
||||
MAX_IRQn
|
||||
} IRQn_Type;
|
||||
|
||||
#include <sys/util.h>
|
||||
|
||||
/* chip specific register defines */
|
||||
|
||||
@@ -205,12 +205,12 @@ void arch_start_cpu(int cpu_num, k_thread_stack_t *stack, int sz,

	sr.cpu = cpu_num;
	sr.fn = fn;
	sr.stack_top = Z_THREAD_STACK_BUFFER(stack) + sz;
	sr.stack_top = Z_KERNEL_STACK_BUFFER(stack) + sz;
	sr.arg = arg;
	sr.vecbase = vb;
	sr.alive = &alive_flag;

	appcpu_top = Z_THREAD_STACK_BUFFER(stack) + sz;
	appcpu_top = Z_KERNEL_STACK_BUFFER(stack) + sz;

	start_rec = &sr;

@@ -331,7 +331,7 @@ void arch_start_cpu(int cpu_num, k_thread_stack_t *stack, int sz,
	start_rec.vecbase = vecbase;
	start_rec.alive = 0;

	z_mp_stack_top = Z_THREAD_STACK_BUFFER(stack) + sz;
	z_mp_stack_top = Z_KERNEL_STACK_BUFFER(stack) + sz;

	/* Pre-2.x cAVS delivers the IDC to ROM code, so unmask it */
	CAVS_INTCTRL[cpu_num].l2.clear = CAVS_L2_IDC;
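For context on the Z_THREAD_STACK_BUFFER to Z_KERNEL_STACK_BUFFER switch above: the two macros assume different stack object layouts, and mixing them miscomputes the usable buffer. A minimal sketch, assuming the secondary-CPU stack is a kernel-only stack declared with K_KERNEL_STACK_DEFINE (appcpu_stack is an illustrative name, not from the patch):

#include <kernel.h>

K_KERNEL_STACK_DEFINE(appcpu_stack, 2048);

/* Derive the initial stack pointer for the secondary CPU; the kernel-stack
 * macros must be used as a pair, since the thread-stack variants account
 * for a userspace reserved area that this stack object does not have.
 */
static inline char *appcpu_stack_top(void)
{
	return Z_KERNEL_STACK_BUFFER(appcpu_stack) +
	       K_KERNEL_STACK_SIZEOF(appcpu_stack);
}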
@@ -10,6 +10,11 @@ menuconfig LOG

if LOG

config LOG_CORE_INIT_PRIORITY
	int "Log Core Initialization Priority"
	range 0 99
	default 0

rsource "Kconfig.mode"

rsource "Kconfig.filtering"

@@ -1276,4 +1276,4 @@ static int enable_logger(const struct device *arg)
	return 0;
}

SYS_INIT(enable_logger, POST_KERNEL, 0);
SYS_INIT(enable_logger, POST_KERNEL, CONFIG_LOG_CORE_INIT_PRIORITY);
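With the priority exposed as a Kconfig symbol, other POST_KERNEL init hooks can order themselves against the log core explicitly. A hypothetical sketch (my_subsys_init is illustrative, not part of this patch); with the default CONFIG_LOG_CORE_INIT_PRIORITY of 0, any higher priority number within POST_KERNEL runs after enable_logger():

#include <init.h>
#include <logging/log.h>

LOG_MODULE_REGISTER(my_subsys);

/* Runs after enable_logger() when CONFIG_LOG_CORE_INIT_PRIORITY is left at
 * its default of 0, so this message goes through an initialized log core.
 */
static int my_subsys_init(const struct device *dev)
{
	ARG_UNUSED(dev);
	LOG_INF("subsystem ready");
	return 0;
}

SYS_INIT(my_subsys_init, POST_KERNEL, 50);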
@@ -239,14 +239,16 @@ static bool start_http_client(void)
	int protocol = IPPROTO_TCP;
#endif

	(void)memset(&hints, 0, sizeof(hints));

	if (IS_ENABLED(CONFIG_NET_IPV6)) {
		hints.ai_family = AF_INET6;
		hints.ai_socktype = SOCK_STREAM;
	} else if (IS_ENABLED(CONFIG_NET_IPV4)) {
		hints.ai_family = AF_INET;
		hints.ai_socktype = SOCK_STREAM;
	}

	hints.ai_socktype = SOCK_STREAM;

	while (resolve_attempts--) {
		ret = getaddrinfo(CONFIG_HAWKBIT_SERVER, CONFIG_HAWKBIT_PORT,
				  &hints, &addr);
@@ -412,6 +414,8 @@ static int hawkbit_find_cancelAction_base(struct hawkbit_ctl_res *res,
		return 0;
	}

	LOG_DBG("_links.%s.href=%s", "cancelAction", href);

	helper = strstr(href, "cancelAction/");
	if (!helper) {
		/* A badly formatted cancel base is a server error */
@@ -465,6 +469,8 @@ static int hawkbit_find_deployment_base(struct hawkbit_ctl_res *res,
		return 0;
	}

	LOG_DBG("_links.%s.href=%s", "deploymentBase", href);

	helper = strstr(href, "deploymentBase/");
	if (!helper) {
		/* A badly formatted deployment base is a server error */
@@ -573,17 +579,6 @@ static int hawkbit_parse_deployment(struct hawkbit_dep_res *res,
	return 0;
}

static void hawkbit_dump_base(struct hawkbit_ctl_res *r)
{
	LOG_DBG("config.polling.sleep=%s", log_strdup(r->config.polling.sleep));
	LOG_DBG("_links.deploymentBase.href=%s",
		log_strdup(r->_links.deploymentBase.href));
	LOG_DBG("_links.configData.href=%s",
		log_strdup(r->_links.configData.href));
	LOG_DBG("_links.cancelAction.href=%s",
		log_strdup(r->_links.cancelAction.href));
}

static void hawkbit_dump_deployment(struct hawkbit_dep_res *d)
{
	struct hawkbit_dep_res_chunk *c = &d->deployment.chunks[0];
@@ -1098,9 +1093,9 @@ enum hawkbit_response hawkbit_probe(void)
	if (hawkbit_results.base.config.polling.sleep) {
		/* Update the sleep time. */
		hawkbit_update_sleep(&hawkbit_results.base);
		LOG_DBG("config.polling.sleep=%s", hawkbit_results.base.config.polling.sleep);
	}

	hawkbit_dump_base(&hawkbit_results.base);

	if (hawkbit_results.base._links.cancelAction.href) {
		ret = hawkbit_find_cancelAction_base(&hawkbit_results.base,
@@ -1127,6 +1122,8 @@ enum hawkbit_response hawkbit_probe(void)
	}

	if (hawkbit_results.base._links.configData.href) {
		LOG_DBG("_links.%s.href=%s", "configData",
			hawkbit_results.base._links.configData.href);
		memset(hb_context.url_buffer, 0, sizeof(hb_context.url_buffer));
		hb_context.dl.http_content_size = 0;
		hb_context.url_buffer_size = URL_BUFFER_SIZE;

@@ -425,13 +425,15 @@ config MCUMGR_BUF_USER_DATA_SIZE
	int "Size of mcumgr buffer user data"
	default 24 if MCUMGR_SMP_UDP && MCUMGR_SMP_UDP_IPV6
	default 8 if MCUMGR_SMP_UDP && MCUMGR_SMP_UDP_IPV4
	default 8 if MCUMGR_SMP_BT
	default 4
	help
	  The size, in bytes, of user data to allocate for each mcumgr buffer.

	  Different mcumgr transports impose different requirements for this
	  setting. A value of 4 is sufficient for UART, shell, and bluetooth.
	  For UDP, the userdata must be large enough to hold an IPv4/IPv6 address.
	  setting. A value of 4 is sufficient for UART and shell, a value of 8
	  is sufficient for Bluetooth. For UDP, the userdata must be large
	  enough to hold an IPv4/IPv6 address.

	  Note that CONFIG_NET_BUF_USER_DATA_SIZE must be at least as big as
	  MCUMGR_BUF_USER_DATA_SIZE.
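The "at least as big" requirement in this help text is the kind of invariant that can be enforced at build time rather than discovered at runtime. A minimal sketch (hypothetical placement, not part of the patch), mirroring the BUILD_ASSERT the transport code itself uses further below:

#include <toolchain.h>

/* Fails the build when the net buf user data area cannot hold mcumgr's
 * per-buffer user data, i.e. the relationship the help text requires.
 */
BUILD_ASSERT(CONFIG_NET_BUF_USER_DATA_SIZE >= CONFIG_MCUMGR_BUF_USER_DATA_SIZE,
	     "CONFIG_NET_BUF_USER_DATA_SIZE must be >= MCUMGR_BUF_USER_DATA_SIZE");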
@@ -1,5 +1,6 @@
/*
 * Copyright Runtime.io 2018. All rights reserved.
 * Copyright (c) 2022 Nordic Semiconductor ASA
 *
 * SPDX-License-Identifier: Apache-2.0
 */
@@ -21,13 +22,28 @@

#include <mgmt/mcumgr/smp.h>

#include <logging/log.h>
LOG_MODULE_REGISTER(smp_bt, CONFIG_MCUMGR_LOG_LEVEL);

struct device;

struct smp_bt_user_data {
	struct bt_conn *conn;
	uint8_t id;
};

/* Verification of user data being able to fit */
BUILD_ASSERT(sizeof(struct smp_bt_user_data) <= CONFIG_MCUMGR_BUF_USER_DATA_SIZE,
	     "CONFIG_MCUMGR_BUF_USER_DATA_SIZE not large enough to fit Bluetooth user data");

struct conn_param_data {
	struct bt_conn *conn;
	uint8_t id;
};

static uint8_t next_id;
static struct zephyr_smp_transport smp_bt_transport;
static struct conn_param_data conn_data[CONFIG_BT_MAX_CONN];

/* SMP service.
 * {8D53DC1D-1DB7-4CD3-868B-8A527460AA84}
@@ -41,6 +57,56 @@ static struct bt_uuid_128 smp_bt_svc_uuid = BT_UUID_INIT_128(
static struct bt_uuid_128 smp_bt_chr_uuid = BT_UUID_INIT_128(
	BT_UUID_128_ENCODE(0xda2e7828, 0xfbce, 0x4e01, 0xae9e, 0x261174997c48));

/* Helper function that allocates conn_param_data for a conn. */
static struct conn_param_data *conn_param_data_alloc(struct bt_conn *conn)
{
	for (size_t i = 0; i < ARRAY_SIZE(conn_data); i++) {
		if (conn_data[i].conn == NULL) {
			bool valid = false;

			conn_data[i].conn = conn;

			/* Generate an ID for this connection and reset semaphore */
			while (!valid) {
				valid = true;
				conn_data[i].id = next_id;
				++next_id;

				if (next_id == 0) {
					/* Avoid use of 0 (invalid ID) */
					++next_id;
				}

				for (size_t l = 0; l < ARRAY_SIZE(conn_data); l++) {
					if (l != i && conn_data[l].conn != NULL &&
					    conn_data[l].id == conn_data[i].id) {
						valid = false;
						break;
					}
				}
			}

			return &conn_data[i];
		}
	}

	/* Conn data must exist. */
	__ASSERT_NO_MSG(false);
	return NULL;
}

/* Helper function that returns conn_param_data associated with a conn. */
static struct conn_param_data *conn_param_data_get(const struct bt_conn *conn)
{
	for (size_t i = 0; i < ARRAY_SIZE(conn_data); i++) {
		if (conn_data[i].conn == conn) {
			return &conn_data[i];
		}
	}

	return NULL;
}

/**
 * Write handler for the SMP characteristic; processes an incoming SMP request.
 */
@@ -51,6 +117,12 @@ static ssize_t smp_bt_chr_write(struct bt_conn *conn,
{
	struct smp_bt_user_data *ud;
	struct net_buf *nb;
	struct conn_param_data *cpd = conn_param_data_get(conn);

	if (cpd == NULL) {
		printk("cpd is null");
		return len;
	}

	nb = mcumgr_buf_alloc();
	if (!nb) {
@@ -59,7 +131,8 @@ static ssize_t smp_bt_chr_write(struct bt_conn *conn,
	net_buf_add_mem(nb, buf, len);

	ud = net_buf_user_data(nb);
	ud->conn = bt_conn_ref(conn);
	ud->conn = conn;
	ud->id = cpd->id;

	zephyr_smp_rx_req(&smp_bt_transport, nb);

@@ -113,7 +186,7 @@ static struct bt_conn *smp_bt_conn_from_pkt(const struct net_buf *nb)
		return NULL;
	}

	return bt_conn_ref(ud->conn);
	return ud->conn;
}

/**
@@ -131,7 +204,6 @@ static uint16_t smp_bt_get_mtu(const struct net_buf *nb)
	}

	mtu = bt_gatt_get_mtu(conn);
	bt_conn_unref(conn);

	/* Account for the three-byte notification header. */
	return mtu - 3;
@@ -142,8 +214,8 @@ static void smp_bt_ud_free(void *ud)
	struct smp_bt_user_data *user_data = ud;

	if (user_data->conn) {
		bt_conn_unref(user_data->conn);
		user_data->conn = NULL;
		user_data->id = 0;
	}
}

@@ -153,7 +225,8 @@ static int smp_bt_ud_copy(struct net_buf *dst, const struct net_buf *src)
	struct smp_bt_user_data *dst_ud = net_buf_user_data(dst);

	if (src_ud->conn) {
		dst_ud->conn = bt_conn_ref(src_ud->conn);
		dst_ud->conn = src_ud->conn;
		dst_ud->id = src_ud->id;
	}

	return 0;
@@ -165,17 +238,26 @@ static int smp_bt_ud_copy(struct net_buf *dst, const struct net_buf *src)

static int smp_bt_tx_pkt(struct zephyr_smp_transport *zst, struct net_buf *nb)
{
	struct bt_conn *conn;
	struct smp_bt_user_data *ud = net_buf_user_data(nb);
	int rc;

	conn = smp_bt_conn_from_pkt(nb);
	if (conn == NULL) {
		rc = -1;
	} else {
		rc = smp_bt_tx_rsp(conn, nb->data, nb->len);
		bt_conn_unref(conn);
		struct conn_param_data *cpd = conn_param_data_get(conn);

		if (cpd == NULL) {
			rc = -1;
		} else if (cpd->id == 0 || cpd->id != ud->id) {
			/* Connection has been lost or is a different device */
			rc = -1;
		} else {
			rc = smp_bt_tx_rsp(conn, nb->data, nb->len);
		}
	}

	smp_bt_ud_free(net_buf_user_data(nb));
	smp_bt_ud_free(ud);
	mcumgr_buf_free(nb);

	return rc;
@@ -191,10 +273,41 @@ int smp_bt_unregister(void)
	return bt_gatt_service_unregister(&smp_bt_svc);
}

/* BT connected callback. */
static void connected(struct bt_conn *conn, uint8_t err)
{
	if (err == 0) {
		(void)conn_param_data_alloc(conn);
	}
}

/* BT disconnected callback. */
static void disconnected(struct bt_conn *conn, uint8_t reason)
{
	struct conn_param_data *cpd = conn_param_data_get(conn);

	/* Clear cpd. */
	if (cpd != NULL) {
		cpd->id = 0;
		cpd->conn = NULL;
	} else {
		LOG_ERR("Null cpd object for connection %p", (void *)conn);
	}
}

static int smp_bt_init(const struct device *dev)
{
	ARG_UNUSED(dev);

	next_id = 1;

	/* Register BT callbacks */
	static struct bt_conn_cb conn_callbacks = {
		.connected = connected,
		.disconnected = disconnected,
	};
	bt_conn_cb_register(&conn_callbacks);

	zephyr_smp_transport_init(&smp_bt_transport, smp_bt_tx_pkt,
				  smp_bt_get_mtu, smp_bt_ud_copy,
				  smp_bt_ud_free);
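Taken together, these hunks replace reference counting on the buffered bt_conn pointer with the generation-ID scheme introduced above: the raw pointer is only dereferenced after confirming the slot still belongs to the same connection. A minimal sketch of that stale-handle pattern in isolation (struct and function names are hypothetical, not from the patch):

#include <stdbool.h>
#include <stdint.h>
#include <stddef.h>

struct slot {
	void *obj;  /* NULL when the slot is free */
	uint8_t id; /* generation counter; 0 reserved as "invalid" */
};

/* A consumer that captured (obj, id) earlier revalidates both before use,
 * so a slot that was recycled for a newer connection is detected instead
 * of being dereferenced as if it were the old one.
 */
static bool handle_is_current(const struct slot *s, const void *obj, uint8_t id)
{
	return s != NULL && s->obj == obj && id != 0 && s->id == id;
}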
@@ -15,7 +15,9 @@ if NET_BUF

config NET_BUF_USER_DATA_SIZE
	int "Size of user_data available in every network buffer"
	default 8 if ((BT || NET_TCP2) && 64BIT) || BT_ISO
	default 24 if MCUMGR_SMP_UDP && MCUMGR_SMP_UDP_IPV6
	default 8 if MCUMGR_SMP_UDP && MCUMGR_SMP_UDP_IPV4
	default 8 if ((BT || NET_TCP2) && 64BIT) || BT_ISO || MCUMGR_SMP_BT
	default 4
	range 4 65535 if BT || NET_TCP2
	range 0 65535

@@ -231,7 +231,9 @@ static const char *tcp_flags(uint8_t flags)
			len += snprintk(buf + len, BUF_SIZE - len, "URG,");
		}

		buf[len - 1] = '\0'; /* delete the last comma */
		if (len > 0) {
			buf[len - 1] = '\0'; /* delete the last comma */
		}
	}
#undef BUF_SIZE
	return buf;

@@ -463,6 +463,11 @@ static ssize_t spair_write(void *obj, const void *buffer, size_t count)
	}

	if (will_block) {
		if (k_is_in_isr()) {
			errno = EAGAIN;
			res = -1;
			goto out;
		}

		for (int signaled = false, result = -1; !signaled;
		     result = -1) {
@@ -652,6 +657,11 @@ static ssize_t spair_read(void *obj, void *buffer, size_t count)
	}

	if (will_block) {
		if (k_is_in_isr()) {
			errno = EAGAIN;
			res = -1;
			goto out;
		}

		for (int signaled = false, result = -1; !signaled;
		     result = -1) {

tests/kernel/mutex/mutex_api/Kconfig (new file)
@@ -0,0 +1,8 @@
# SPDX-License-Identifier: Apache-2.0
# Copyright (c) 2022 Meta

config TEST_MUTEX_API_THREAD_CREATE_TICKS
	int "Wait time (in ticks) after thread creation"
	default 42

source "Kconfig"
@@ -386,6 +386,68 @@ void test_mutex_priority_inheritance(void)
	k_msleep(TIMEOUT+1000);
}

static void tThread_mutex_lock_should_fail(void *p1, void *p2, void *p3)
{
	k_timeout_t timeout;
	struct k_mutex *mutex = (struct k_mutex *)p1;

	timeout.ticks = 0;
	timeout.ticks |= (uint64_t)(uintptr_t)p2 << 32;
	timeout.ticks |= (uint64_t)(uintptr_t)p3 << 0;

	zassert_equal(-EAGAIN, k_mutex_lock(mutex, timeout), NULL);
}

/**
 * @brief Test fix for subtle race during priority inversion
 *
 * - A low priority thread (Tlow) locks mutex A.
 * - A high priority thread (Thigh) blocks on mutex A, boosting the priority
 *   of Tlow.
 * - Thigh times out waiting for mutex A.
 * - Before Thigh has a chance to execute, Tlow unlocks mutex A (which now
 *   has no owner) and drops its own priority.
 * - Thigh now gets a chance to execute and finds that it timed out, and
 *   then enters the block of code to lower the priority of the thread that
 *   owns mutex A (now nobody).
 * - Thigh tries to dereference the owner of mutex A (which is nobody,
 *   and thus it is NULL). This leads to an exception.
 *
 * @ingroup kernel_mutex_tests
 *
 * @see k_mutex_lock()
 */
static void test_mutex_timeout_race_during_priority_inversion(void)
{
	k_timeout_t timeout;
	uintptr_t timeout_upper;
	uintptr_t timeout_lower;
	int helper_prio = k_thread_priority_get(k_current_get()) + 1;

	k_mutex_init(&mutex);

	/* align to tick boundary */
	k_sleep(K_TICKS(1));

	/* allow non-kobject data to be shared (via registers) */
	timeout = K_TIMEOUT_ABS_TICKS(k_uptime_ticks()
		+ CONFIG_TEST_MUTEX_API_THREAD_CREATE_TICKS);
	timeout_upper = timeout.ticks >> 32;
	timeout_lower = timeout.ticks & BIT64_MASK(32);

	k_mutex_lock(&mutex, K_FOREVER);
	k_thread_create(&tdata, tstack, K_THREAD_STACK_SIZEOF(tstack),
			tThread_mutex_lock_should_fail, &mutex, (void *)timeout_upper,
			(void *)timeout_lower, helper_prio,
			K_USER | K_INHERIT_PERMS, K_NO_WAIT);

	k_thread_priority_set(k_current_get(), K_HIGHEST_THREAD_PRIO);

	k_sleep(timeout);

	k_mutex_unlock(&mutex);
}

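The shifts in tThread_mutex_lock_should_fail() and the matching ones in the test body implement a small pattern worth distilling: a 64-bit tick count is split across two pointer-sized thread arguments so it reaches a K_USER thread through registers, with no shared kernel object the user thread might lack permission to touch. A sketch of the pack/unpack pair (helper names hypothetical, not from the patch):

#include <stdint.h>
#include <sys/util.h>

/* Split a 64-bit tick count into two pointer-sized values. */
static inline void ticks_pack(uint64_t ticks, void **hi, void **lo)
{
	*hi = (void *)(uintptr_t)(ticks >> 32);
	*lo = (void *)(uintptr_t)(ticks & BIT64_MASK(32));
}

/* Reassemble the tick count inside the receiving thread. */
static inline uint64_t ticks_unpack(void *hi, void *lo)
{
	return ((uint64_t)(uintptr_t)hi << 32) |
	       (uint64_t)(uintptr_t)lo;
}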
/*test case main entry*/
void test_main(void)
{
@@ -400,7 +462,8 @@ void test_main(void)
	ztest_user_unit_test(test_mutex_reent_lock_timeout_fail),
	ztest_1cpu_user_unit_test(test_mutex_reent_lock_timeout_pass),
	ztest_user_unit_test(test_mutex_recursive),
	ztest_user_unit_test(test_mutex_priority_inheritance)
	ztest_user_unit_test(test_mutex_priority_inheritance),
	ztest_1cpu_unit_test(test_mutex_timeout_race_during_priority_inversion)
	);
	ztest_run_test_suite(mutex_api);
}

@@ -23,11 +23,9 @@ void test_posix_clock(void)
		NULL);
	zassert_equal(errno, EINVAL, NULL);

	clock_gettime(CLOCK_MONOTONIC, &ts);
	/* 2 Sec Delay */
	sleep(SLEEP_SECONDS);
	usleep(SLEEP_SECONDS * USEC_PER_SEC);
	clock_gettime(CLOCK_MONOTONIC, &te);
	zassert_ok(clock_gettime(CLOCK_MONOTONIC, &ts), NULL);
	zassert_ok(k_sleep(K_SECONDS(SLEEP_SECONDS)), NULL);
	zassert_ok(clock_gettime(CLOCK_MONOTONIC, &te), NULL);

	if (te.tv_nsec >= ts.tv_nsec) {
		secs_elapsed = te.tv_sec - ts.tv_sec;
@@ -38,7 +36,7 @@ void test_posix_clock(void)
	}

	/*TESTPOINT: Check if POSIX clock API test passes*/
	zassert_equal(secs_elapsed, (2 * SLEEP_SECONDS),
	zassert_equal(secs_elapsed, SLEEP_SECONDS,
		      "POSIX clock API test failed");

	printk("POSIX clock APIs test done\n");

@@ -37,6 +37,8 @@ extern void test_nanosleep_0_500000000(void);
extern void test_nanosleep_1_0(void);
extern void test_nanosleep_1_1(void);
extern void test_nanosleep_1_1001(void);
extern void test_sleep(void);
extern void test_usleep(void);

void test_main(void)
{
@@ -69,7 +71,9 @@ void test_main(void)
	ztest_unit_test(test_nanosleep_1_0),
	ztest_unit_test(test_nanosleep_1_1),
	ztest_unit_test(test_nanosleep_1_1001),
	ztest_unit_test(test_posix_pthread_create_negative)
	ztest_unit_test(test_posix_pthread_create_negative),
	ztest_unit_test(test_sleep),
	ztest_unit_test(test_usleep)
	);
	ztest_run_test_suite(posix_apis);
}

tests/posix/common/src/sleep.c (new file)
@@ -0,0 +1,87 @@
/*
 * Copyright (c) 2022, Meta
 *
 * SPDX-License-Identifier: Apache-2.0
 */

#include <posix/unistd.h>
#include <ztest.h>

struct waker_work {
	k_tid_t tid;
	struct k_work_delayable dwork;
};
static struct waker_work ww;

static void waker_func(struct k_work *work)
{
	struct waker_work *ww;
	struct k_work_delayable *dwork = k_work_delayable_from_work(work);

	ww = CONTAINER_OF(dwork, struct waker_work, dwork);
	k_wakeup(ww->tid);
}
K_WORK_DELAYABLE_DEFINE(waker, waker_func);

void test_sleep(void)
{
	uint32_t then;
	uint32_t now;
	/* call sleep(10), wake up after 1s, expect >= 8s left */
	const uint32_t sleep_min_s = 1;
	const uint32_t sleep_max_s = 10;
	const uint32_t sleep_rem_s = 8;

	/* sleeping for 0s should return 0 */
	zassert_ok(sleep(0), NULL);

	/* test that sleeping for 1s sleeps for at least 1s */
	then = k_uptime_get();
	zassert_equal(0, sleep(1), NULL);
	now = k_uptime_get();
	zassert_true((now - then) >= 1 * MSEC_PER_SEC, NULL);

	/* test that sleeping for 2s sleeps for at least 2s */
	then = k_uptime_get();
	zassert_equal(0, sleep(2), NULL);
	now = k_uptime_get();
	zassert_true((now - then) >= 2 * MSEC_PER_SEC, NULL);

	/* test that sleep reports the remainder */
	ww.tid = k_current_get();
	k_work_init_delayable(&ww.dwork, waker_func);
	zassert_equal(1, k_work_schedule(&ww.dwork, K_SECONDS(sleep_min_s)), NULL);
	zassert_true(sleep(sleep_max_s) >= sleep_rem_s, NULL);
}

void test_usleep(void)
{
	uint32_t then;
	uint32_t now;

	/* test usleep works for small values */
	/* Note: k_usleep(), an implementation detail, is a cancellation point */
	zassert_equal(0, usleep(0), NULL);
	zassert_equal(0, usleep(1), NULL);

	/* sleep for the spec limit */
	then = k_uptime_get();
	zassert_equal(0, usleep(USEC_PER_SEC - 1), NULL);
	now = k_uptime_get();
	zassert_true(((now - then) * USEC_PER_MSEC) / (USEC_PER_SEC - 1) >= 1, NULL);

	/* sleep for exactly the limit threshold */
	zassert_equal(-1, usleep(USEC_PER_SEC), NULL);
	zassert_equal(errno, EINVAL, NULL);

	/* sleep for over the spec limit */
	zassert_equal(-1, usleep((useconds_t)ULONG_MAX), NULL);
	zassert_equal(errno, EINVAL, NULL);

	/* test that sleep reports errno = EINTR when woken up */
	ww.tid = k_current_get();
	k_work_init_delayable(&ww.dwork, waker_func);
	zassert_equal(1, k_work_schedule(&ww.dwork, K_USEC(USEC_PER_SEC / 2)), NULL);
	zassert_equal(-1, usleep(USEC_PER_SEC - 1), NULL);
	zassert_equal(EINTR, errno, NULL);
}
@@ -2,6 +2,7 @@ tests:
  coredump.logging_backend:
    tags: ignore_faults ignore_qemu_crash
    filter: CONFIG_ARCH_SUPPORTS_COREDUMP
    platform_exclude: acrn_ehl_crb
    harness: console
    harness_config:
      type: multi_line
@@ -2,6 +2,7 @@ tests:
  coredump.backends.logging:
    tags: ignore_faults ignore_qemu_crash
    filter: CONFIG_ARCH_SUPPORTS_COREDUMP
    platform_exclude: acrn_ehl_crb
    harness: console
    harness_config:
      type: multi_line
west.yml
@@ -161,7 +161,7 @@ manifest:
    revision: 70bfbd21cdf5f6d1402bc8d0031e197222ed2ec0
    path: bootloader/mcuboot
  - name: mcumgr
    revision: 9ffebd5e92d9d069667b9af2a3a028f4a033cfd3
    revision: 480cb48e42703e49595a697bb2410a1fcd105f5e
    path: modules/lib/mcumgr
  - name: mipi-sys-t
    path: modules/debug/mipi-sys-t