diff --git a/.github/workflows/cve_scan_runner.yml b/.github/workflows/cve_scan_runner.yml
new file mode 100644
index 00000000000..09c779d9c1a
--- /dev/null
+++ b/.github/workflows/cve_scan_runner.yml
@@ -0,0 +1,36 @@
+name: cve-scan-runner
+
+on:
+ workflow_dispatch:
+
+ schedule:
+ - cron: "17 23 * * *"
+
+env:
+ # To see the script output in real time
+ PYTHONUNBUFFERED: 1
+
+jobs:
+ scan-and-open-issues:
+ runs-on: ubuntu-20.04
+
+ steps:
+ - name: Clone the osquery repository
+ uses: actions/checkout@v3
+
+ - name: Install python pre-requisites
+ run: |
+ pip3 install -r ./tools/ci/scripts/cve/requirements.txt
+
+ - name: Scan CVEs and open issues
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ NIST_API_KEY: ${{ secrets.NVD_API_KEY }}
+
+ run: |
+ ./tools/ci/scripts/cve/validate_manifest_libraries_versions.py --manifest libraries/third_party_libraries_manifest.json \
+ --repository .
+
+ ./tools/ci/scripts/cve/third_party_libraries_cves_scanner.py --manifest libraries/third_party_libraries_manifest.json \
+ --create_issues \
+ --debug
diff --git a/.github/workflows/hosted_runners.yml b/.github/workflows/hosted_runners.yml
index 18deb8f2ed1..b6839f63e18 100644
--- a/.github/workflows/hosted_runners.yml
+++ b/.github/workflows/hosted_runners.yml
@@ -75,8 +75,8 @@ jobs:
mkdir -p "${rel_build_path}"
ln -sf "$(pwd)" "${rel_source_path}"
- echo ::set-output name=SOURCE::$(realpath ${rel_source_path})
- echo ::set-output name=BINARY::$(realpath ${rel_build_path})
+ echo "SOURCE=$(realpath ${rel_source_path})" >> $GITHUB_OUTPUT
+ echo "BINARY=$(realpath ${rel_build_path})" >> $GITHUB_OUTPUT
- name: Configure the project
working-directory: ${{ steps.build_paths.outputs.BINARY }}
@@ -96,10 +96,30 @@ jobs:
+ # This job checks that the third party libraries manifest has the correct format
+ # and that it is up to date compared to the current state of the repository
+ check_libraries_manifest:
+ runs-on: ubuntu-20.04
+
+ steps:
+ - name: Clone the osquery repository
+ uses: actions/checkout@v3
+
+ - name: Install python pre-requisites
+ run: |
+ pip3 install -r ./tools/ci/scripts/cve/requirements.txt
+
+ - name: Verify the third party libraries manifest
+ run: |
+ ./tools/ci/scripts/cve/validate_manifest_libraries_versions.py --manifest libraries/third_party_libraries_manifest.json \
+ --repository .
+
+
+
# This job runs source code analysis tools (currently, just cppcheck)
check_source_code:
- needs: check_code_style
+ needs: [check_code_style, check_libraries_manifest]
runs-on: ${{ matrix.os }}
@@ -130,9 +150,9 @@ jobs:
mv .git "${rel_source_path}"
( cd "${rel_source_path}" && git reset --hard )
- echo ::set-output name=SOURCE::$(realpath ${rel_source_path})
- echo ::set-output name=BINARY::$(realpath ${rel_build_path})
- echo ::set-output name=REL_BINARY::${rel_build_path}
+ echo "SOURCE=$(realpath ${rel_source_path})" >> $GITHUB_OUTPUT
+ echo "BINARY=$(realpath ${rel_build_path})" >> $GITHUB_OUTPUT
+ echo "REL_BINARY=${rel_build_path}" >> $GITHUB_OUTPUT
- name: Update the cache (git submodules)
uses: actions/cache@v2
@@ -235,16 +255,16 @@ jobs:
shell: bash
id: build_job_count
run: |
- echo ::set-output name=VALUE::$(($(nproc) + 1))
+ echo "VALUE=$(($(nproc) + 1))" >> $GITHUB_OUTPUT
- name: Select the build options for the tests
shell: bash
id: tests_build_settings
run: |
if [[ "${{ matrix.build_type }}" == "RelWithDebInfo" ]] ; then
- echo ::set-output name=VALUE::OFF
+ echo "VALUE=OFF" >> $GITHUB_OUTPUT
else
- echo ::set-output name=VALUE::ON
+ echo "VALUE=ON" >> $GITHUB_OUTPUT
fi
# We don't have enough space on the worker to actually generate all
@@ -255,9 +275,9 @@ jobs:
id: debug_symbols_settings
run: |
if [[ "${{ matrix.build_type }}" == "Debug" ]] ; then
- echo ::set-output name=VALUE::ON
+ echo "VALUE=ON" >> $GITHUB_OUTPUT
else
- echo ::set-output name=VALUE::OFF
+ echo "VALUE=OFF" >> $GITHUB_OUTPUT
fi
# When we spawn in the container, we are root; create an unprivileged
@@ -267,7 +287,7 @@ jobs:
id: unprivileged_user
run: |
useradd -m -s /bin/bash unprivileged_user
- echo ::set-output name=NAME::unprivileged_user
+ echo "NAME=unprivileged_user" >> $GITHUB_OUTPUT
# Due to how the RPM packaging tools work, we have to adhere to some
# character count requirements in the build path vs source path.
@@ -295,13 +315,13 @@ jobs:
mv .git "${rel_src_path}"
( cd "${rel_src_path}" && git reset --hard )
- echo ::set-output name=SOURCE::$(realpath ${rel_src_path})
- echo ::set-output name=BINARY::$(realpath ${rel_build_path})
- echo ::set-output name=CCACHE::$(realpath ${rel_ccache_path})
- echo ::set-output name=PACKAGING::$(realpath ${rel_packaging_path})
- echo ::set-output name=PACKAGE_DATA::$(realpath ${rel_package_data_path})
- echo ::set-output name=REL_PACKAGE_BUILD::${rel_package_build_path}
- echo ::set-output name=PACKAGE_BUILD::$(realpath ${rel_package_build_path})
+ echo "SOURCE=$(realpath ${rel_src_path})" >> $GITHUB_OUTPUT
+ echo "BINARY=$(realpath ${rel_build_path})" >> $GITHUB_OUTPUT
+ echo "CCACHE=$(realpath ${rel_ccache_path})" >> $GITHUB_OUTPUT
+ echo "PACKAGING=$(realpath ${rel_packaging_path})" >> $GITHUB_OUTPUT
+ echo "PACKAGE_DATA=$(realpath ${rel_package_data_path})" >> $GITHUB_OUTPUT
+ echo "REL_PACKAGE_BUILD=${rel_package_build_path}" >> $GITHUB_OUTPUT
+ echo "PACKAGE_BUILD=$(realpath ${rel_package_build_path})" >> $GITHUB_OUTPUT
- name: Clone the osquery-packaging repository
run: |
@@ -435,12 +455,12 @@ jobs:
id: packages
shell: bash
run: |
- echo ::set-output name=REL_UNSIGNED_RELEASE_PACKAGE_DATA_PATH::${{ steps.build_paths.outputs.REL_PACKAGE_BUILD }}/package_data.tar.gz
- echo ::set-output name=REL_UNSIGNED_RELEASE_DEB_PATH::$(ls ${{ steps.build_paths.outputs.REL_PACKAGE_BUILD }}/*.deb)
- echo ::set-output name=REL_UNSIGNED_DEBUG_DEB_PATH::$(ls ${{ steps.build_paths.outputs.REL_PACKAGE_BUILD }}/*.ddeb)
- echo ::set-output name=REL_UNSIGNED_RELEASE_RPM_PATH::$(ls ${{ steps.build_paths.outputs.REL_PACKAGE_BUILD }}/osquery-?.*.rpm)
- echo ::set-output name=REL_UNSIGNED_DEBUG_RPM_PATH::$(ls ${{ steps.build_paths.outputs.REL_PACKAGE_BUILD }}/osquery-debuginfo-*.rpm)
- echo ::set-output name=REL_UNSIGNED_RELEASE_TGZ_PATH::$(ls ${{ steps.build_paths.outputs.REL_PACKAGE_BUILD }}/*linux_x86_64.tar.gz)
+ echo "REL_UNSIGNED_RELEASE_PACKAGE_DATA_PATH=${{ steps.build_paths.outputs.REL_PACKAGE_BUILD }}/package_data.tar.gz" >> $GITHUB_OUTPUT
+ echo "REL_UNSIGNED_RELEASE_DEB_PATH=$(ls ${{ steps.build_paths.outputs.REL_PACKAGE_BUILD }}/*.deb)" >> $GITHUB_OUTPUT
+ echo "REL_UNSIGNED_DEBUG_DEB_PATH=$(ls ${{ steps.build_paths.outputs.REL_PACKAGE_BUILD }}/*.ddeb)" >> $GITHUB_OUTPUT
+ echo "REL_UNSIGNED_RELEASE_RPM_PATH=$(ls ${{ steps.build_paths.outputs.REL_PACKAGE_BUILD }}/osquery-?.*.rpm)" >> $GITHUB_OUTPUT
+ echo "REL_UNSIGNED_DEBUG_RPM_PATH=$(ls ${{ steps.build_paths.outputs.REL_PACKAGE_BUILD }}/osquery-debuginfo-*.rpm)" >> $GITHUB_OUTPUT
+ echo "REL_UNSIGNED_RELEASE_TGZ_PATH=$(ls ${{ steps.build_paths.outputs.REL_PACKAGE_BUILD }}/*linux_x86_64.tar.gz)" >> $GITHUB_OUTPUT
- name: Store the unsigned release package data artifact
if: matrix.build_type == 'RelWithDebInfo'
@@ -515,7 +535,7 @@ jobs:
shell: bash
id: build_job_count
run: |
- echo ::set-output name=VALUE::$(($(sysctl -n hw.logicalcpu) + 1))
+ echo "VALUE=$(($(sysctl -n hw.logicalcpu) + 1))" >> $GITHUB_OUTPUT
- name: Setup the build paths
shell: bash
@@ -537,16 +557,16 @@ jobs:
${rel_package_data_path} \
${rel_package_build_path}
- echo ::set-output name=SOURCE::$(pwd)/${rel_src_path}
- echo ::set-output name=REL_SOURCE::${rel_src_path}
- echo ::set-output name=BINARY::$(pwd)/${rel_build_path}
- echo ::set-output name=CCACHE::$(pwd)/${rel_ccache_path}
- echo ::set-output name=DOWNLOADS::$(pwd)/${rel_downloads_path}
- echo ::set-output name=INSTALL::$(pwd)/${rel_install_path}
- echo ::set-output name=PACKAGING::$(pwd)/${rel_packaging_path}
- echo ::set-output name=PACKAGE_DATA::$(pwd)/${rel_package_data_path}
- echo ::set-output name=REL_PACKAGE_BUILD::${rel_package_build_path}
- echo ::set-output name=PACKAGE_BUILD::$(pwd)/${rel_package_build_path}
+ echo "SOURCE=$(pwd)/${rel_src_path}" >> $GITHUB_OUTPUT
+ echo "REL_SOURCE=${rel_src_path}" >> $GITHUB_OUTPUT
+ echo "BINARY=$(pwd)/${rel_build_path}" >> $GITHUB_OUTPUT
+ echo "CCACHE=$(pwd)/${rel_ccache_path}" >> $GITHUB_OUTPUT
+ echo "DOWNLOADS=$(pwd)/${rel_downloads_path}" >> $GITHUB_OUTPUT
+ echo "INSTALL=$(pwd)/${rel_install_path}" >> $GITHUB_OUTPUT
+ echo "PACKAGING=$(pwd)/${rel_packaging_path}" >> $GITHUB_OUTPUT
+ echo "PACKAGE_DATA=$(pwd)/${rel_package_data_path}" >> $GITHUB_OUTPUT
+ echo "REL_PACKAGE_BUILD=${rel_package_build_path}" >> $GITHUB_OUTPUT
+ echo "PACKAGE_BUILD=$(pwd)/${rel_package_build_path}" >> $GITHUB_OUTPUT
- name: Clone the osquery repository
uses: actions/checkout@v2
@@ -602,8 +622,11 @@ jobs:
gnu-sed
- name: Install tests dependencies
+ id: install_test_deps
run: |
- pip3 install setuptools \
+ python_root="/usr/local/Frameworks/Python.framework/Versions/Current"
+
+ ${python_root}/bin/pip3 install setuptools \
pexpect==3.3 \
psutil \
timeout_decorator \
@@ -611,6 +634,8 @@ jobs:
thrift==0.11.0 \
osquery
+ echo "PYTHON_ROOT=${python_root}" >> $GITHUB_OUTPUT
+
- name: Install CMake
shell: bash
run: |
@@ -624,14 +649,14 @@ jobs:
id: xcode_selector
run: |
xcode_path="/Applications/Xcode_13.0.app/Contents/Developer"
- echo ::set-output name=PATH::${path}
+ echo "PATH=${path}" >> $GITHUB_OUTPUT
sudo xcode-select -s "${xcode_path}"
if [[ "${{ matrix.architecture }}" == "x86_64" ]] ; then
- echo ::set-output name=DEPLOYMENT_TARGET::10.14
+ echo "DEPLOYMENT_TARGET=10.14" >> $GITHUB_OUTPUT
else
- echo ::set-output name=DEPLOYMENT_TARGET::10.15
+ echo "DEPLOYMENT_TARGET=10.15" >> $GITHUB_OUTPUT
fi
# We don't have enough space on the worker to actually generate all
@@ -642,9 +667,9 @@ jobs:
id: debug_symbols_settings
run: |
if [[ "${{ matrix.build_type }}" == "Debug" ]] ; then
- echo ::set-output name=VALUE::ON
+ echo "VALUE=ON" >> $GITHUB_OUTPUT
else
- echo ::set-output name=VALUE::OFF
+ echo "VALUE=OFF" >> $GITHUB_OUTPUT
fi
- name: Configure the project
@@ -663,6 +688,7 @@ jobs:
-DCMAKE_BUILD_TYPE:STRING="${{ matrix.build_type }}" \
-DOSQUERY_BUILD_TESTS=ON \
-DOSQUERY_NO_DEBUG_SYMBOLS=${{ steps.debug_symbols_settings.outputs.VALUE }} \
+ -DPython3_ROOT_DIR=${{ steps.install_test_deps.outputs.PYTHON_ROOT }} \
${{ steps.build_paths.outputs.SOURCE }}
- name: Build the project
@@ -705,7 +731,7 @@ jobs:
id: packages
shell: bash
run: |
- echo ::set-output name=REL_UNSIGNED_RELEASE_PACKAGE_DATA_PATH::$(ls ${{ steps.build_paths.outputs.REL_PACKAGE_BUILD }}/package_data.tar.gz)
+ echo "REL_UNSIGNED_RELEASE_PACKAGE_DATA_PATH=$(ls ${{ steps.build_paths.outputs.REL_PACKAGE_BUILD }}/package_data.tar.gz)" >> $GITHUB_OUTPUT
- name: Store the ${{ matrix.architecture }} unsigned release package data artifact
if: matrix.build_type == 'Release'
@@ -761,7 +787,8 @@ jobs:
- name: Install tests dependencies
run: |
- pip3 install setuptools \
+ python_root="/usr/local/Frameworks/Python.framework/Versions/Current"
+ ${python_root}/bin/pip3 install setuptools \
pexpect==3.3 \
psutil \
timeout_decorator \
@@ -870,8 +897,8 @@ jobs:
id: packages
shell: bash
run: |
- echo ::set-output name=REL_UNSIGNED_RELEASE_PKG_PATH::$(ls package_build/*.pkg)
- echo ::set-output name=REL_UNSIGNED_RELEASE_TGZ_PATH::$(ls package_build/*.tar.gz)
+ echo "REL_UNSIGNED_RELEASE_PKG_PATH=$(ls package_build/*.pkg)" >> $GITHUB_OUTPUT
+ echo "REL_UNSIGNED_RELEASE_TGZ_PATH=$(ls package_build/*.tar.gz)" >> $GITHUB_OUTPUT
- name: Store the PKG unsigned release packages
uses: actions/upload-artifact@v1
@@ -911,8 +938,8 @@ jobs:
$rel_sccache_path = "w\sccache"
$rel_downloads_path = "w\downloads"
$rel_install_path = "w\install"
- $rel_package_data_path="w\package_data"
- $rel_packaging_path="w\osquery-packaging"
+ $rel_package_data_path = "w\package_data"
+ $rel_packaging_path = "w\osquery-packaging"
New-Item -ItemType Directory -Force -Path $rel_build_path
New-Item -ItemType Directory -Force -Path $rel_sccache_path
@@ -922,14 +949,14 @@ jobs:
$base_dir = (Get-Item .).FullName
- echo "::set-output name=SOURCE::$base_dir\$rel_src_path"
- echo "::set-output name=REL_SOURCE::$rel_src_path"
- echo "::set-output name=BINARY::$base_dir\$rel_build_path"
- echo "::set-output name=SCCACHE::$base_dir\$rel_sccache_path"
- echo "::set-output name=DOWNLOADS::$base_dir\$rel_downloads_path"
- echo "::set-output name=INSTALL::$base_dir\$rel_install_path"
- echo "::set-output name=PACKAGING::$base_dir\$rel_packaging_path"
- echo "::set-output name=PACKAGE_DATA::$base_dir\$rel_package_data_path"
+ echo "SOURCE=$base_dir\$rel_src_path" >> $env:GITHUB_OUTPUT
+ echo "REL_SOURCE=$rel_src_path" >> $env:GITHUB_OUTPUT
+ echo "BINARY=$base_dir\$rel_build_path" >> $env:GITHUB_OUTPUT
+ echo "SCCACHE=$base_dir\$rel_sccache_path" >> $env:GITHUB_OUTPUT
+ echo "DOWNLOADS=$base_dir\$rel_downloads_path" >> $env:GITHUB_OUTPUT
+ echo "INSTALL=$base_dir\$rel_install_path" >> $env:GITHUB_OUTPUT
+ echo "PACKAGING=$base_dir\$rel_packaging_path" >> $env:GITHUB_OUTPUT
+ echo "PACKAGE_DATA=$base_dir\$rel_package_data_path" >> $env:GITHUB_OUTPUT
# Symbolic links are supported by default on Linux and macOS. On
# Windows, we have to enable them explicitly. They are used to
@@ -953,7 +980,7 @@ jobs:
cd ${{ steps.build_paths.outputs.SOURCE }}
$osquery_version=$(git describe --tags --abbrev=0)
- echo "::set-output name=VALUE::$osquery_version"
+ echo "VALUE=$osquery_version" >> $env:GITHUB_OUTPUT
- name: Clone the osquery-packaging repository
run: |
@@ -1005,7 +1032,7 @@ jobs:
$python_executable_path = $(Get-Command python.exe | Select-Object -ExpandProperty Definition)
$python_root_directory = (Get-Item $python_executable_path).Directory.FullName
- echo "::set-output name=VALUE::$python_root_directory"
+ echo "VALUE=$python_root_directory" >> $env:GITHUB_OUTPUT
# Install the Python dependencies needed for our testing framework
- name: Install tests prerequisites
@@ -1130,7 +1157,7 @@ jobs:
echo "Found compiler version $version"
- echo "::set-output name=COMPILER_VERSION::$version"
+ echo "COMPILER_VERSION=$version" >> $env:GITHUB_OUTPUT
- name: Update the cache (sccache)
uses: actions/cache@v2
@@ -1225,9 +1252,9 @@ jobs:
id: packages
shell: bash
run: |
- echo ::set-output name=REL_UNSIGNED_RELEASE_PACKAGE_DATA_PATH::$(ls *.zip)
- echo ::set-output name=REL_UNSIGNED_RELEASE_MSI_PATH::$(ls *.msi)
- echo ::set-output name=REL_UNSIGNED_RELEASE_NUPKG_PATH::$(ls *.nupkg)
+ echo "REL_UNSIGNED_RELEASE_PACKAGE_DATA_PATH=$(ls *.zip)" >> $GITHUB_OUTPUT
+ echo "REL_UNSIGNED_RELEASE_MSI_PATH=$(ls *.msi)" >> $GITHUB_OUTPUT
+ echo "REL_UNSIGNED_RELEASE_NUPKG_PATH=$(ls *.nupkg)" >> $GITHUB_OUTPUT
- name: Store the unsigned release package data artifact
uses: actions/upload-artifact@v1
diff --git a/.github/workflows/self_hosted_runners.yml b/.github/workflows/self_hosted_runners.yml
index beaf29147d1..4ce11997af7 100644
--- a/.github/workflows/self_hosted_runners.yml
+++ b/.github/workflows/self_hosted_runners.yml
@@ -71,8 +71,8 @@ jobs:
mkdir -p "${rel_build_path}"
ln -sf "$(pwd)" "${rel_source_path}"
- echo ::set-output name=SOURCE::$(realpath ${rel_source_path})
- echo ::set-output name=BINARY::$(realpath ${rel_build_path})
+ echo "SOURCE=$(realpath ${rel_source_path})" >> $GITHUB_OUTPUT
+ echo "BINARY=$(realpath ${rel_build_path})" >> $GITHUB_OUTPUT
- name: Configure the project
working-directory: ${{ steps.build_paths.outputs.BINARY }}
@@ -179,7 +179,7 @@ jobs:
shell: bash
id: build_job_count
run: |
- echo ::set-output name=VALUE::$(($(nproc) + 1))
+ echo "VALUE=$(($(nproc) + 1))" >> $GITHUB_OUTPUT
# We don't have enough space on the worker to actually generate all
# the debug symbols (osquery + dependencies), so we have a flag to
@@ -189,9 +189,9 @@ jobs:
id: debug_symbols_settings
run: |
if [[ "${{ matrix.build_type }}" == "Debug" ]] ; then
- echo ::set-output name=VALUE::ON
+ echo "VALUE=ON" >> $GITHUB_OUTPUT
else
- echo ::set-output name=VALUE::OFF
+ echo "VALUE=OFF" >> $GITHUB_OUTPUT
fi
# When we spawn in the container, we are root; create an unprivileged
@@ -200,7 +200,7 @@ jobs:
id: unprivileged_user
run: |
useradd -m -s /bin/bash unprivileged_user
- echo ::set-output name=NAME::unprivileged_user
+ echo "NAME=unprivileged_user" >> $GITHUB_OUTPUT
# Due to how the RPM packaging tools work, we have to adhere to some
# character count requirements in the build path vs source path.
@@ -228,13 +228,13 @@ jobs:
mv .git "${rel_src_path}"
( cd "${rel_src_path}" && git reset --hard )
- echo ::set-output name=SOURCE::$(realpath ${rel_src_path})
- echo ::set-output name=BINARY::$(realpath ${rel_build_path})
- echo ::set-output name=CCACHE::$(realpath ${rel_ccache_path})
- echo ::set-output name=PACKAGING::$(realpath ${rel_packaging_path})
- echo ::set-output name=PACKAGE_DATA::$(realpath ${rel_package_data_path})
- echo ::set-output name=REL_PACKAGE_BUILD::${rel_package_build_path}
- echo ::set-output name=PACKAGE_BUILD::$(realpath ${rel_package_build_path})
+ echo "SOURCE=$(realpath ${rel_src_path})" >> $GITHUB_OUTPUT
+ echo "BINARY=$(realpath ${rel_build_path})" >> $GITHUB_OUTPUT
+ echo "CCACHE=$(realpath ${rel_ccache_path})" >> $GITHUB_OUTPUT
+ echo "PACKAGING=$(realpath ${rel_packaging_path})" >> $GITHUB_OUTPUT
+ echo "PACKAGE_DATA=$(realpath ${rel_package_data_path})" >> $GITHUB_OUTPUT
+ echo "REL_PACKAGE_BUILD=${rel_package_build_path}" >> $GITHUB_OUTPUT
+ echo "PACKAGE_BUILD=$(realpath ${rel_package_build_path})" >> $GITHUB_OUTPUT
- name: Clone the osquery-packaging repository
run: |
@@ -364,12 +364,12 @@ jobs:
id: packages
shell: bash
run: |
- echo ::set-output name=REL_UNSIGNED_RELEASE_PACKAGE_DATA_PATH::${{ steps.build_paths.outputs.REL_PACKAGE_BUILD }}/package_data.tar.gz
- echo ::set-output name=REL_UNSIGNED_RELEASE_DEB_PATH::$(ls ${{ steps.build_paths.outputs.REL_PACKAGE_BUILD }}/*.deb)
- echo ::set-output name=REL_UNSIGNED_DEBUG_DEB_PATH::$(ls ${{ steps.build_paths.outputs.REL_PACKAGE_BUILD }}/*.ddeb)
- echo ::set-output name=REL_UNSIGNED_RELEASE_RPM_PATH::$(ls ${{ steps.build_paths.outputs.REL_PACKAGE_BUILD }}/osquery-?.*.rpm)
- echo ::set-output name=REL_UNSIGNED_DEBUG_RPM_PATH::$(ls ${{ steps.build_paths.outputs.REL_PACKAGE_BUILD }}/osquery-debuginfo-*.rpm)
- echo ::set-output name=REL_UNSIGNED_RELEASE_TGZ_PATH::$(ls ${{ steps.build_paths.outputs.REL_PACKAGE_BUILD }}/*linux_aarch64.tar.gz)
+ echo "REL_UNSIGNED_RELEASE_PACKAGE_DATA_PATH=${{ steps.build_paths.outputs.REL_PACKAGE_BUILD }}/package_data.tar.gz" >> $GITHUB_OUTPUT
+ echo "REL_UNSIGNED_RELEASE_DEB_PATH=$(ls ${{ steps.build_paths.outputs.REL_PACKAGE_BUILD }}/*.deb)" >> $GITHUB_OUTPUT
+ echo "REL_UNSIGNED_DEBUG_DEB_PATH=$(ls ${{ steps.build_paths.outputs.REL_PACKAGE_BUILD }}/*.ddeb)" >> $GITHUB_OUTPUT
+ echo "REL_UNSIGNED_RELEASE_RPM_PATH=$(ls ${{ steps.build_paths.outputs.REL_PACKAGE_BUILD }}/osquery-?.*.rpm)" >> $GITHUB_OUTPUT
+ echo "REL_UNSIGNED_DEBUG_RPM_PATH=$(ls ${{ steps.build_paths.outputs.REL_PACKAGE_BUILD }}/osquery-debuginfo-*.rpm)" >> $GITHUB_OUTPUT
+ echo "REL_UNSIGNED_RELEASE_TGZ_PATH=$(ls ${{ steps.build_paths.outputs.REL_PACKAGE_BUILD }}/*linux_aarch64.tar.gz)" >> $GITHUB_OUTPUT
- name: Store the unsigned release package data artifact
if: matrix.build_type == 'RelWithDebInfo'
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 3cc9e7cf6fa..558378b76ef 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,48 @@
# osquery Changelog
+
+## [5.6.0](https://github.com/osquery/osquery/releases/tag/5.6.0)
+
+[Git Commits](https://github.com/osquery/osquery/compare/5.5.1...5.6.0)
+
+Representing commits from 10 contributors! Thank you all.
+
+### Table Changes
+
+- Add `firmware_type` column to `platform_info` on macOS ([#7727](https://github.com/osquery/osquery/pull/7727))
+- Add additional vendor support for the windows `wmi_bios_info` table ([#7631](https://github.com/osquery/osquery/pull/7631))
+- Fix `docker_container_processes` on macOS ([#7746](https://github.com/osquery/osquery/pull/7746))
+- Fix `process_file_events` subscriber being incorrectly initialized ([#7759](https://github.com/osquery/osquery/pull/7759))
+- Fix `secureboot` on windows by acquiring the necessary process privileges ([#7743](https://github.com/osquery/osquery/pull/7743))
+- Improve macOS `mdfind` -- Reduce table overhead and support interruption ([#7738](https://github.com/osquery/osquery/pull/7738))
+- Remove `binary` column from `firefox_addons` table ([#7735](https://github.com/osquery/osquery/pull/7735))
+- Remove `is_running` column from macOS `running_apps` table ([#7774](https://github.com/osquery/osquery/pull/7774))
+
+### Under the Hood improvements
+
+- Add `notes` field to the schema and associated json ([#7747](https://github.com/osquery/osquery/pull/7747))
+- Add extended platforms to the schema and associated json ([#7760](https://github.com/osquery/osquery/pull/7760))
+- Fix a leak and improve users and groups APIs on Windows ([#7755](https://github.com/osquery/osquery/pull/7755))
+- Have `--tls_dump` output body to `stderr` ([#7715](https://github.com/osquery/osquery/pull/7715))
+- Improvements to osquery AWS logic ([#7714](https://github.com/osquery/osquery/pull/7714))
+- Remove leftover FreeBSD related code and documentation ([#7739](https://github.com/osquery/osquery/pull/7739))
+
+### Documentation
+
+- CHANGELOG 5.5.1 ([#7737](https://github.com/osquery/osquery/pull/7737))
+- Correct the description on how to configure and use Yara signature urls ([#7769](https://github.com/osquery/osquery/pull/7769))
+- Document difference between `yara` and `yara_events` ([#7744](https://github.com/osquery/osquery/pull/7744))
+- Link to the slack archives ([#7786](https://github.com/osquery/osquery/pull/7786))
+- Update docs: `_changes` tables are not evented ([#7762](https://github.com/osquery/osquery/pull/7762))
+
+### Build
+
+- Delete temporary CTest files ([#7782](https://github.com/osquery/osquery/pull/7782))
+- Fix table tests for macOS `running_apps` ([#7775](https://github.com/osquery/osquery/pull/7775))
+- Fix table tests for windows `platform_info` ([#7742](https://github.com/osquery/osquery/pull/7742))
+- Migrate jobs from ubuntu-18.04 to ubuntu-20.04 ([#7745](https://github.com/osquery/osquery/pull/7745))
+- Remove unused find_packages modules and submodule ([#7771](https://github.com/osquery/osquery/pull/7771))
+
## [5.5.1](https://github.com/osquery/osquery/releases/tag/5.5.1)
diff --git a/docs/wiki/deployment/yara.md b/docs/wiki/deployment/yara.md
index 683917ae00a..c6e3f55b43a 100644
--- a/docs/wiki/deployment/yara.md
+++ b/docs/wiki/deployment/yara.md
@@ -1,5 +1,7 @@
# YARA-based scanning with osquery
+YARA is a tool that allows you to find textual or binary patterns inside of files.
+
There are two YARA-related tables in osquery, which serve very different purposes. The first table, called
`yara_events`, uses osquery's [Events framework](../development/pubsub-framework.md) to monitor for filesystem changes
and will execute YARA when a file change event fires. The second table, just called `yara`, is a table for performing an
@@ -8,6 +10,8 @@ on-demand YARA scan.
In this document, "signature file" is intended to be synonymous with "YARA rule file" (plain-text files commonly
distributed with a `.yar` or `.yara` filename extension, although any extension is allowed).
+For more information about YARA, check out the [documentation](https://yara.readthedocs.io/en/stable/).
+
## YARA Configuration
The configuration for osquery is simple. Here is an example config, grouping some YARA rule files from the local
diff --git a/docs/wiki/development/cve-scan.md b/docs/wiki/development/cve-scan.md
new file mode 100644
index 00000000000..a5c2cf1af67
--- /dev/null
+++ b/docs/wiki/development/cve-scan.md
@@ -0,0 +1,97 @@
+# CVE Scan
+
+The osquery project has a CI job which, once a day, scans for CVEs that are present and not yet addressed in its third party libraries.
+The scan is done by a Python script at `tools/ci/scripts/cve/third_party_libraries_cves_scanner.py`, which queries the NIST database via the NVD APIs; a manifest file at `libraries/third_party_libraries_manifest.json` contains the list of third party libraries and the metadata necessary to correctly download the CVEs.
+
+The manifest file format is validated every time the CVEs are downloaded and on every PR by the script `tools/ci/scripts/cve/validate_manifest_libraries_versions.py`; additionally, the third party library versions in the manifest are verified, to ensure that they are up to date with their state in the repository.
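+
+The same validation can be run locally before opening a PR; the commands below mirror what the CI workflows do:
+
+```sh
+# Install the script prerequisites
+pip3 install -r ./tools/ci/scripts/cve/requirements.txt
+
+# Check the manifest format and that the library versions match the state of the repository
+./tools/ci/scripts/cve/validate_manifest_libraries_versions.py \
+  --manifest libraries/third_party_libraries_manifest.json \
+  --repository .
+```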
+
+After having downloaded the list of CVEs, the script will open issues in the osquery repository for all the unresolved CVEs, checking against the already opened ones to prevent duplicates; the scan goes back in time up to 6 months.
+The issues can be recognized because they will be opened by the `github-bot` author and will have the `security`, `libraries`, `cve` and `severity-` labels on them.
+
+NOTE: This product uses the NVD API but is not endorsed or certified by the NVD.
+
+# Updating a third party library to resolve a CVE
+
+The process of updating a third party library is the usual one, but in the PR updating the library the contributor MUST:
+
+ 1. Link the CVE(s) issue(s) the PR is going to close, so that when it's merged, they are automatically closed
+ 2. Update the manifest and specifically the `version` and `commit` fields with the information of the new library.
+ Remember that the `commit` has to be the commit of the submodule in the osquery repository, which might not always match the commit of the library's original repository.
+
+Failing to do step 1 only leads to having to manually close those issues and link them back to the PR for tracking purposes.
+
+Failing to do step 2 will lead to the PR not being mergeable, because the CI checks the `commit` field against the actual git submodule commit.
+Note that if the `commit` is updated but not the `version`, this will not be detected by the CI and the periodic scan will use the incorrect
+version to download CVEs, finding the fixed CVE again and reopening the issue.
+If this happens, one just needs to open another PR that updates the `version` correctly.
+
+Any other situation where the `version` is incorrect (older than the previous one, or newer than the version actually in use) is also not detected,
+and will either cause the script to open issues for already fixed CVEs or to miss CVEs, so it's very important that the PR review process
+double-checks the new `version`.
+
+Important: Do not merge this kind of PR if the CI CVE scan job is running (which happens only once a day between 23:00 and 00:00 UTC), otherwise the job could start with an old view of the repository and open new issues for already fixed CVEs.
+If this happens, we just need to close or even delete those issues, but avoiding it saves additional work and confusion.
+
+# Ignoring a CVE not affecting osquery
+
+There are cases where the API returns CVEs that do not affect a third party library directly, but instead affect other software that uses that library. There might be something we can do in the future to resolve what seems to be a bug in the API, but for now it's possible to list in the manifest the CVEs that should be ignored, so that issues for those are not opened again in the future.
+
+Additionally, we often have the case where a CVE does not affect osquery because of how the third party library is used, or which parts of it are used, so having a way to ignore a CVE helps with that too.
+
+The process therefore is to:
+
+ 1. Open a PR which updates the manifest, specifically the `ignored-cves` field of the library the CVE comes from (see the example below).
+ 2. Describe in the CVE(s) issue(s) the reason why they are going to be closed.
+ 3. Link the above issues to the PR, so that they are closed when the PR gets merged.
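+
+As a reference, the `zstd` entry in the current manifest ignores one CVE this way:
+
+```json
+"zstd": {
+    "product": "zstandard",
+    "vendor": "facebook",
+    "version": "1.4.0",
+    "commit": "83b51e9f886be7c2a4d477b6e7bc6db831791d8d",
+    "ignored-cves": ["CVE-2021-24031"]
+}
+```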
+
+Important: As with updating a library, one has to ensure that the CI CVE scan job is not running when merging.
+
+# Adding a new library
+
+When a new library gets added, the manifest needs to be updated too, otherwise the CI check that verifies the manifest in the PR will fail.
+
+Currently the JSON format for a third party library as a submodule (taking as an example `libdpkg`) is:
+
+```json
+"libdpkg": {
+ "product": "dpkg",
+ "vendor": "debian",
+ "version": "1.21.7",
+ "commit": "e61f582015a9c67bbb3791cb93a864cfeb9c7151",
+ "ignored-cves": []
+},
+```
+
+The name of the library, `libdpkg`, and the `commit` field must match, respectively, the name of the folder that contains the submodule source code folder, and the commit the submodule is currently pinned to.
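+
+To find the commit to put in the manifest, one can ask git for the current submodule state (a quick sketch; the `grep` filter is just an example):
+
+```sh
+# Lists every submodule together with the commit it is currently pinned to;
+# filter the output for the library that was updated.
+git submodule status | grep libdpkg
+```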
+
+The `product`, `vendor` and `version` fields are instead used in the NVD APIs, and they must match what the NIST database uses.
+This is a matter of using the CPE search at https://nvd.nist.gov/products/cpe/search and trying to find the correct `vendor`
+with a CPE like `cpe:2.3:a:**:*:*:*:*:*:*:*:*:*`, and the correct `product` with `cpe:2.3:a::**:*:*:*:*:*:*:*:*`.
+
+Another way is to download the full dictionary from https://nvd.nist.gov/feeds/xml/cpe/dictionary/official-cpe-dictionary_v2.3.xml.gz and then play with grep and awk.
+For instance, to see all the unique products that the `amazon` vendor has:
+```sh
+cat official-cpe-dictionary_v2.3.xml | grep -Eo "cpe:2.3:a:amazon:[^\"]*" | awk -F ":" '{ print $5 }' | sort | uniq | less
+```
+
+For the `ignored-cves` field, refer to [Ignoring a CVE not affecting osquery](#ignoring-a-cve-not-affecting-osquery); the field will likely be empty at the beginning.
+
+## Special cases
+
+### Libraries without a CPE
+
+Some libraries do not have a CPE assigned, so no CVEs will be found for them in the NIST database. We still track these libraries in the manifest, because the validation script checks for them, but fewer fields can be provided; only `vendor` and `commit` are required.
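+
+For example, this is how the `ebpfpub` entry looks in the current manifest:
+
+```json
+"ebpfpub": {
+    "vendor": "trailofbits",
+    "commit": "71c8554bbbf3f78e9c9ea39fd9f349fccf997dce"
+}
+```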
+
+The script also needs to know that this is a library of that kind, though, so the contributor has to update the list of libraries that do not have a CPE in `tools/ci/scripts/cve/osquery/manifest_api.py`.
+
+### Libraries not imported as a submodule
+
+Right now there's only one such case, and ideally it will remain the only one: `openssl` is not a submodule, so there's no `commit` to use to check if the manifest is up to date. `version` is used instead, and it's parsed from the CMake file at `libraries/cmake/formula/openssl/CMakeLists.txt`.
+If osquery ever needs to add another library of this kind, then the logic to get its version should be added to the `tools/ci/scripts/cve/validate_manifest_libraries_versions.py` script.
+
+Additionally, the name of the library should be added in `tools/ci/scripts/cve/osquery/manifest_api.py`; finally, the manifest field requirements are the same as for a normal library, just without the `commit` field.
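+
+For reference, this is how the `openssl` entry looks in the current manifest (note the missing `commit` field):
+
+```json
+"openssl": {
+    "product": "openssl",
+    "vendor": "openssl",
+    "version": "1.1.1q",
+    "ignored-cves": [
+        "CVE-2007-5536",
+        "CVE-2019-0190"
+    ]
+}
+```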
+
+### Libraries not used in the release build
+
+Another case is when a library is only used for testing purposes; it doesn't need to be tracked in the manifest, but it still needs to be ignored by the script that checks that all the necessary libraries are present and up to date in the manifest.
+The script to update is `tools/ci/scripts/cve/validate_manifest_libraries_versions.py` (the current example is `googletest`).
diff --git a/libraries/third_party_libraries_manifest.json b/libraries/third_party_libraries_manifest.json
new file mode 100644
index 00000000000..981abcef6a6
--- /dev/null
+++ b/libraries/third_party_libraries_manifest.json
@@ -0,0 +1,300 @@
+{
+ "openssl": {
+ "product": "openssl",
+ "vendor": "openssl",
+ "version": "1.1.1q",
+ "ignored-cves": [
+ "CVE-2007-5536",
+ "CVE-2019-0190"
+ ]
+ },
+ "augeas": {
+ "product": "augeas",
+ "vendor": "augeas",
+ "version": "1.12.0",
+ "commit": "d133d9786350f1560c2f36d49da07d477c52aa76",
+ "ignored-cves": []
+ },
+ "aws-c-auth": {
+ "vendor": "aws",
+ "commit": "ace1311f8ef6ea890b26dd376031bed2721648eb"
+ },
+ "aws-c-cal": {
+ "vendor": "aws",
+ "commit": "aa89aa4950074babe84762413f39bd364ecaf944"
+ },
+ "aws-c-common": {
+ "vendor": "aws",
+ "commit": "c258a154bb89db73eff60a467a0750ee5435ebc6"
+ },
+ "aws-c-compression": {
+ "vendor": "aws",
+ "commit": "fc1631ea1ce563b0a453cb7a7233fca949e36970"
+ },
+ "aws-c-event-stream": {
+ "vendor": "aws",
+ "commit": "e87537be561d753ec82e783bc0929b1979c585f8"
+ },
+ "aws-c-http": {
+ "vendor": "aws",
+ "commit": "fa1692ae103dcc40e3d0a9db1b29acfc204a294e"
+ },
+ "aws-c-io": {
+ "vendor": "aws",
+ "commit": "14b5e6d73b41eeabf04fc6228276eb1eb59bd99c"
+ },
+ "aws-c-mqtt": {
+ "vendor": "aws",
+ "commit": "0a70bf814845e487b7e4862af7ad9e4a1199b5f4"
+ },
+ "aws-c-s3": {
+ "vendor": "aws",
+ "commit": "bd1f6910503165963506f9f168c87416996197b6"
+ },
+ "aws-checksums": {
+ "vendor": "aws",
+ "commit": "99bb0ad4b89d335d638536694352c45e0d2188f5"
+ },
+ "aws-crt-cpp": {
+ "vendor": "aws",
+ "commit": "c2d6ffa5597825111cc76ad71ffc6aef664d0f25"
+ },
+ "aws-sdk-cpp": {
+ "vendor": "aws",
+ "commit": "1135c3ac31df6ab68d3bf58fc6679368da1f09e0"
+ },
+ "s2n": {
+ "vendor": "aws",
+ "commit": "7f43b102def1d52422f6c3e48d5cb3e6dd26c646"
+ },
+ "gnulib": {
+ "product": "gnulib",
+ "vendor": "gnulib",
+ "date": "2019-04-07",
+ "commit": "91584ed612fa26a505c8fc4c7f6fb19f7413795d",
+ "ignored-cves": []
+ },
+ "boost": {
+ "product": "boost",
+ "vendor": "boost",
+ "version": "1.77.0",
+ "commit": "9d3f9bcd7d416880d4631d7d39cceeb4e8f25da0",
+ "ignored-cves": []
+ },
+ "bzip2": {
+ "product": "bzip2",
+ "vendor": "bzip",
+ "version": "1.0.8",
+ "commit": "6a8690fc8d26c815e798c588f796eabe9d684cf0",
+ "ignored-cves": []
+ },
+ "dbus": {
+ "product": "d-bus",
+ "vendor": "d-bus_project",
+ "version": "1.12.20",
+ "commit": "ab88811768f750777d1a8b9d9ab12f13390bfd3a",
+ "ignored-cves": []
+ },
+ "ebpfpub": {
+ "vendor": "trailofbits",
+ "commit": "71c8554bbbf3f78e9c9ea39fd9f349fccf997dce"
+ },
+ "expat": {
+ "product": "expat",
+ "vendor": "libexpat_project",
+ "version": "2.4.7",
+ "commit": "27d5b8ba1771f916d9cfea2aac6bdac72071dc66",
+ "ignored-cves": []
+ },
+ "gflags": {
+ "vendor": "google",
+ "version": "2.2.2",
+ "commit": "e171aa2d15ed9eb17054558e0b3a6a413bb01067"
+ },
+ "glog": {
+ "vendor": "google",
+ "version": "0.5.0",
+ "commit": "8f9ccfe770add9e4c64e9b25c102658e3c763b73"
+ },
+ "libarchive": {
+ "product": "libarchive",
+ "vendor": "libarchive",
+ "version": "3.6.1",
+ "commit": "6c3301111caa75c76e1b2acb1afb2d71341932ef",
+ "ignored-cves": []
+ },
+ "libaudit": {
+ "product": "audit-userspace",
+ "vendor": "audit-userspace_project",
+ "version": "2.4.3",
+ "commit": "20204ccf43e856818d7ef695242192b3f5963aae",
+ "ignored-cves": []
+ },
+ "libcap": {
+ "product": "libcap",
+ "vendor": "libcap_project",
+ "version": "2.59",
+ "commit": "9eb56596eef5e55a596aa97ecaf8466ea559d05c",
+ "ignored-cves": []
+ },
+ "libcryptsetup": {
+ "product": "cryptsetup",
+ "vendor": "cryptsetup_project",
+ "version": "1.7.5",
+ "commit": "0ba577666c62bb3c82e90f3c8dd01f3f81a26cf4",
+ "ignored-cves": []
+ },
+ "libdevmapper": {
+ "product": "lvm2",
+ "vendor": "redhat",
+ "version": "2.02.173",
+ "commit": "88f15659374042f7657d73393f73e267d752b4e1",
+ "ignored-cves": []
+ },
+ "libdpkg": {
+ "product": "dpkg",
+ "vendor": "debian",
+ "version": "1.21.7",
+ "commit": "e61f582015a9c67bbb3791cb93a864cfeb9c7151",
+ "ignored-cves": []
+ },
+ "libgcrypt": {
+ "product": "libgcrypt",
+ "vendor": "gnupg",
+ "version": "1.8.1",
+ "commit": "80fd8615048c3897b91a315cca22ab139b056ccd",
+ "ignored-cves": []
+ },
+ "libgpg-error": {
+ "product": "libgpg-error",
+ "vendor": "gnupg",
+ "version": "1.27",
+ "commit": "c1668f61c58ea6f0439e5193d83b4e2ac622b286",
+ "ignored-cves": []
+ },
+ "libiptables": {
+ "product": "iptables",
+ "vendor": "netfilter",
+ "version": "1.8.3",
+ "commit": "1447b15100fe73810237809c1d4ade3c861b6d96",
+ "ignored-cves": []
+ },
+ "libmagic": {
+ "product": "file",
+ "vendor": "file_project",
+ "version": "5.40",
+ "commit": "f49fda6f52a9477d817dbd9c06afab02daf025f8",
+ "ignored-cves": []
+ },
+ "librdkafka": {
+ "product": "librdkafka",
+ "vendor": "edenhill",
+ "version": "1.8.0",
+ "commit": "9ded5eefaf3ba3b65ebc95b0dff7a6d5faaaa38d"
+ },
+ "librpm": {
+ "product": "rpm",
+ "vendor": "rpm",
+ "version": "4.17.0",
+ "commit": "3e74e8ba2dd5e76a5353d238dc7fc38651ce27b3",
+ "ignored-cves": []
+ },
+ "libudev": {
+ "product": "udev",
+ "vendor": "udev_project",
+ "version": "174",
+ "commit": "b3eccdce81d18ec36c6ea95ac161002fc33d1810",
+ "ignored-cves": []
+ },
+ "libxml2": {
+ "product": "libxml2",
+ "vendor": "xmlsoft",
+ "version": "2.9.14",
+ "commit": "7846b0a677f8d3ce72486125fa281e92ac9970e8",
+ "ignored-cves": []
+ },
+ "linenoise-ng": {
+ "product": "linenoise-ng",
+ "vendor": "arangodb",
+ "version": "master",
+ "commit": "4754bee2d8eb3c4511e6ac87cac62255b2011e2f",
+ "ignored-cves": []
+ },
+ "lzma": {
+ "product": "xz",
+ "vendor": "tukaani",
+ "version": "5.2.5",
+ "commit": "2327a461e1afce862c22269b80d3517801103c1b",
+ "ignored-cves": []
+ },
+ "popt": {
+ "product": "popt",
+ "vendor": "popt_project",
+ "version": "1.16",
+ "commit": "abe4af616ffc0e22e54d691e73a67fabc267cc26",
+ "ignored-cves": []
+ },
+ "rapidjson": {
+ "product": "rapidjson",
+ "vendor": "tencent",
+ "version": "1.1.0",
+ "commit": "1a825d24fa322a5fe721624b2ed7a18b6de9b48a",
+ "ignored-cves": []
+ },
+ "rocksdb": {
+ "product": "rocksdb",
+ "vendor": "facebook",
+ "version": "6.22.1",
+ "commit": "51b540921dd7495c9cf2265eb58942dad1f2ef72",
+ "ignored-cves": []
+ },
+ "sleuthkit": {
+ "product": "the_sleuth_kit",
+ "vendor": "sleuthkit",
+ "version": "4.11.0",
+ "commit": "a397493d8fd5198b40d6c0ce1e4135c1f86d9ea9",
+ "ignored-cves": []
+ },
+ "sqlite": {
+ "product": "sqlite",
+ "vendor": "sqlite",
+ "version": "3.39.2",
+ "commit": "cea3fbb89fb5dbf9a613964a3786867df17a0204",
+ "ignored-cves": []
+ },
+ "thrift": {
+ "product": "thrift",
+ "vendor": "apache",
+ "version": "0.15.0",
+ "commit": "8317ec43ea2425b6f8e24e4dc4f5b2360f717eb4",
+ "ignored-cves": []
+ },
+ "util-linux": {
+ "product": "util-linux",
+ "vendor": "kernel",
+ "version": "2.27.1",
+ "commit": "9f5e5bec91a72365b9baa771fa02cbedab804fe3",
+ "ignored-cves": []
+ },
+ "yara": {
+ "product": "yara",
+ "vendor": "virustotal",
+ "version": "4.1.3",
+ "commit": "b99a808cf9955090b909c72d6a0da5295c3cbc7c",
+ "ignored-cves": []
+ },
+ "zlib": {
+ "product": "zlib",
+ "vendor": "zlib",
+ "version": "1.2.12",
+ "commit": "21767c654d31d2dccdde4330529775c6c5fd5389",
+ "ignored-cves": []
+ },
+ "zstd": {
+ "product": "zstandard",
+ "vendor": "facebook",
+ "version": "1.4.0",
+ "commit": "83b51e9f886be7c2a4d477b6e7bc6db831791d8d",
+ "ignored-cves": ["CVE-2021-24031"]
+ }
+}
diff --git a/osquery/core/watcher.cpp b/osquery/core/watcher.cpp
index a9298e3ea12..e11a4f9e8cc 100644
--- a/osquery/core/watcher.cpp
+++ b/osquery/core/watcher.cpp
@@ -598,12 +598,6 @@ Status WatcherRunner::isChildSane(const PlatformProcess& child) const {
1, "Memory limits exceeded: " + std::to_string(change.footprint));
}
- // The worker is sane, no action needed.
- // Attempt to flush status logs to the well-behaved worker.
- if (use_worker_ && child.pid() == watcher_->getWorker().pid()) {
- relayStatusLogs();
- }
-
return Status(0);
}
diff --git a/osquery/dispatcher/scheduler.cpp b/osquery/dispatcher/scheduler.cpp
index f738873cee7..904737859ef 100644
--- a/osquery/dispatcher/scheduler.cpp
+++ b/osquery/dispatcher/scheduler.cpp
@@ -224,6 +224,17 @@ void SchedulerRunner::maybeScheduleCarves(uint64_t time_step) {
void SchedulerRunner::maybeReloadSchedule(uint64_t time_step) {
if (FLAGS_schedule_reload > 0 && (time_step % FLAGS_schedule_reload) == 0) {
+ /* Before resetting the database we want to ensure that there's no pending
+ log relay thread started by the scheduler thread in a previous loop,
+ to avoid deadlocks.
+ This is because resetDatabase logs and also holds an exclusive lock
+ on the database, so when a log relay thread started via relayStatusLogs
+ is pending, log calls done on the same thread that started it
+ (in this case the scheduler thread) will wait until the log relaying
+ thread finishes serializing the logs to the database; but this can't
+ happen due to the exclusive lock. */
+ waitLogRelay();
+
if (FLAGS_schedule_reload_sql) {
SQLiteDBManager::resetPrimary();
}
@@ -232,7 +243,14 @@ void SchedulerRunner::maybeReloadSchedule(uint64_t time_step) {
}
void SchedulerRunner::maybeFlushLogs(uint64_t time_step) {
- // GLog is not re-entrant, so logs must be flushed in a dedicated thread.
+ /* In daemon mode we start a log relay thread to flush the logs from the
+ BufferedLogSink to the database.
+ The thread is started from the scheduler thread,
+ since if we did it in the send() function of BufferedLogSink,
+ inline with the log call itself, we would cause deadlocks
+ if there's recursive logging caused by the logger plugins.
+ We also do the flush itself in a new thread so we don't slow down
+ the scheduler thread too much. */
if ((time_step % 3) == 0) {
relayStatusLogs(LoggerRelayMode::Async);
}
@@ -281,6 +299,10 @@ void SchedulerRunner::start() {
}
}
+ /* Wait for the thread relaying/flushing the logs,
+ to prevent race conditions on shutdown */
+ waitLogRelay();
+
// Scheduler ended.
if (!interrupted() && request_shutdown_on_expiration) {
LOG(INFO) << "The scheduler ended after " << timeout_ << " seconds";
diff --git a/osquery/events/darwin/endpointsecurity.h b/osquery/events/darwin/endpointsecurity.h
index 18dcdad5dd4..ba1ef70e1b6 100644
--- a/osquery/events/darwin/endpointsecurity.h
+++ b/osquery/events/darwin/endpointsecurity.h
@@ -57,6 +57,7 @@ struct EndpointSecurityEventContext : public EventContext {
std::string team_id;
std::string cdhash;
bool platform_binary;
+ std::string codesigning_flags;
std::string executable;
std::string username;
diff --git a/osquery/events/darwin/es_utils.cpp b/osquery/events/darwin/es_utils.cpp
index 08cdcc10b78..b777627db6d 100644
--- a/osquery/events/darwin/es_utils.cpp
+++ b/osquery/events/darwin/es_utils.cpp
@@ -7,6 +7,8 @@
* SPDX-License-Identifier: (Apache-2.0 OR GPL-2.0-only)
*/
+#include <kern/cs_blobs.h>
+#include <boost/algorithm/string/join.hpp>
#include
#include
#include
@@ -64,6 +66,37 @@ std::string getSigningId(const es_process_t* p) {
: "";
}
+std::string getCodesigningFlags(const es_process_t* p) {
+ // Parses flags from kern/cs_blobs.h header that are useful for monitoring.
+ // Flags that are commonly set are inverted to make unusual or potentially
+ // insecure processes stand out.
+
+ std::vector<std::string> flags;
+ if (!(p->codesigning_flags & CS_VALID)) {
+ // Process code signature is invalid, either initially or after paging
+ // in an invalid page to a previously valid code signature.
+ flags.push_back("NOT_VALID");
+ }
+
+ if (p->codesigning_flags & CS_ADHOC) {
+ // Process is signed "ad-hoc", without a code signing identity.
+ flags.push_back("ADHOC");
+ }
+
+ if (!(p->codesigning_flags & CS_RUNTIME)) {
+ // Process is signed without using the hardened runtime.
+ flags.push_back("NOT_RUNTIME");
+ }
+
+ if (p->codesigning_flags & CS_INSTALLER) {
+ // Process has installer entitlement, which can modify system integrity
+ // protected (SIP) files.
+ flags.push_back("INSTALLER");
+ }
+
+ return boost::algorithm::join(flags, ", ");
+}
+
std::string getTeamId(const es_process_t* p) {
return p->team_id.length > 0 && p->team_id.data != nullptr ? p->team_id.data
: "";
@@ -112,6 +145,7 @@ void getProcessProperties(const es_process_t* p,
ec->team_id = getTeamId(p);
ec->cdhash = getCDHash(p);
ec->platform_binary = p->is_platform_binary;
+ ec->codesigning_flags = getCodesigningFlags(p);
auto user = getpwuid(ec->uid);
ec->username = user->pw_name != nullptr ? std::string(user->pw_name) : "";
@@ -119,4 +153,4 @@ void getProcessProperties(const es_process_t* p,
ec->cwd = getCwdPathFromPid(ec->pid);
}
-} // namespace osquery
\ No newline at end of file
+} // namespace osquery
diff --git a/osquery/logger/data_logger.h b/osquery/logger/data_logger.h
index 489797947e7..d5e38a6b314 100644
--- a/osquery/logger/data_logger.h
+++ b/osquery/logger/data_logger.h
@@ -124,6 +124,15 @@ Status logSnapshotQuery(const QueryLogItem& item);
*/
void relayStatusLogs(LoggerRelayMode relay_mode = LoggerRelayMode::Sync);
+/**
+ * @brief Waits for the relay thread to finish
+ *
+ * Waits for the new relay thread launched by the relayStatusLogs function,
+ * called previously on the current thread, to finish.
+ * Must not be called in a path that can be called by Google Log.
+ */
+void waitLogRelay();
+
/// Inspect the number of internal-buffered status log lines.
size_t queuedStatuses();
diff --git a/osquery/logger/logger.cpp b/osquery/logger/logger.cpp
index ed8391f48cf..e3733de7011 100644
--- a/osquery/logger/logger.cpp
+++ b/osquery/logger/logger.cpp
@@ -326,7 +326,7 @@ void BufferedLogSink::send(google::LogSeverity severity,
std::string()});
}
- // The daemon will relay according to the schedule.
+ // This is for testing only; the daemon will relay according to the schedule.
if (enabled_ && !isDaemon()) {
relayStatusLogs(FLAGS_logger_status_sync ? LoggerRelayMode::Sync
: LoggerRelayMode::Async);
@@ -479,6 +479,18 @@ size_t queuedStatuses() {
return BufferedLogSink::get().dump().size();
}
+void waitLogRelay() {
+ if (kOptBufferedLogSinkSender.has_value()) {
+ /* NOTE: We are not doing a workaround for Windows
+ as in BufferedLogSink::WaitTillSent, because we are not, and must not be,
+ in a path called by Google Log, and failing to properly wait
+ for the thread to finish will either cause a race condition or a deadlock
+ */
+ kOptBufferedLogSinkSender->wait();
+ kOptBufferedLogSinkSender.reset();
+ }
+}
+
void relayStatusLogs(LoggerRelayMode relay_mode) {
if (FLAGS_disable_logging || !databaseInitialized()) {
// The logger plugins may not be setUp if logging is disabled.
@@ -533,6 +545,9 @@ void relayStatusLogs(LoggerRelayMode relay_mode) {
if (relay_mode == LoggerRelayMode::Sync) {
sender();
} else {
+ // Wait on a previous relaying thread, if present
+ waitLogRelay();
+
+ std::packaged_task<void()> task(std::move(sender));
kOptBufferedLogSinkSender = task.get_future();
std::thread(std::move(task)).detach();
diff --git a/osquery/remote/http_client.cpp b/osquery/remote/http_client.cpp
index beb05c04d30..9380c1eb7ce 100644
--- a/osquery/remote/http_client.cpp
+++ b/osquery/remote/http_client.cpp
@@ -458,7 +458,7 @@ Response Client::put(Request& req,
std::string const& content_type) {
req.method(beast_http::verb::put);
req.body() = body;
- if (!content_type.empty() && content_type.size() < 512) {
+ if (!content_type.empty()) {
req.set(beast_http::field::content_type, content_type);
}
return sendHTTPRequest(req);
@@ -480,7 +480,7 @@ Response Client::put(Request& req,
std::string const& content_type) {
req.method(beast_http::verb::put);
req.body() = std::move(body);
- if (!content_type.empty() && content_type.size() < 512) {
+ if (!content_type.empty()) {
req.set(beast_http::field::content_type, content_type);
}
return sendHTTPRequest(req);
diff --git a/osquery/tables/applications/posix/docker.cpp b/osquery/tables/applications/posix/docker.cpp
index 1f2d9603e66..19a0f26e050 100644
--- a/osquery/tables/applications/posix/docker.cpp
+++ b/osquery/tables/applications/posix/docker.cpp
@@ -409,8 +409,13 @@ QueryData genContainers(QueryContext& context) {
BIGINT(container_details.get_child("State").get("Pid", -1));
r["started_at"] = container_details.get_child("State").get(
"StartedAt", "");
- r["finished_at"] = container_details.get_child("State").get(
- "FinishedAt", "");
+ if (r["state"] != "running") {
+ r["finished_at"] =
+ container_details.get_child("State").get("FinishedAt",
+ "");
+ } else {
+ r["finished_at"] = "";
+ }
r["privileged"] = container_details.get_child("HostConfig")
.get("Privileged", false)
? INTEGER(1)
diff --git a/osquery/tables/events/darwin/es_process_events.cpp b/osquery/tables/events/darwin/es_process_events.cpp
index a9fb29e93f1..6d8ef88607a 100644
--- a/osquery/tables/events/darwin/es_process_events.cpp
+++ b/osquery/tables/events/darwin/es_process_events.cpp
@@ -62,6 +62,7 @@ Status ESProcessEventSubscriber::Callback(
r["signing_id"] = ec->signing_id;
r["team_id"] = ec->team_id;
r["cdhash"] = ec->cdhash;
+ r["codesigning_flags"] = ec->codesigning_flags;
r["cmdline"] = ec->args;
r["cmdline_count"] = BIGINT(ec->argc);
diff --git a/osquery/tables/system/CMakeLists.txt b/osquery/tables/system/CMakeLists.txt
index 8b0e3d85258..2d93e53155b 100644
--- a/osquery/tables/system/CMakeLists.txt
+++ b/osquery/tables/system/CMakeLists.txt
@@ -202,6 +202,8 @@ function(generateOsqueryTablesSystemSystemtable)
windows/registry.cpp
windows/scheduled_tasks.cpp
windows/secureboot.cpp
+ windows/security_profile_info_utils.cpp
+ windows/security_profile_info.cpp
windows/services.cpp
windows/shared_resources.cpp
windows/shellbags.cpp
@@ -269,6 +271,7 @@ function(generateOsqueryTablesSystemSystemtable)
if(DEFINED PLATFORM_LINUX)
target_link_libraries(osquery_tables_system_systemtable PUBLIC
osquery_utils_linux
+ osquery_utils_system_boottime
thirdparty_libdevmapper
thirdparty_libcryptsetup
thirdparty_librpm
@@ -348,6 +351,7 @@ function(generateOsqueryTablesSystemSystemtable)
windows/certificates.h
windows/windows_eventlog.h
windows/windows_update_history.h
+ windows/security_profile_info_utils.h
)
endif()
diff --git a/osquery/tables/system/darwin/processes.cpp b/osquery/tables/system/darwin/processes.cpp
index 2aa21394231..8f44c670f82 100644
--- a/osquery/tables/system/darwin/processes.cpp
+++ b/osquery/tables/system/darwin/processes.cpp
@@ -8,12 +8,11 @@
*/
#include
+#include
#include
#include
#include
-#include
-
#include
#include