Skip to content

Commit 581a4a1

Browse files
authored
Build workflow (#2)
Adds a workflow for building Python wheels and the AppImage
1 parent 702a29b commit 581a4a1

File tree

12 files changed

+246
-22
lines changed

12 files changed

+246
-22
lines changed
Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
# action.yml
# Composite Docker action: builds the image from the repo-root Dockerfile and
# runs it (the Dockerfile's ENTRYPOINT performs the actual build).
name: 'Build and run docker container'
description: 'Builds and runs the docker container for the Dockerfile in the root'
runs:
  using: 'docker'
  # NOTE(review): the path climbs out of the action directory
  # (.github/actions/<name>/) up to the repository-root Dockerfile —
  # confirm this resolves when the action runs in CI.
  image: '../../../Dockerfile'

.github/workflows/main.yml

Lines changed: 75 additions & 0 deletions
Original file line numberDiff line numberDiff line change
# This is a basic workflow to help you get started with Actions

name: Build artifacts

# Controls when the workflow will run
on:
  # Triggers the workflow on push or pull request events but only for the main branch
  push:
    branches: [ main ]
  pull_request:
    branches: [ main ]

  # Allows you to run this workflow manually from the Actions tab
  workflow_dispatch:

# A workflow run is made up of one or more jobs that can run sequentially or in parallel
jobs:
  # This workflow contains a single job called "build"
  build:
    # The type of runner that the job will run on
    # NOTE(review): ubuntu-18.04 runners have been retired by GitHub — this
    # label will need updating for the workflow to keep scheduling.
    runs-on: ubuntu-18.04

    # One matrix entry per artifact: a wheel per supported Python version,
    # plus one entry that builds only the AppImage.
    strategy:
      matrix:
        configuration: [py36, py37, py38, py39, appimage]
        include:
          - configuration: py36
            python_version: 3.6
            build_python: ON
            build_appimage: OFF
          - configuration: py37
            python_version: 3.7
            build_python: ON
            build_appimage: OFF
          - configuration: py38
            python_version: 3.8
            build_python: ON
            build_appimage: OFF
          - configuration: py39
            python_version: 3.9
            build_python: ON
            build_appimage: OFF
          - configuration: appimage
            python_version: 3.9
            build_python: OFF
            build_appimage: ON

    # These variables are consumed inside the build container
    # (see the docker-action / Dockerfile entrypoint).
    env:
      PYTHON_VERSION: ${{ matrix.python_version }}
      BUILD_PYTHON_MODULE: ${{ matrix.build_python }}
      BUILD_APPIMAGE: ${{ matrix.build_appimage }}
      MAKE_FLAGS: -j2

    # Steps represent a sequence of tasks that will be executed as part of the job
    steps:
      # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
      - uses: actions/checkout@v2

      - name: Download model files
        run: |
          wget -nv https://github.com/isl-org/adaptive-surface-reconstruction/releases/download/models/models.zip
          unzip models.zip
          rm models.zip

      - name: Build inside container
        uses: ./.github/actions/docker-action

      # Only one of the two patterns matches per matrix entry
      # (wheel for py3x configurations, AppImage for the appimage one).
      - name: Upload artifacts
        uses: actions/upload-artifact@v2
        with:
          name: artifact_${{ matrix.configuration }}
          path: |
            build/a*.whl
            build/asr*.AppImage

.gitignore

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
build
__pycache__
datasets/t10k
dummy_resource_file
*.ply

CMakeLists.txt

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@ cmake_minimum_required( VERSION 3.18 )
33
project(adaptivesurfacereconstruction VERSION 0.1.0)
44

55
option(BUILD_PYTHON_MODULE "Build the python module" ON)
6-
option(ENABLE_APPIMAGE "Enables the target 'appimage' for building the AppImage" OFF)
6+
option(BUILD_APPIMAGE "Build the AppImage" OFF)
77

88
if( NOT CMAKE_BUILD_TYPE )
99
set( CMAKE_BUILD_TYPE "Release" CACHE STRING "Build configuration 'Release' or 'Debug'." FORCE )
@@ -23,7 +23,7 @@ if( BUILD_PYTHON_MODULE )
2323
add_subdirectory( cpp/pybind pybind )
2424
add_subdirectory( python )
2525
endif()
26-
if( ENABLE_APPIMAGE )
26+
if( BUILD_APPIMAGE )
2727
add_subdirectory( appimage )
2828
endif()
2929

Dockerfile

Lines changed: 28 additions & 0 deletions
Original file line numberDiff line numberDiff line change
# Build container for the project: Ubuntu 18.04 base with the native build
# toolchain and a Miniconda install for per-version Python environments.
FROM ubuntu:18.04

# Miniconda requires bash as the default shell.
SHELL ["/bin/bash", "-c"]


RUN apt-get update && apt-get install -y \
    build-essential \
    file \
    git \
    libglu1-mesa-dev \
    libtbb-dev \
    patchelf \
    wget \
    xorg-dev \
 && rm -rf /var/lib/apt/lists/*

# Miniconda
ENV PATH="/root/miniconda3/bin:${PATH}"
RUN wget -q https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh \
 && bash Miniconda3-latest-Linux-x86_64.sh -b \
 && rm Miniconda3-latest-Linux-x86_64.sh \
 && conda --version


# The entrypoint script drives the actual build; it reads PYTHON_VERSION,
# BUILD_PYTHON_MODULE, BUILD_APPIMAGE and MAKE_FLAGS from the environment.
COPY docker_entrypoint.sh /docker_entrypoint.sh

ENTRYPOINT ["/docker_entrypoint.sh"]

README.md

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -25,13 +25,13 @@ If you find this repository useful please cite our [paper](https://openaccess.th
2525
## Dependencies
2626

2727
## Packages for building the library
28-
- Pytorch 1.8.1 (can be installed with `python -m pip install torch==1.8.1+cpu -f https://download.pytorch.org/whl/lts/1.8/torch_lts.html`)
28+
- Pytorch 1.8.2 (can be installed with `python -m pip install torch==1.8.2+cpu -f https://download.pytorch.org/whl/lts/1.8/torch_lts.html`)
2929
- On Ubuntu the following packages are required: patchelf, xorg-dev, libglu1-mesa-dev, python3-dev
3030
These can be installed with `apt install patchelf xorg-dev libglu1-mesa-dev python3-dev`
3131

3232
## Packages required for training the network
3333
- Tensorflow 2.6.0
34-
- Open3D 0.14 or later with ML module (https://github.com/intel-isl/Open3D/)
34+
- Open3D 0.14 or later with ML module (https://github.com/isl-org/Open3D/)
3535
- Tensorpack DataFlow (for reading data, ```pip install --upgrade git+https://github.com/tensorpack/dataflow.git```)
3636
- python-prctl (needed by Tensorpack DataFlow; depends on libcap-dev, install with ```apt install libcap-dev``` )
3737
- msgpack (```pip install msgpack``` )

appimage/CMakeLists.txt

Lines changed: 8 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -1,20 +1,21 @@
11

22
set( appdir "${CMAKE_CURRENT_BINARY_DIR}/AppDir" )
3-
file( GLOB_RECURSE asr_model_files ${PROJECT_SOURCE_DIR}/models/v*/model*.pt )
4-
file( GLOB_RECURSE dataset_attribution_files ${PROJECT_SOURCE_DIR}/datasets/*/*_attribution.txt )
5-
list( APPEND asr_model_files dummy_resource_file )
3+
file( GLOB_RECURSE asr_resources_files
4+
${PROJECT_SOURCE_DIR}/models/v*/model*.pt
5+
${PROJECT_SOURCE_DIR}/datasets/*/*_attribution.txt
6+
)
7+
list( APPEND asr_resources_files dummy_resource_file )
68

79
find_program(linuxdeploy_binary linuxdeploy-x86_64.AppImage )
810
if( NOT linuxdeploy_binary )
911
message( FATAL_ERROR "Building the appimage requires 'linuxdeploy-x86_64.AppImage' from https://github.com/linuxdeploy/linuxdeploy/releases/tag/continuous")
1012
endif()
1113

12-
add_custom_target( appimage
13-
COMMAND ${CMAKE_COMMAND} -E rm -r "${appdir}"
14+
add_custom_target( appimage ALL
15+
COMMAND ${CMAKE_COMMAND} -E rm -rf "${appdir}"
1416
COMMAND ${CMAKE_COMMAND} -E touch dummy_resource_file
1517
COMMAND ${CMAKE_COMMAND} -E make_directory "${appdir}/usr/lib/asr_resources"
16-
COMMAND ${CMAKE_COMMAND} -E copy_if_different ${asr_model_files} "${appdir}/usr/lib/asr_resources"
17-
COMMAND ${CMAKE_COMMAND} -E copy_if_different ${dataset_attribution_files} "${appdir}/usr/lib/asr_resources"
18+
COMMAND ${CMAKE_COMMAND} -E copy_if_different ${asr_resources_files} "${appdir}/usr/lib/asr_resources"
1819
COMMAND LD_LIBRARY_PATH=${CMAKE_CURRENT_BINARY_DIR}/../_deps/libtorch-src/lib VERSION=${PROJECT_VERSION} ${linuxdeploy_binary} --create-desktop-file --appdir "${appdir}" -e $<TARGET_FILE:asrtool> -i ${CMAKE_CURRENT_SOURCE_DIR}/asrtool.png -o appimage
1920
DEPENDS asrtool
2021
)

build_with_docker.sh

Lines changed: 51 additions & 0 deletions
Original file line numberDiff line numberDiff line change
#!/bin/bash
# Build the Python wheel and/or the AppImage inside the project's Docker
# container. Configuration is passed into the container via environment
# variables consumed by docker_entrypoint.sh.

function printhelp {
    usage="
Usage: $(basename $0) [OPTIONS]

Example:
  ./$(basename $0) --wheel 3.9 --appimage

Options:
  --wheel <python version>  Build a python wheel with the specified version
  --appimage                Build the AppImage
  --help, -h                Print this help
"
    echo "$usage"
    exit 0
}

PYTHON_VERSION=3.9
BUILD_PYTHON_MODULE=OFF
BUILD_APPIMAGE=OFF

# No arguments: show help and exit (printhelp does not return).
if [ $# -eq 0 ]; then
    printhelp
fi

# Bounded loop (max 100 iterations) guards against an endless loop if an
# option ever forgets to shift; unknown arguments terminate parsing.
parse_iter=0
while [ $parse_iter -lt 100 ] ; do
    parse_iter=$((parse_iter+1))
    case "$1" in
        --wheel) PYTHON_VERSION="$2"; BUILD_PYTHON_MODULE=ON ; shift 2 ;;
        --appimage) BUILD_APPIMAGE=ON ; shift ;;
        --help | -h) printhelp ; shift ;;
        *) break ;;
    esac
done
# Strict mode only after parsing: the parser deliberately reads "$1"/"$2"
# past the end of the argument list, which 'set -u' would reject.
set -euo pipefail

REPO_ROOT="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"

docker build -t adaptive-surface-reconstruction .

docker run \
    --rm \
    --workdir /workspace \
    -e PYTHON_VERSION="$PYTHON_VERSION" \
    -e BUILD_PYTHON_MODULE="$BUILD_PYTHON_MODULE" \
    -e BUILD_APPIMAGE="$BUILD_APPIMAGE" \
    -e MAKE_FLAGS="-j" \
    -v "$REPO_ROOT":/workspace \
    adaptive-surface-reconstruction

cmake/external_deps.cmake

Lines changed: 6 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -3,8 +3,12 @@ include(ExternalProject)
33

44
find_package( Python COMPONENTS Interpreter )
55

6-
# show downloads during configure step
7-
set( FETCHCONTENT_QUIET OFF CACHE BOOL "" FORCE )
6+
# show downloads during configure step but not on github
7+
if(DEFINED ENV{GITHUB_WORKSPACE})
8+
set( FETCHCONTENT_QUIET ON CACHE BOOL "" FORCE )
9+
else()
10+
set( FETCHCONTENT_QUIET OFF CACHE BOOL "" FORCE )
11+
endif()
812

913
find_program( patchelf_binary patchelf REQUIRED )
1014

datasets/create_t10k_msgpacks.py

Lines changed: 8 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -30,7 +30,8 @@ def read_compressed_msgpack(path, decompressor=None):
3030

3131
def select_good_meshes(info_dict, data_dir):
3232
# select only good meshes
33-
raw_meshes_dir = os.path.join(data_dir,'raw_meshes')
33+
if data_dir:
34+
raw_meshes_dir = os.path.join(data_dir,'raw_meshes')
3435
selected_meshes = []
3536
attribution = []
3637
selection = {
@@ -63,7 +64,8 @@ def select_good_meshes(info_dict, data_dir):
6364
break;
6465
if selected and info['License'] in licenses:
6566
attribution.append('"{}"({}) by {} is licensed under {}'.format(info['title'].strip(), info['Thing ID'], info['author'], info['License']))
66-
selected_meshes.append(glob(os.path.join(raw_meshes_dir,key+'.*'))[0])
67+
if data_dir:
68+
selected_meshes.append(glob(os.path.join(raw_meshes_dir,key+'.*'))[0])
6769

6870
return selected_meshes, attribution
6971

@@ -105,11 +107,14 @@ def create_data(mesh_paths, output_path):
105107
def main():
106108
parser = argparse.ArgumentParser(description="Create data files for training",
107109
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
108-
parser.add_argument("--data_dir", type=str, required=True, help="The path to the Thingi10k dataset root.")
110+
parser.add_argument("--data_dir", type=str, default=None, help="The path to the Thingi10k dataset root.")
109111
parser.add_argument("--output_dir", type=str, default=os.path.join(os.path.dirname(__file__), 't10k'), help="The path to the output dir")
110112
parser.add_argument("--attribution_file_only", action="store_true", help="Create only the attribution file")
111113

112114
args = parser.parse_args()
115+
if not args.attribution_file_only and not args.data_dir:
116+
print("Please specify the path to the Thingi10K root with '--data-dir'")
117+
return
113118

114119
info_dict = read_compressed_msgpack(os.path.join(os.path.dirname(__file__),'thingi10k_info.msgpack.zst'))
115120

0 commit comments

Comments
 (0)