Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Adds dockerisation and ML inference #10

Merged
merged 6 commits into from
Mar 28, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
48 changes: 48 additions & 0 deletions .github/workflows/deploy_docker.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,48 @@
#
name: Create and publish a Docker image

# Run this workflow every time a change is pushed to the `main` branch.
# (The trigger below is `main`, not `release` — the previous comment was stale.)
on:
  push:
    branches: ['main']

# Defines two custom environment variables for the workflow. These are used for the Container registry domain, and a name for the Docker image that this workflow builds.
env:
  REGISTRY: ghcr.io
  IMAGE_NAME: ${{ github.repository }}

# There is a single job in this workflow. It's configured to run on the latest available version of Ubuntu.
jobs:
  build-and-push-image:
    runs-on: ubuntu-latest
    # Sets the permissions granted to the `GITHUB_TOKEN` for the actions in this job.
    permissions:
      contents: read
      packages: write
    #
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
      # Uses the `docker/login-action` action to log in to the Container registry using the account and password that will publish the packages. Once published, the packages are scoped to the account defined here.
      - name: Log in to the Container registry
        uses: docker/login-action@v3
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
      # This step uses [docker/metadata-action](https://github.com/docker/metadata-action#about) to extract tags and labels that will be applied to the specified image. The `id` "meta" allows the output of this step to be referenced in a subsequent step. The `images` value provides the base name for the tags and labels.
      - name: Extract metadata (tags, labels) for Docker
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
      # This step uses the `docker/build-push-action` action to build the image, based on your repository's `Dockerfile`. If the build succeeds, it pushes the image to GitHub Packages.
      # It uses the `context` parameter to define the build's context as the set of files located in the specified path. For more information, see "[Usage](https://github.com/docker/build-push-action#usage)" in the README of the `docker/build-push-action` repository.
      # It uses the `tags` and `labels` parameters to tag and label the image with the output from the "meta" step.
      - name: Build and push Docker image
        uses: docker/build-push-action@v5
        with:
          context: .
          push: true
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
22 changes: 22 additions & 0 deletions Dockerfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
# Use the conda-forge base image with Python
FROM mambaorg/micromamba:1.5.8


# Send Python stdout/stderr straight to the container log without buffering.
# Use the `ENV key=value` form: the space-separated `ENV key value` legacy
# form is deprecated by Docker.
ENV PYTHONUNBUFFERED=1

RUN micromamba config append channels conda-forge
RUN micromamba config append channels openeye

COPY --chown=$MAMBA_USER:$MAMBA_USER devtools/conda-envs/falcbot.yaml /tmp/env.yaml
COPY --chown=$MAMBA_USER:$MAMBA_USER . /home/mambauser/FALCBot

# Install git plus the pinned environment into the base env, then purge the
# package caches in the same layer to keep the image small.
RUN micromamba install -y -n base git -f /tmp/env.yaml && \
    micromamba clean --all --yes

# Activate the micromamba environment for subsequent RUN instructions.
ARG MAMBA_DOCKERFILE_ACTIVATE=1

WORKDIR /home/mambauser/FALCBot

# Directory that compose.yml bind-mounts the host OpenEye licence into;
# OE_LICENSE points at the licence file inside it.
RUN mkdir /home/mambauser/.OpenEye
ENV OE_LICENSE=/home/mambauser/.OpenEye/oe_license.txt
15 changes: 15 additions & 0 deletions compose.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
# NOTE(review): the top-level `version` key is obsolete in Compose v2 and is
# ignored; kept for compatibility with older docker-compose installs.
version: '3'

services:
  falcbot:
    # Run the Slack bot entrypoint inside the image built from ./Dockerfile.
    command: python falcbot/falcbot.py
    build: .
    container_name: "slackbot"
    ports:
      - 80:80
    volumes:
      # Bind-mount the host OpenEye licence directory so OE_LICENSE
      # (set in the Dockerfile) resolves inside the container.
      - type: bind
        source: $HOME/.OpenEye/
        target: /home/mambauser/.OpenEye/
    env_file:
      # Runtime secrets/config; see devtools/deployment/.env.example
      - .env
19 changes: 19 additions & 0 deletions devtools/conda-envs/falcbot.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,23 @@ dependencies:
- pydantic<2.0a0


# ml
- pytorch
- pytorch_geometric >=2.5.0
- pytorch_cluster
- pytorch_scatter
- pytorch_sparse
- numpy
- h5py
- e3nn
- dgl
- dgllife
- pooch
- mtenn >=0.5.1
- wandb
- semver


# alchemy
- numpy
- gufe >=0.9.5
Expand All @@ -45,5 +62,7 @@ dependencies:
- git+https://github.com/choderalab/asapdiscovery@main#egg=asapdiscovery-data&subdirectory=asapdiscovery-data
- git+https://github.com/choderalab/asapdiscovery@main#egg=asapdiscovery-modeling&subdirectory=asapdiscovery-modeling
- git+https://github.com/choderalab/asapdiscovery@main#egg=asapdiscovery-docking&subdirectory=asapdiscovery-docking
- git+https://github.com/choderalab/asapdiscovery@main#egg=asapdiscovery-ml&subdirectory=asapdiscovery-ml



1 change: 1 addition & 0 deletions devtools/deployment/.env.example
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
BLAH=BLAH
54 changes: 45 additions & 9 deletions falcbot/falcbot.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,10 +25,16 @@
from asapdiscovery.data.services.aws.cloudfront import CloudFront
from asapdiscovery.data.services.aws.s3 import S3

from asapdiscovery.ml.inference import GATInference
from asapdiscovery.data.services.postera.manifold_data_validation import TargetTags


from rdkit import Chem

from multiprocessing import cpu_count

# logger in a global context
logging.basicConfig(level=logging.INFO)
logging.basicConfig(level=logging.DEBUG)


class SlackSettings(BaseSettings):
Expand Down Expand Up @@ -80,15 +86,18 @@ def _link_to_block_data(link, text):
}


@app.message(re.compile("(hi|hello|hey)"))
def say_hello_regex(say, context):
    """Reply to a greeting by echoing the matched greeting word back."""
    # Bolt puts the regex capture groups in context["matches"].
    print(context)
    matched_greeting = context["matches"][0]
    say(f"{matched_greeting}, how are you?")

def _is_valid_smiles(smi: str) -> bool:
    """Return True if ``smi`` parses as a valid SMILES string.

    RDKit's ``MolFromSmiles`` returns ``None`` (rather than raising) for
    unparseable input, so a simple identity check suffices.
    """
    return Chem.MolFromSmiles(smi) is not None

def _rdkit_smiles_roundtrip(smi: str) -> str:
    """Canonicalise a SMILES string by round-tripping it through RDKit.

    Raises:
        ValueError: if ``smi`` does not parse — fail with a clear message
            instead of letting ``Chem.MolToSmiles(None)`` raise an opaque
            ArgumentError.
    """
    mol = Chem.MolFromSmiles(smi)
    if mol is None:
        raise ValueError(f"Invalid SMILES: {smi}")
    return Chem.MolToSmiles(mol)

# NOTE(review): the first pattern already matches everything the second does,
# so the second decorator registers a redundant listener; kept as-is to avoid
# changing registration side effects.
@app.message(re.compile("(.*)are you alive(.*)"))
@app.message(re.compile("(.*)are you alive falcbot(.*)"))
def are_you_alive(say, context):
    """Liveness check: reply to 'are you alive' messages."""
    # Plain literal — the original f-string had no placeholders (F541).
    say("yes im alive!")

Expand Down Expand Up @@ -379,6 +388,33 @@ def plan_and_submit_from_ligand_and_receptor(): ...
def submit_from_planned_network(): ... # do something with settings


@app.message(re.compile("infer pIC50 from SMILES"))
def make_pic50_pred(message, say, context, logger):
    """Predict a pIC50 for a SMILES string against a target with the latest GAT model.

    Expects a message of the form:
        "infer pIC50 from SMILES <smiles> for target <target>"
    Replies via ``say`` with the prediction, or with a diagnostic message
    and returns early on malformed input.
    """
    content = message.get("text")
    # SMILES strings routinely contain (), =, #, @, [], / and \, which the
    # previous ([\w-]+) group could not match — capture any non-whitespace
    # run instead. Target tags remain word characters and hyphens.
    pattern = r"infer pIC50 from SMILES (\S+) for target ([\w-]+)"
    match = re.search(pattern, content)
    if not match:
        say("Could not find SMILES and Target in the message, unable to proceed")
        return
    smiles = match.group(1)
    target = match.group(2)
    if not _is_valid_smiles(smiles):
        say(f"Invalid SMILES {smiles}, unable to proceed")
        return
    if target not in TargetTags.get_values():
        say(f"Invalid target {target}, unable to proceed")
        return
    # Canonicalise so the reply echoes a normalised SMILES form.
    smiles = _rdkit_smiles_roundtrip(smiles)
    gs = GATInference.from_latest_by_target(target)
    pred = gs.predict_from_smiles(smiles)
    say(f"Predicted pIC50 for {smiles} is {pred} using model {gs.model_name} :test_tube:")




@app.event("message")
def base_handle_message_events(body, logger):
logger.debug(body)
Expand Down
12 changes: 2 additions & 10 deletions falcbot/tests/test_falcbot.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,14 +2,6 @@
Unit and regression test for the falcbot package.
"""

# Import package, test suite, and other packages as needed
import sys

import pytest

import falcbot


def test_falcbot_imported():
"""Sample test, will always pass so long as import statement worked."""
assert "falcbot" in sys.modules
def test_placeholder():
    """Trivial smoke test so the suite always contains one passing test."""
    assert True
Loading