Skip to content

Commit

Permalink
Merge pull request #59 from AmpelAstro/gwO4
Browse files Browse the repository at this point in the history
GW O4
  • Loading branch information
jvansanten authored Jan 22, 2024
2 parents 824fdba + cb70aeb commit 7e1455c
Show file tree
Hide file tree
Showing 139 changed files with 112,920 additions and 22,822 deletions.
145 changes: 8 additions & 137 deletions .github/workflows/main.yml
Original file line number Diff line number Diff line change
Expand Up @@ -14,140 +14,11 @@ on:
- dev/*

jobs:
test:
runs-on: ubuntu-latest

services:
mongo:
image: mongo:6
ports:
- 27017:27017

steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: '3.10'
- name: Install dependencies (with all extras)
run: |
python -m pip install --upgrade poetry
poetry install $(cat pyproject.toml | awk '/^\s*$/{f=0};f{if ($1!="docs") printf(" -E %s",$1)};/\[tool\.poetry\.extras\]/{f=1}')
- run: poetry run pytest
env:
MONGO_PORT: ${{ job.services.mongo.ports[27017] }}

mypy:
runs-on: ubuntu-latest

steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: '3.10.6'
- name: Install dependencies (with all extras)
run: |
python -m pip install --upgrade poetry
poetry install $(cat pyproject.toml | awk '/^\s*$/{f=0};f{if ($1!="docs") printf(" -E %s",$1)};/\[tool\.poetry\.extras\]/{f=1}')
- run: poetry run mypy --namespace-packages -p ampel

check_version:
name: Tag version bump
runs-on: ubuntu-latest
# run only on pushes, not PRs
if: ${{ github.event_name == 'push' && github.base_ref == null }}
needs: [test, mypy]
outputs:
should_publish: ${{ steps.check.outputs.result }}
steps:
- uses: actions/setup-node@v3
with:
node-version: 14
- run: npm install toml@3.0.0
- name: Ensure tag for version bump
id: check
uses: actions/github-script@v6
with:
script: |
const toml = require('toml')
async function getVersion(ref) {
try {
response = await github.rest.repos.getContent({
repo: context.repo.repo,
owner: context.repo.owner,
path: 'pyproject.toml',
ref: ref
});
return toml.parse(Buffer.from(response.data.content, 'base64').toString())
.tool
.poetry
.version;
} catch (exc) {
if (exc.name == 'HttpError' && exc.status == 404) {
return null;
} else {
throw exc;
}
}
}
after = await getVersion(context.payload.after);
ref = `refs/tags/v${after}`
is_main = context.payload.ref === `refs/heads/${context.payload.repository.default_branch}`
// a tag matching the version was just pushed
let release = context.payload.ref == ref;
if (release) {
console.log(`Tag v${after} pushed (${context.sha})`);
}
// the version on the default branch changed; create a tag
if (!release && is_main) {
before = await getVersion(context.payload.before);
if (before !== after) {
console.log(`Version bumped: ${before} -> ${after}`);
try {
await github.rest.git.createRef({
owner: context.repo.owner,
repo: context.repo.repo,
ref,
sha: context.sha
});
console.log(`Tag v${after} created (${context.sha})`);
release = true;
} catch (exc) {
// tag already existed
if (exc.name == 'HttpError' && exc.status == 422) {
console.log(`Skipping publish (tag v${after} already exists)`);
release = false;
} else {
throw exc;
}
}
} else {
console.log(`Skipping publish (version is still ${before})`);
}
} else if (!is_main) {
console.log(`Skipping publish (push to ${context.payload.ref} != refs/heads/${context.payload.repository.default_branch})`);
}
return release;
pypi:
name: Publish to PyPI
runs-on: ubuntu-latest
needs: [check_version]
# NB: outputs are always strings; explicitly parse to get a boolean
if: ${{ fromJSON(needs.check_version.outputs.should_publish) }}

steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: '3.10'
- name: Install poetry
run: |
python -m pip install --upgrade poetry
- name: Publish
run: |
poetry config repositories.testpypi https://test.pypi.org/legacy/
poetry publish -n --build
env:
POETRY_PYPI_TOKEN_PYPI: ${{ secrets.PYPI_API_TOKEN }}
POETRY_PYPI_TOKEN_TESTPYPI: ${{ secrets.PYPI_TEST_API_TOKEN }}
# Delegate CI to the shared reusable workflow maintained in
# AmpelProject/Ampel-interface (the `uses:` form invokes a reusable workflow).
ci:
# NOTE(review): pinned to the mutable tag `ci-py12-v2`, not a commit SHA —
# updates to that tag change CI behavior here; confirm this is intended.
uses: AmpelProject/Ampel-interface/.github/workflows/ci.yml@ci-py12-v2
secrets:
# forwarded to the called workflow — presumably used for PyPI publishing; verify against ci.yml
PYPI_API_TOKEN: ${{ secrets.PYPI_API_TOKEN }}
with:
# assumes the reusable workflow starts a MongoDB service when true — TODO confirm
mongo: true
# quoted to avoid YAML float coercion (3.10 -> 3.1)
python-version: "3.10"
ruff: true
8 changes: 8 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -1,8 +1,16 @@
/ampel_contrib_hu.egg-info/
/ampel_hu_astro.egg-info/
/build/
/dist/
__pycache__
*.swp
*.swo
.DS_Store
.mypy_cache
*.gz
*.png
*.pdf
*.html
*.csv
*.pt
*.pkl
80 changes: 80 additions & 0 deletions ampel/contrib/hu/alert/DynamicShaperAlertConsumer.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,80 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# File : Ampel-HU-astro/ampel/alert/DynamicShaperAlertConsumer.py
# License : BSD-3-Clause
# Author : jno
# Date : 28.03.2023
# Last Modified Date: 28.03.2023
# Last Modified By : jno


from ampel.alert.AlertConsumer import AlertConsumer
from ampel.ingest.ChainedIngestionHandler import ChainedIngestionHandler
from ampel.log import AmpelLogger
from ampel.model.ingest.CompilerOptions import CompilerOptions
from ampel.model.UnitModel import UnitModel
from ampel.mongo.update.DBUpdatesBuffer import DBUpdatesBuffer


class DynamicShaperAlertConsumer(AlertConsumer):
    """
    AlertConsumer variant that patches the shaper configuration at runtime
    from the (dynamic) resources exposed by the alert supplier.

    Use case: a config parameter only comes into existence while a job is
    running and must still end up in the alert information stored in the DB.
    Since the shaper is instantiated by the ChainedIngestionHandler, it is
    sufficient to rewrite its ``UnitModel`` config just before the handler
    is built.

    :param shaper_map:
        Maps shaper config keys to alert-supplier resource names; for every
        resource that is present, its ``.value`` is copied into the
        corresponding shaper config entry.

    NOTE(review): original author notes suggest the longer-term plan is a
    dedicated ZiDataPointShaper variant that emits a shared (e.g. GW)
    datapoint, with this class renamed accordingly — confirm before relying
    on this design.
    """

    shaper_map: dict[str, str]

    # Overload of AlertConsumer.get_ingestion_handler
    def get_ingestion_handler(
        self, run_id: int, updates_buffer: DBUpdatesBuffer, logger: AmpelLogger
    ) -> ChainedIngestionHandler:
        # Collect dynamic config entries from whichever mapped resources exist
        resources = self.alert_supplier.resources
        dynamic_config = {}
        for config_key, resource_name in self.shaper_map.items():
            if resource_name in resources:
                dynamic_config[config_key] = resources[resource_name].value

        # Layer the dynamic entries on top of any static shaper config
        # (dynamic values win on key collisions)
        static_config = self.shaper.config
        if isinstance(static_config, dict):
            dynamic_config = static_config | dynamic_config

        patched_shaper = UnitModel(
            unit=self.shaper.unit,
            config=dynamic_config,
            secrets=self.shaper.secrets,
            override=self.shaper.override,
        )

        return ChainedIngestionHandler(
            self.context,
            patched_shaper,
            self.directives,
            updates_buffer,
            run_id,
            tier=0,
            logger=logger,
            database=self.database,
            trace_id={"alertconsumer": self._trace_id},
            compiler_opts=self.compiler_opts or CompilerOptions(),
        )
34 changes: 14 additions & 20 deletions ampel/contrib/hu/alert/NeoWisePhotometryAlertSupplier.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,22 +7,17 @@
# Last Modified Date:
# Last Modified By :

from typing import Any, Literal, Dict, List, Optional, Sequence, Any, Tuple
import sys, os
from bson import encode
import sys
from hashlib import blake2b
from ampel.ztf.util.ZTFIdMapper import to_ampel_id
from ampel.view.ReadOnlyDict import ReadOnlyDict
from ampel.alert.BaseAlertSupplier import BaseAlertSupplier
from ampel.alert.AmpelAlert import AmpelAlert
from ampel.protocol.AmpelAlertProtocol import AmpelAlertProtocol
from ampel.types import Tag
from typing import Literal

import numpy as np
import pandas as pd
import astropy
import json
from astropy.io import ascii
from io import BytesIO
from bson import encode

from ampel.alert.AmpelAlert import AmpelAlert
from ampel.alert.BaseAlertSupplier import BaseAlertSupplier
from ampel.view.ReadOnlyDict import ReadOnlyDict


class NeoWisePhotometryAlertSupplier(BaseAlertSupplier):
Expand Down Expand Up @@ -77,14 +72,13 @@ def __next__(self) -> AmpelAlert:
df["W1_flux_density_ul"].replace(1.0, "True", inplace=True)
df["W2_flux_density_ul"].replace(1.0, "True", inplace=True)

if "timewise_metadata" in d[1].keys():

if "timewise_metadata" in d[1]:
# calculate reduced chi2
timewise_metadata = d[1]["timewise_metadata"]
for b in ["W1", "W2"]:
timewise_metadata[f"{b}_red_chi2"] = (
timewise_metadata[f"{b}_chi2_to_med_flux_density"] /
(timewise_metadata[f"{b}_N_datapoints_flux_density"] - 1)
timewise_metadata[f"{b}_chi2_to_med_flux_density"]
/ (timewise_metadata[f"{b}_N_datapoints_flux_density"] - 1)
if timewise_metadata[f"{b}_N_datapoints_flux_density"] > 1
else np.nan
)
Expand Down Expand Up @@ -167,7 +161,7 @@ def __next__(self) -> AmpelAlert:
df_W2["mag_Npoints"] = df["W2_mag_Npoints"]
df_W1["flux_density_Npoints"] = df["W1_flux_density_Npoints"]
df_W2["flux_density_Npoints"] = df["W2_flux_density_Npoints"]
if "ra" in d[1].keys():
if "ra" in d[1]:
df_W1["ra"] = d[1]["ra"]
df_W1["dec"] = d[1]["dec"]
df_W2["ra"] = d[1]["ra"]
Expand Down Expand Up @@ -206,7 +200,7 @@ def __next__(self) -> AmpelAlert:

all_ids = b""
pps = []
for index, row in df_W1.iterrows():
for _, row in df_W1.iterrows():
pp = dict(row)
pp_hash = blake2b(encode(pp), digest_size=7).digest()
if self.counter:
Expand All @@ -217,7 +211,7 @@ def __next__(self) -> AmpelAlert:

all_ids += pp_hash
pps.append(ReadOnlyDict(pp))
for index, row in df_W2.iterrows():
for _, row in df_W2.iterrows():
pp = dict(row)
pp_hash = blake2b(encode(pp), digest_size=7).digest()
if self.counter:
Expand Down
Loading

0 comments on commit 7e1455c

Please sign in to comment.