Compare commits

12 Commits: fba68c6c90...6d3ef7214c

Author | SHA1 | Date
---|---|---
Samuel Ortion | 6d3ef7214c |
Samuel Ortion | 85c92cee06 |
Samuel Ortion | ca28fc6242 |
Samuel Ortion | 6f7c27d62e |
Samuel Ortion | f2f90d8406 |
Samuel Ortion | d6763f6e54 |
Samuel Ortion | 97c22977d0 |
Samuel Ortion | 7a175e7f0a |
Samuel Ortion | 4f09a2dd4e |
Samuel Ortion | 39233fe937 |
Samuel Ortion | daffa3ff96 |
Samuel Ortion | a47304a961 |
```diff
@@ -3,8 +3,9 @@
 
 - Add docker compose port
 - Improve install script
+- Add base uninstall script (need deeper work)
 - Add ttyd for systemd logging
-## v0.0.1-rc
+## v0.0.1-rc (2022-08-18)
 
 - Integrate BirdNET-Analyzer as submodule
 - Add birdnet_recording service
```
INSTALL.md (22 lines changed)
````diff
@@ -2,24 +2,6 @@
 
 This guide allow you to install BirdNET-stream step by step on your debian based system.
 
-For a one-liner installation, you can use the following command:
-
-```bash
-curl -sL https://raw.githubusercontent.com/UncleSamulus/BirdNET-stream/main/install.sh | bash
-```
-
-For debug purposes, you can use the following command, it will log the installation steps to the console:
-
-```bash
-DEBUG=1 ./install.sh
-```
-
-If you need to use a specific branch (e.g. dev), you can use the following command:
-
-```bash
-BRANCH=dev ./install.sh
-```
-
 ## Requirements
 
 - git
````
````diff
@@ -129,7 +111,7 @@ sudo mv /composer.phar /usr/local/bin/composer
 
 ```bash
 cd www
-composer install
+composer install --no-dev --prefer-dist --optimize-autoloader
 ```
 
 ### Install nodejs and npm
````
````diff
@@ -147,7 +129,7 @@ nvm use 16
 ```
 
 ```bash
-sudo dnf install npm
+sudo apt-get install npm
 ```
 
 ```bash
````
```diff
@@ -42,7 +42,7 @@ On debian based systems (tested on Debian Bullseye), the following command shoul
 # Change to your installation directory here, /home/$USER/Documents/BirdNET-stream for instance, or /opt/birdnet-stream, or whatever
 cd /path/to/installation/directory
 # Download installation script
-curl -0 https://raw.githubusercontent.com/UncleSamulus/BirdNET-stream/main/install.sh
+curl -O https://raw.githubusercontent.com/UncleSamulus/BirdNET-stream/main/install.sh
 # Run installation script:
 chmod +x ./install.sh
 ./install.sh
```
```diff
@@ -19,6 +19,4 @@ PYTHON_VENV="./.venv/birdnet-stream"
 WORKDIR="/home/$USER/BirdNET-stream"
 # Database location
 DATABASE="./var/db.sqlite"
+# DATABASE="mysql://birdnet:secret@localhost:3306/birdnet_observations" # uncomment and change 'secret' if you want to use a mariadb (mysql) database instea of sqlite
-DAEMON_USER="birdnet"
-DAEMON_PASSWORD="secret"
```
```diff
@@ -3,9 +3,7 @@ set -e
 
 DEBUG=${DEBUG:-1}
 debug() {
-    if [ $DEBUG -eq 1 ]; then
-        echo "$1"
-    fi
+    [[ $DEBUG -eq 1 ]] && echo "$@"
 }
 
 config_filepath="./config/birdnet.conf"
```
```diff
@@ -64,7 +62,9 @@ check_prerequisites() {
 
 # Get array of audio chunks to be processed
 get_chunk_list() {
-    find "${CHUNK_FOLDER}/in" -type f -name '*.wav' -exec basename {} \; ! -size 0 | sort
+    chunk_list=($(ls ${CHUNK_FOLDER}/in))
+    echo "${chunk_list}"
+    # find "${CHUNK_FOLDER}/in" -type f -name '*.wav' -exec basename {} \; ! -size 0 | sort
 }
 
 # Perform audio chunk analysis on one chunk
```
```diff
@@ -75,13 +75,22 @@ analyze_chunk() {
     mkdir -p "$output_dir"
     date=$(echo $chunk_name | cut -d'_' -f2)
     week=$(./daemon/weekof.sh $date)
-    $PYTHON_EXECUTABLE ./analyzer/analyze.py --i $chunk_path --o "$output_dir/model.out.csv" --lat $LATITUDE --lon $LONGITUDE --week $week --min_conf $CONFIDENCE --threads 4 --rtype csv
+    if [[ ! -z "${THREADS}" ]]; then
+        threads="--threads ${THREADS}"
+    else
+        threads=""
+    fi
+    $PYTHON_EXECUTABLE ./analyzer/analyze.py --i $chunk_path --o "$output_dir/model.out.csv" --lat $LATITUDE --lon $LONGITUDE --week $week --min_conf $CONFIDENCE $threads --rtype csv
     debug "Model output written to $output_dir/model.out.csv"
+    bash ./daemon/birdnet_output_to_sql.sh "$output_dir/model.out.csv"
+    debug "Dumped to SQL database"
 }
 
 # Perform audio chunk analysis on all recorded chunks
 analyze_chunks() {
-    for chunk_name in $(get_chunk_list); do
+    local chunks
+    chunks="${1}"
+    for chunk_name in "${chunks}"; do
         if [[ -f "${CHUNK_FOLDER}/out/$chunk_name.d/model.out.csv" ]]; then
             debug "Skipping $chunk_name, as it has already been analyzed"
         else
```
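With this change, `--threads` is only passed to BirdNET-Analyzer when a `THREADS` value is available (previously the count was hard-coded to 4). A minimal sketch of how that could be configured, assuming `THREADS` is read from the sourced `config/birdnet.conf`:

```bash
# config/birdnet.conf — illustrative value; leave THREADS unset to omit --threads entirely
THREADS=4
```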
```diff
@@ -98,4 +107,4 @@ check_prerequisites
 chunks=$(get_chunk_list)
 
 # Analyze all chunks in working directory
-analyze_chunks $chunks
+analyze_chunks "$chunks"
```
```diff
@@ -1,15 +1,13 @@
 #! /usr/bin/env bash
-# Extract observations from a model output folder
+# Extract observations from a model output file into SQL database
 #
 
 DEBUG=${DEBUG:-1}
 set -e
 # set -x
-
+DEBUG=${DEBUG:-1}
 debug() {
-    if [ $DEBUG -eq 1 ]; then
-        echo "$1"
-    fi
+    [[ $DEBUG -eq 1 ]] && echo "$@"
 }
 
 # Load bash library to deal with BirdNET-stream database
```
```diff
@@ -18,16 +16,6 @@ source ./daemon/database/scripts/database.sh
 # Load config
 source ./config/birdnet.conf
 # Check config
-if [[ -z ${CHUNK_FOLDER} ]]; then
-    echo "CHUNK_FOLDER is not set"
-    exit 1
-else
-    if [[ ! -d ${CHUNK_FOLDER}/out ]]; then
-        echo "CHUNK_FOLDER does not exist: ${CHUNK_FOLDER}/out"
-        echo "Cannot extract observations."
-        exit 1
-    fi
-fi
 
 if [[ -z ${LATITUDE} ]]; then
     echo "LATITUDE is not set"
```
```diff
@@ -39,10 +27,6 @@ if [[ -z ${LONGITUDE} ]]; then
     exit 1
 fi
 
-model_outputs() {
-    ls ${CHUNK_FOLDER}/out/*/model.out.csv
-}
-
 source_wav() {
     model_output_path=$1
     model_output_dir=$(dirname $model_output_path)
```
```diff
@@ -107,13 +91,6 @@ save_observations() {
     done
 }
 
-main() {
-    # # Remove all junk observations
-    # ./daemon/birdnet_clean.sh
-    # Get model outputs
-    for model_output in $(model_outputs); do
-        save_observations $model_output
-    done
-}
+model_output_path="$1"
 
-main
+save_observations $model_output_path
```
```diff
@@ -17,5 +17,16 @@ if [ -z "$DATABASE" ]; then
     DATABASE="./var/db.sqlite"
 fi
 
-# Create database according to schema in structure.sql
-sqlite3 "$DATABASE" < ./daemon/database/structure.sql
+if [[ $DATABASE = "mysql://"* ]]; then
+    # Split mysql uri into user, password, host, port, and database
+    MYSQL_ADDRESS=$(echo "$DATABASE" | sed 's/mysql:\/\///g')
+    MYSQL_CREDENTIALS=$(echo "$MYSQL_ADDRESS" | cut -d@ -f1)
+    MYSQL_USER=$(echo "$MYSQL_CREDENTIALS" | cut -d: -f1)
+    MYSQL_PASSWORD=$(echo "$MYSQL_CREDENTIALS" | cut -d: -f2)
+    MYSQL_HOST=$(echo "$MYSQL_ADDRESS" | cut -d@ -f2 | cut -d: -f1)
+    MYSQL_PORT=$(echo "$MYSQL_ADDRESS" | cut -d@ -f2 | cut -d: -f2 | cut -d/ -f1)
+    MYSQL_DATABASE=$(echo "$MYSQL_ADDRESS" | cut -d/ -f2)
+    mysql -u$MYSQL_USER -p$MYSQL_PASSWORD -h$MYSQL_HOST -P$MYSQL_PORT -D$MYSQL_DATABASE < ./daemon/database/structure-mysql.sql
+else
+    sqlite3 $DATABASE < ./daemon/database/structure-sqlite.sql
+fi
```
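For reference, a standalone sketch (not part of the patch) of what this `sed`/`cut` splitting yields for the example URI used in `config/birdnet.conf`:

```bash
#!/usr/bin/env bash
# Illustration of the URI parsing used in the script above.
DATABASE="mysql://birdnet:secret@localhost:3306/birdnet_observations"

MYSQL_ADDRESS=$(echo "$DATABASE" | sed 's/mysql:\/\///g')                       # birdnet:secret@localhost:3306/birdnet_observations
MYSQL_CREDENTIALS=$(echo "$MYSQL_ADDRESS" | cut -d@ -f1)                        # birdnet:secret
MYSQL_USER=$(echo "$MYSQL_CREDENTIALS" | cut -d: -f1)                           # birdnet
MYSQL_PASSWORD=$(echo "$MYSQL_CREDENTIALS" | cut -d: -f2)                       # secret
MYSQL_HOST=$(echo "$MYSQL_ADDRESS" | cut -d@ -f2 | cut -d: -f1)                 # localhost
MYSQL_PORT=$(echo "$MYSQL_ADDRESS" | cut -d@ -f2 | cut -d: -f2 | cut -d/ -f1)   # 3306
MYSQL_DATABASE=$(echo "$MYSQL_ADDRESS" | cut -d/ -f2)                           # birdnet_observations

echo "${MYSQL_USER}@${MYSQL_HOST}:${MYSQL_PORT}/${MYSQL_DATABASE}"
```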
```diff
@@ -8,10 +8,30 @@ source ./config/birdnet.conf
 # Create database in case it was not created yet
 ./daemon/database/scripts/create.sh
 
-DATABASE=${DATABASE:-"./var/db.sqlite"}
+# Check if database location is specified
+if [ -z "$DATABASE" ]; then
+    echo "DATABASE location not specified"
+    echo "Defaults to ./var/db.sqlite"
+    DATABASE="./var/db.sqlite"
+fi
 
 query() {
-    sqlite3 -cmd ".timeout 1000" $DATABASE "$1"
+    local stmt
+    stmt="$1"
+    if [[ $DATABASE = "mysql://"* ]]; then
+        # Split mysql uri into user, password, host, port, and database
+        MYSQL_ADDRESS=$(echo "$DATABASE" | sed 's/mysql:\/\///g')
+        MYSQL_CREDENTIALS=$(echo "$MYSQL_ADDRESS" | cut -d@ -f1)
+        MYSQL_USER=$(echo "$MYSQL_CREDENTIALS" | cut -d: -f1)
+        MYSQL_PASSWORD=$(echo "$MYSQL_CREDENTIALS" | cut -d: -f2)
+        MYSQL_HOST=$(echo "$MYSQL_ADDRESS" | cut -d@ -f2 | cut -d: -f1)
+        MYSQL_PORT=$(echo "$MYSQL_ADDRESS" | cut -d@ -f2 | cut -d: -f2 | cut -d/ -f1)
+        MYSQL_DATABASE=$(echo "$MYSQL_ADDRESS" | cut -d/ -f2)
+        mysql -u$MYSQL_USER -p$MYSQL_PASSWORD -h$MYSQL_HOST -P$MYSQL_PORT -D$MYSQL_DATABASE -e "$stmt"
+    else
+        sqlite3 -cmd ".timeout 1000" "$DATABASE" "$stmt"
+    fi
+
 }
 
 get_location_id() {
```
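Callers of `query` thus stay backend-agnostic: the same SQL statement is routed to MariaDB or SQLite depending on how `DATABASE` is set. A hedged usage sketch (the `observation` table comes from the schema files changed below):

```bash
source ./daemon/database/scripts/database.sh

# Count stored detections, whichever backend DATABASE points at
query "SELECT COUNT(*) FROM observation;"
```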
```diff
@@ -0,0 +1,31 @@
+/** Database structure for BirdNET-stream SQLite*/
+
+/** Taxon table */
+CREATE TABLE IF NOT EXISTS taxon (
+    taxon_id INTEGER PRIMARY KEY NOT NULL AUTO_INCREMENT,
+    scientific_name TEXT NOT NULL,
+    common_name TEXT NOT NULL
+);
+
+/** Location table */
+CREATE TABLE IF NOT EXISTS location (
+    location_id INTEGER PRIMARY KEY NOT NULL AUTO_INCREMENT,
+    latitude REAL NOT NULL,
+    longitude REAL NOT NULL
+);
+
+/** Observation table */
+CREATE TABLE IF NOT EXISTS observation (
+    `observation_id` INTEGER PRIMARY KEY NOT NULL AUTO_INCREMENT,
+    `audio_file` TEXT NOT NULL,
+    `start` REAL NOT NULL,
+    `end` REAL NOT NULL,
+    `taxon_id` INTEGER NOT NULL,
+    `location_id` INTEGER NOT NULL,
+    `date` TEXT NOT NULL,
+    `notes` TEXT,
+    `confidence` REAL NOT NULL,
+    `verified` BOOLEAN DEFAULT 0 CHECK (`verified` IN (0, 1)),
+    FOREIGN KEY(taxon_id) REFERENCES taxon(taxon_id),
+    FOREIGN KEY(location_id) REFERENCES location(location_id)
+);
```
```diff
@@ -2,21 +2,21 @@
 
 /** Taxon table */
 CREATE TABLE IF NOT EXISTS taxon (
-    taxon_id INTEGER PRIMARY KEY,
+    taxon_id INTEGER PRIMARY KEY NOT NULL,
     scientific_name TEXT NOT NULL,
     common_name TEXT NOT NULL
 );
 
 /** Location table */
 CREATE TABLE IF NOT EXISTS location (
-    location_id INTEGER PRIMARY KEY,
+    location_id INTEGER PRIMARY KEY NOT NULL,
     latitude REAL NOT NULL,
     longitude REAL NOT NULL
 );
 
 /** Observation table */
 CREATE TABLE IF NOT EXISTS observation (
-    `observation_id` INTEGER PRIMARY KEY,
+    `observation_id` INTEGER PRIMARY KEY NOT NULL,
     `audio_file` TEXT NOT NULL,
     `start` REAL NOT NULL,
     `end` REAL NOT NULL,
```
```diff
@@ -25,7 +25,7 @@ CREATE TABLE IF NOT EXISTS observation (
     `date` TEXT NOT NULL,
     `notes` TEXT,
     `confidence` REAL NOT NULL,
-    `verified` BOOLEAN NOT NULL CHECK (`verified` IN (0, 1)) DEFAULT 0,
+    `verified` BOOLEAN DEFAULT 0 CHECK (`verified` IN (0, 1)),
     FOREIGN KEY(taxon_id) REFERENCES taxon(taxon_id),
     FOREIGN KEY(location_id) REFERENCES location(location_id)
 );
```
```diff
@@ -6,66 +6,79 @@ import matplotlib.pyplot as plt
 from matplotlib.colors import LogNorm
 import seaborn as sns
 from datetime import datetime
+import os
+import glob
 
 CONFIG = {
 "readings": 10,
 "palette": "Greens",
 "db": "./var/db.sqlite",
-"date": datetime.now().strftime("%Y-%m-%d")
-# "date": "2022-08-15"
+"date": datetime.now().strftime("%Y-%m-%d"),
+"charts_dir": "./var/charts"
 }
 
-db = sqlite3.connect(CONFIG['db'])
+db = None
 
-df = pd.read_sql_query("""SELECT common_name, date, location_id, confidence
+def get_database():
+global db
+if db is None:
+db = sqlite3.connect(CONFIG["db"])
+return db
+
+def chart(date):
+db = get_database()
+df = pd.read_sql_query(f"""SELECT common_name, date, location_id, confidence
 FROM observation
 INNER JOIN taxon
-ON observation.taxon_id = taxon.taxon_id""", db)
+ON observation.taxon_id = taxon.taxon_id
+WHERE STRFTIME("%Y-%m-%d", `date`) = '{date}'""", db)
 df['date'] = pd.to_datetime(df['date'])
 df['hour'] = df['date'].dt.hour
 df['date'] = df['date'].dt.date
 df['date'] = df['date'].astype(str)
-df_on_date = df[df['date'] == CONFIG['date']]
+df_on_date = df[df['date'] == date]
 
 top_on_date = (df_on_date['common_name'].value_counts()[:CONFIG['readings']])
 if top_on_date.empty:
-print("No observations on {}".format(CONFIG['date']))
-exit()
+print("No observations on {}".format(date))
+return
+else:
+print(f"Found observations on {date}")
 
 df_top_on_date = df_on_date[df_on_date['common_name'].isin(top_on_date.index)]
 
 # Create a figure with 2 subplots
 fig, axs = plt.subplots(1, 2, figsize=(20, 5), gridspec_kw=dict(
 width_ratios=[2, 6]))
 plt.subplots_adjust(left=None, bottom=None, right=None,
 top=None, wspace=0, hspace=0)
 
 # Get species frequencies
 frequencies_order = pd.value_counts(df_top_on_date['common_name']).iloc[:CONFIG['readings']].index
 # Get min max confidences
 confidence_minmax = df_top_on_date.groupby('common_name')['confidence'].max()
 confidence_minmax = confidence_minmax.reindex(frequencies_order)
 # Norm values for color palette
 norm = plt.Normalize(confidence_minmax.values.min(),
 confidence_minmax.values.max())
 
 colors = plt.cm.Greens(norm(confidence_minmax))
 plot = sns.countplot(y='common_name', data=df_top_on_date, palette=colors, order=frequencies_order, ax=axs[0])
 
 plot.set(ylabel=None)
 plot.set(xlabel="Detections")
 
 heat = pd.crosstab(df_top_on_date['common_name'], df_top_on_date['hour'])
 # Order heatmap Birds by frequency of occurrance
 heat.index = pd.CategoricalIndex(heat.index, categories=frequencies_order)
 heat.sort_index(level=0, inplace=True)
 
 hours_in_day = pd.Series(data=range(0, 24))
 heat_frame = pd.DataFrame(data=0, index=heat.index, columns=hours_in_day)
 heat = (heat + heat_frame).fillna(0)
 
 # Generate heatmap plot
 plot = sns.heatmap(
 heat,
 norm=LogNorm(),
 annot=True,
```
```diff
@@ -80,16 +93,32 @@ plot = sns.heatmap(
 linecolor="Grey",
 ax=axs[1],
 yticklabels=False)
 plot.set_xticklabels(plot.get_xticklabels(), rotation=0, size=7)
 
 for _, spine in plot.spines.items():
 spine.set_visible(True)
 
 plot.set(ylabel=None)
 plot.set(xlabel="Hour of day")
-plt.suptitle(f"Top {CONFIG['readings']} species on {CONFIG['date']}", fontsize=14)
+plt.suptitle(f"Top {CONFIG['readings']} species on {date}", fontsize=14)
 plt.text(15, 11, f"(Updated on {datetime.now().strftime('%Y/%m-%d %H:%M')})")
-plt.savefig(f"./var/charts/chart_{CONFIG['date']}.png", dpi=300)
-plt.close()
+plt.savefig(f"./var/charts/chart_{date}.png", dpi=300)
+print(f"Plot for {date} saved.")
+plt.close()
 
-db.close()
+def main():
+done_charts = glob.glob(f"{CONFIG['charts_dir']}/*.png")
+last_modified = max(done_charts, key=os.path.getctime)
+last_modified_date = last_modified.split("_")[-1].split(".")[0]
+missing_dates = pd.date_range(start=last_modified_date, end=CONFIG['date'], freq='D')
+print(missing_dates)
+for missing_date in missing_dates:
+date = missing_date.strftime("%Y-%m-%d")
+chart(date)
+chart(CONFIG['date'])
+if db is not None:
+db.close()
+print("Done.")
+
+if __name__ == "__main__":
+main()
```
```diff
@@ -1,13 +0,0 @@
-[Unit]
-Description=BirdNET-stream miner service
-
-[Service]
-Type=simple
-User=<USER>
-Group=<GROUP>
-WorkingDirectory=<DIR>
-ExecStart=bash ./daemon/birdnet_miner.sh
-RemainAfterExit=yes
-
-[Install]
-WantedBy=multi-user.target
```
```diff
@@ -1,9 +0,0 @@
-[Unit]
-Description=BirdNET-stream miner Timer
-
-[Timer]
-OnCalendar=*:0/15
-Unit=birdnet_miner.service
-
-[Install]
-WantedBy=timers.target
```
```diff
@@ -1,19 +1,19 @@
 version: '3.9'
 
 services:
-  # recording:
-  #   container_name: birdnet_recording
-  #   build:
-  #     context: .
-  #     dockerfile: ./docker/recording/Dockerfile
-  #   restart: unless-stopped
-  #   environment:
-  #     - CHUNK_FOLDER=${CHUNK_FOLDER:-/media/birdnet/records}
-  #   volumes:
-  #     - ${RECORDS_DIR:-/media/birdnet/records}:${RECORS_FOLDER:-/media/birdnet/records}
-  #   # Allow container to access to the hosts microphone
-  #   devices:
-  #     - /dev/snd:/dev/snd
+  recording:
+    container_name: birdnet_recording
+    build:
+      context: .
+      dockerfile: ./docker/recording/Dockerfile
+    restart: unless-stopped
+    volumes:
+      - birdnet_app:${PROJECT_ROOT:-/opt/birdnet}
+      - birdnet_records:${RECORDS_DIR:-/media/birdnet/records}
+    # Allow container to access to the hosts microphone
+    devices:
+      - /dev/snd:/dev/snd
 
   # analyzer:
   #   container_name: birdnet_analyzer
   #   build:
```
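To try only the newly enabled service, something like the following should work (a sketch; `recording` is the service name from the compose file above):

```bash
# Build and start just the recording container in the background
docker compose up -d --build recording
# Follow its logs to check that the microphone is being captured
docker compose logs -f recording
```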
```diff
@@ -11,15 +11,9 @@ ENV DEBIAN_FRONTEND noninteractive
 RUN apt-get update && apt-get upgrade -y \
     && apt-get install -y \
     --no-install-recommends \
-    libasound2 \
-    alsa-utils \
-    libsndfile1-dev \
-    && apt-get install -y ffmpeg \
+    ffmpeg \
     && apt-get clean
 
-RUN mkdir -p /opt/birdnet/
-WORKDIR /opt/birdnet/
-COPY config ./config
-COPY daemon/birdnet_recording.sh /usr/local/bin/birdnet_recording.sh
+WORKDIR /opt/birdnet
 
-ENTRYPOINT ["/usr/local/bin/birdnet_recording.sh"]
+ENTRYPOINT ["./daemon/birdnet_recording.sh"]
```
````diff
@@ -0,0 +1,58 @@
+# Setting up the database
+
+There is two database managment systems available: sqlite or mariadb (mysql).
+
+## sqlite
+
+To use sqlite, simply install the sqlite3 package, if it is not already installed on the machine that runs BirdNET-stream.
+
+```bash
+sudo apt-get install sqlite3
+```
+
+Then fill `config/birdnet.conf` with the proper DATABASE value (you may use any database location):
+
+```bash
+DATABASE="./var/db.sqlite"
+```
+
+## mariadb
+
+To use mariadb, you need to install the mariadb-server package.
+
+```bash
+sudo apt-get install mariadb-server
+```
+
+Then, populate the `config/birdnet.conf` file with the proper DATABASE uri:
+
+```bash
+DATABASE="mysql://user:password@localhost/birdnet_observations"
+```
+
+## Symfony configuration
+
+For both method you need to adapt the file `www/.env.local` to suit your new configuration.
+
+```bash
+cd www
+# If .env.local does not exists:
+cp .env.local.example .env.local
+```
+
+```text
+# .env.local
+# for sqlite (example)
+DATABASE_DEFAULT_URL=sqlite:///%kernel.project_dir%/./var/db-default.sqlite
+DATABASE_OBSERVATIONS_URL=sqlite:///%kernel.project_dir%/../var/db.sqlite
+# for mariadb (example)
+DATABASE_DEFAULT_URL=mysql://user:password@localhost/birdnet_default
+DATABASE_OBSERVATIONS_URL=mysql://user:password@localhost/birdnet_observations
+```
+
+## PHP modules
+
+For symfony to work, make sure you have the required modules according to each method:
+
+- pdo_sqlite
+- pdo_mysql
````
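The new page stops at installing `mariadb-server`; the database and user referenced by the URI still have to exist. A hedged sketch of that one-time setup (user, password and database name are illustrative, matching the example URI above):

```bash
sudo mysql -e "CREATE DATABASE IF NOT EXISTS birdnet_observations;"
sudo mysql -e "CREATE USER IF NOT EXISTS 'user'@'localhost' IDENTIFIED BY 'password';"
sudo mysql -e "GRANT ALL PRIVILEGES ON birdnet_observations.* TO 'user'@'localhost'; FLUSH PRIVILEGES;"
```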
````diff
@@ -34,10 +34,13 @@ Then, create your dotenv file and populate it with your own configuration (for i
 cp .env.example .env
 ```
 
-Then, run docker-compose:
+You may need to adapt the listening ports of the services or other configuration parameters.
+In general all variables stated with ${VARIABLE:-default} inside [../docker-compose.yml](../docker-compose.yml) can be override in the .env file using `VARIABLE=value`.
+
+Once that is done, you can build and start docker services:
 
 ```bash
-# Build image (first time only)
+# Build images (first time only, and for each update)
 docker compose build
 # Run
 docker compose up # add `-d`, to run in background
````
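As an illustration of that override mechanism (the variable names come from the compose file shown earlier; the paths are placeholders):

```bash
# .env — example overrides for the ${VARIABLE:-default} values in docker-compose.yml
PROJECT_ROOT=/opt/birdnet
RECORDS_DIR=/media/birdnet/records
```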
```diff
@@ -5,7 +5,7 @@ set -e
 
 DEBUG=${DEBUG:-0}
 
-REQUIREMENTS="git wget ffmpeg python3 python3-pip python3-dev python3-venv gzip sqlite3"
+REQUIREMENTS="git wget ffmpeg python3 python3-pip python3-dev python3-venv zip unzip sqlite3"
 REPOSITORY=${REPOSITORY:-https://github.com/UncleSamulus/BirdNET-stream.git}
 BRANCH=${BRANCH:-main}
 WORKDIR="$(pwd)/BirdNET-stream"
```
```diff
@@ -67,7 +67,7 @@ install_birdnetstream_services() {
     DIR="$WORKDIR"
     cd "$WORKDIR"
     debug "Setting up BirdNET stream systemd services"
-    services="birdnet_recording.service birdnet_analyzis.service birdnet_miner.timer birdnet_miner.service birdnet_plotter.service birdnet_plotter.timer"
+    services="birdnet_recording.service birdnet_analyzis.service birdnet_plotter.service birdnet_plotter.timer"
     read -r -a services_array <<<"$services"
     for service in ${services_array[@]}; do
         sudo cp "daemon/systemd/templates/$service" "/etc/systemd/system/"
```
```diff
@@ -78,7 +78,7 @@ install_birdnetstream_services() {
     done
     sudo sed -i "s|<VENV>|$WORKDIR/$PYTHON_VENV|g" "/etc/systemd/system/birdnet_plotter.service"
     sudo systemctl daemon-reload
-    enabled_services="birdnet_recording.service birdnet_analyzis.service birdnet_miner.timer birdnet_plotter.timer"
+    enabled_services="birdnet_recording.service birdnet_analyzis.service birdnet_plotter.timer"
     read -r -a services_array <<<"$services"
     for service in ${services_array[@]}; do
         debug "Enabling $service"
```
```diff
@@ -163,7 +163,7 @@ setup_http_server() {
     fi
     debug "Enable birdnet.lan domain"
     sudo ln -s /etc/nginx/sites-available/birdnet-stream.conf /etc/nginx/sites-enabled/birdnet-stream.conf
-    debug "Info: Please edit /etc/nginx/sites-available/birdnet-stream.conf to set the correct server name and paths"
+    debug "INFO: Please edit /etc/nginx/sites-available/birdnet-stream.conf to set the correct server name and paths"
     debug "Setup nginx variables the best way possible"
     sudo sed -i "s|<SYMFONY_PUBLIC>|$WORKDIR/www/public/|g" /etc/nginx/sites-available/birdnet-stream.conf
     sudo sed -i "s|<RECORDS_DIR>|$CHUNK_FOLDER/out|g" /etc/nginx/sites-available/birdnet-stream.conf
```
```diff
@@ -38,3 +38,12 @@ uninstall_webapp() {
     sudo unlink /etc/nginx/sites-enabled/birdnet-stream.conf
     sudo systemctl restart nginx
 }
+
+main() {
+    echo "WARNING: This will remove all BirdNET-stream related files and services. \
+    Note that it may forget some special configuration."
+    uninstall_webapp
+    uninstall_birdnet_services
+}
+
+main
```
```diff
@@ -0,0 +1,35 @@
+#! /usr/bin/env bash
+# Extract observations from a model output folder
+#
+set -e
+# set -x
+
+DEBUG=${DEBUG:-1}
+debug() {
+    [[ $DEBUG -eq 1 ]] && echo "$@"
+}
+if [[ -f ./config/birdnet.conf ]]; then
+    source ./config/birdnet.conf
+else
+    debug "./config/birdnet.conf does not exist"
+    exit 1
+fi
+if [[ ! -d ${CHUNK_FOLDER} ]]; then
+    debug "CHUNK_FOLDER ${CHUNK_FOLDER} does not exist"
+    exit 1
+fi
+
+model_outputs() {
+    ls ${CHUNK_FOLDER}/out/*/model.out.csv
+}
+
+main() {
+    # # Remove all junk observations
+    # ./daemon/birdnet_clean.sh
+    # Get model outputs
+    for model_output in $(model_outputs); do
+        ./daemon/birdnet_output_to_sql.sh "$model_output"
+    done
+}
+
+main
```
```diff
@@ -25,11 +25,15 @@ class HomeController extends AbstractController
      * @Route("", name="home")
      * @Route("/{_locale<%app.supported_locales%>}/", name="home_i18n")
      */
-    public function index()
+    public function index(Request $request)
     {
+        $date = $request->get("on");
+        if ($date == null) {
+            $date = date("Y-m-d");
+        }
         return $this->render('index.html.twig', [
-            "stats" => $this->get_stats(),
-            "charts" => $this->last_chart_generated(),
+            "stats" => $this->get_stats($date),
+            "charts" => $this->last_chart_generated($date),
         ]);
     }
 
```
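In effect the dashboard date becomes selectable from the URL. A hedged example (the host name depends on your nginx setup, e.g. the birdnet.lan domain configured by the install script):

```bash
# Today's statistics (default behaviour)
curl "http://birdnet.lan/"
# Statistics for a specific day, via the new "on" query parameter
curl "http://birdnet.lan/?on=2022-08-15"
```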
```diff
@@ -42,11 +46,12 @@ class HomeController extends AbstractController
         return $this->render('about/index.html.twig', []);
     }
 
-    private function get_stats()
+    private function get_stats($date)
     {
         $stats = array();
         $stats["most-recorded-species"] = $this->get_most_recorded_species();
         $stats["last-detected-species"] = $this->get_last_recorded_species();
+        $stats["number-of-species-detected"] = $this->get_number_of_species_detected($date);
         return $stats;
     }
 
```
```diff
@@ -86,6 +91,27 @@ class HomeController extends AbstractController
         return $species;
     }
 
+    private function get_number_of_species_detected($date)
+    {
+        $count = 0;
+        $sql = "SELECT COUNT(`taxon_id`) AS contact_count
+                FROM `observation`
+                WHERE STRFTIME('%Y-%m-%d', `date`) = :date
+                GROUP BY `taxon_id`";
+        try {
+            $stmt = $this->connection->prepare($sql);
+            $stmt->bindValue(":date", $date);
+            $result = $stmt->executeQuery();
+            $output = $result->fetchAllAssociative();
+            if ($output != null) {
+                $count = $output[0]["contact_count"];
+            }
+        } catch (\Exception $e) {
+            $this->logger->error($e->getMessage());
+        }
+        return $count;
+    }
+
     private function last_chart_generated()
     {
         $files = glob($this->getParameter('kernel.project_dir') . '/../var/charts/*.png');
```
```diff
@@ -1,49 +1,77 @@
 <div id="stats">
-<h2>{{ "Quick Stats" | trans }}</h2>
+<h2>
+{{ 'Quick Stats'|trans }}
+</h2>
 <ul>
-<li class="most-recorded-species">
-{{ "Most recorded species" | trans }}:
-{% if stats["most-recorded-species"] is defined and stats["most-recorded-species"]|length > 0 %}
+<li class="stat">
+{{ 'Most recorded species'|trans }}:{% if
+stats['most-recorded-species'] is defined
+and (stats['most-recorded-species']|length) > 0 %}
 <span class="scientific-name">
-{{ stats["most-recorded-species"]["scientific_name"] }}
+{{ stats['most-recorded-species']['scientific_name'] }}
 </span>
-(<span class="common_name">{{ stats["most-recorded-species"]["common_name"] }}</span>)
-{{ "with" | trans }}
+(<span class="common_name">
+{{ stats['most-recorded-species']['common_name'] }}
+</span>)
+{{ 'with'|trans }}
 <span class="observation-count">
-{{ stats["most-recorded-species"]["contact_count"] }}
+{{ stats['most-recorded-species']['contact_count'] }}
 </span>
-{{ "contacts" | trans }}.
+{{ 'contacts'|trans }}.
 {% else %}
-{{ "No species in database." | trans }}
+{{ 'No species in database.'|trans }}
 {% endif %}
 </li>
-<li class="last-recorded-species">
-{{ "Last detected species" | trans }}:
-{% if stats["last-detected-species"] is defined and stats["last-detected-species"]|length > 0 %}
+<li class="stat">
+{{ 'Last detected species'|trans }}:{% if
+stats['last-detected-species'] is defined
+and (stats['last-detected-species']|length) > 0 %}
 <span class="scientific-name">
-{{ stats["last-detected-species"]["scientific_name"] }}
+{{ stats['last-detected-species']['scientific_name'] }}
 </span>
-(<span class="common_name">{{ stats["last-detected-species"]["common_name"] }}</span>)
-{{ "with" | trans }}
+(<span class="common_name">
+{{ stats['last-detected-species']['common_name'] }}
+</span>)
+{{ 'with'|trans }}
 <span class="confidence">
-{{ stats["last-detected-species"]["confidence"] }}
+{{ stats['last-detected-species']['confidence'] }}
 </span>
-{{ "AI confidence" | trans }}
+{{ 'AI confidence'|trans }}
 <span class="datetime">
-{% set date = stats["last-detected-species"]["date"] %}
-{% if date | date("Y-m-d") == "now" | date("Y-m-d") %}
-{{ "today" | trans }}
+{% set date = stats['last-detected-species']['date'] %}
+{% if (date|date('Y-m-d')) == ('now'|date('Y-m-d')) %}
+{{ 'today'|trans }}
 {% else %}
-{{ "on" | trans }}
-{{ date | format_datetime("full", "none") }}
-{% endif %}
-at
-<span class="time">
-{{ date | date("H:i") }}
-</span>
+{{ 'on'|trans }}
+{{ date|format_datetime('full', 'none') }}
+{% endif %}at
+<span class="time">{{ date|date('H:i') }}</span>
 </span>.
 {% else %}
-{{ "No species in database" | trans }}
+{{ 'No species in database'|trans }}
+{% endif %}
+</li>
+<li class="stat">
+{% set today = 'now'|date('Y-m-d') %}
+{% set date = app.request.get('on') %}
+{% if
+stats['number-of-species-detected'] is defined
+and stats['number-of-species-detected'] > 0 %}
+{% if today == date %}
+{{ 'Number of species detected today: '|trans }}
+{% else %}
+{{ 'Number of species detected on '|trans }}
+{{ date|format_datetime('full', 'none') }}:
+{% endif %}
+<span>{{ stats['number-of-species-detected'] }}</span>.
+{% else %}
+{# {{ 'No species detected today'|trans }} #}
+{% if today == date %}
+{{ 'No species detected today.'|trans }}
+{% else %}
+{{ 'No species detected on '|trans }}
+{{ date|format_datetime('full', 'none') }}
+{% endif %}
 {% endif %}
 </li>
 </ul>
```