Compare commits

..

7 Commits

Author SHA1 Message Date
f9774c568d Обновил readme 2024-04-28 01:35:37 +03:00
65d7c642bf Update docker settings 2023-03-17 15:51:55 +03:00
52684a0b7c Small encoding fix 2023-03-17 00:39:29 +03:00
4b822855a7 Optimize Dockerfile 2023-01-12 13:05:38 +03:00
f4ce9083ae Upload test github action 2023-01-12 12:20:39 +03:00
5b808df1c2 Add docker support 2023-01-12 12:06:24 +03:00
bc0b2afb38 Rename cron file and fix requirements 2023-01-12 10:39:00 +03:00
6 changed files with 82 additions and 6 deletions

View File

@ -0,0 +1,41 @@
# Workflow: build the project's Docker image and publish it to the
# GitHub Container Registry (GHCR) on every push to `main`.
name: Create and publish a Docker image

on:
  push:
    branches: ['main']

env:
  REGISTRY: ghcr.io
  # Image name mirrors the repository slug, e.g. serega404/metricaliexpressexchangerate
  IMAGE_NAME: ${{ github.repository }}

jobs:
  build-and-push-image:
    runs-on: ubuntu-latest
    permissions:
      contents: read
      packages: write  # required for pushing the image to GHCR

    steps:
      - name: Checkout repository
        uses: actions/checkout@v3

      - name: Log in to the Container registry
        # Action pinned to a full commit SHA for supply-chain safety.
        uses: docker/login-action@f054a8b539a109f9f41c372932f1ae047eff08c9
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ github.actor }}
          # Ephemeral, workflow-scoped token provided by GitHub Actions.
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Extract metadata (tags, labels) for Docker
        id: meta
        uses: docker/metadata-action@98669ae865ea3cffbcbaa878cf57c20bbf1c6c38
        with:
          images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}

      - name: Build and push Docker image
        uses: docker/build-push-action@ad44023a93711e3deb337508980b4b5e9bcdc5dc
        with:
          context: .
          push: true
          # Tags/labels (e.g. `:main`) come from the metadata step above.
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}

17
Dockerfile Normal file
View File

@ -0,0 +1,17 @@
# Image for the Aliexpress/CBRF exchange-rate exporter: installs the Python
# dependencies, registers main.py in root's crontab, and runs crond in the
# foreground as the container's main process.
FROM python:3.11.2-alpine3.17
LABEL Maintainer="serega404"
WORKDIR /app
# Copy only the requirements first so the pip layer is cached independently
# of changes to the application code.
COPY requirements.txt requirements.txt
RUN pip3 install -r requirements.txt
# Setting up crontab
# Append the schedule into root's crontab (Alpine's busybox cron reads
# /etc/crontabs/root).
COPY crontab /tmp/crontab
RUN cat /tmp/crontab > /etc/crontabs/root
COPY main.py main.py
# run crond as main process of container
# -f = stay in foreground (keeps the container alive), -l 2 = log level 2.
CMD ["crond", "-f", "-l", "2"]

View File

@ -1,6 +1,6 @@
# MetricAliexpressExchangeRate # MetricAliexpressExchangeRate
[![MIT License](https://img.shields.io/github/license/serega404/EasyESPRealy)](https://github.com/serega404/MetricAliexpressExchangeRate) [![MIT License](https://img.shields.io/github/license/serega404/MetricAliexpressExchangeRate)](https://github.com/serega404/MetricAliexpressExchangeRate/blob/main/LICENSE)
Экспортер курса Aliexpress и ЦБ РФ Экспортер курса Aliexpress и ЦБ РФ
@ -20,8 +20,22 @@
<img src="./grafana.png" width="400" height="300" /> <img src="./grafana.png" width="400" height="300" />
### Запуск в Docker
```bash
docker run -d --name MetricAliexpressExchangeRate \
--restart=always \
-e METRIC_SERVER_URL='http(s)://<IP_ADDR>:<PORT>/' \
ghcr.io/serega404/metricaliexpressexchangerate:main
```
#### Дополнительные переменные среды:
* `CBRF_APISITE_URL`
* `ALI1USD_PAGE_URL`
### Библиотеки ### Библиотеки
* [Requests](https://requests.readthedocs.io/en/latest/)
* [BeautifulSoup](https://www.crummy.com/software/BeautifulSoup/) * [BeautifulSoup](https://www.crummy.com/software/BeautifulSoup/)
### Лицензия ### Лицензия

2
crontab Normal file
View File

@ -0,0 +1,2 @@
# Run the exporter once immediately when the container (crond) starts.
@reboot cd /app && python3 /app/main.py
# Then re-run it every 10 minutes to refresh the exported metrics.
*/10 * * * * cd /app && python3 /app/main.py

10
main.py
View File

@ -1,11 +1,11 @@
import requests, re, json import requests, re, json, os
from bs4 import BeautifulSoup from bs4 import BeautifulSoup
# Config # Config
Ali1USDPageURL = 'https://aliexpress.ru/item/32892046259.html' Ali1USDPageURL = os.environ.get('ALI1USD_PAGE_URL', 'https://aliexpress.ru/item/32892046259.html')
CBRFApiSiteURL = 'https://www.cbr-xml-daily.ru/daily_json.js' CBRFApiSiteURL = os.environ.get('CBRF_APISITE_URL', 'https://www.cbr-xml-daily.ru/daily_json.js')
MetricServerURL = 'http://192.168.0.100:8428/' MetricServerURL = os.environ.get('METRIC_SERVER_URL', 'http://192.168.0.100:8428/')
KursAli = "" KursAli = ""
KursCBRF = "" KursCBRF = ""
@ -18,7 +18,7 @@ if (site.status_code != 200):
print("Ali request error: " + str(site.status_code)) print("Ali request error: " + str(site.status_code))
exit() exit()
soup = BeautifulSoup(site.text, "html.parser") soup = BeautifulSoup(site.content, "html.parser")
tmpstring = " "*20 tmpstring = " "*20
for tag in soup.find_all("div"): for tag in soup.find_all("div"):

2
requirements.txt Normal file
View File

@ -0,0 +1,2 @@
# HTTP client used to fetch the Aliexpress page and the CBRF API.
requests
# HTML parser used to extract the price from the Aliexpress page.
beautifulsoup4